[ { "qid": "b8677742616fef051f00", "term": "Genghis Khan", "description": "founder and first Great Khan of the Mongol Empire", "question": "Are more people today related to Genghis Khan than Julius Caesar?", "answer": true, "facts": [ "Julius Caesar had three children.", "Genghis Khan had sixteen children.", "Modern geneticists have determined that out of every 200 men today has DNA that can be traced to Genghis Khan." ], "decomposition": [ "How many kids did Julius Caesar have?", "How many kids did Genghis Khan have?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Caesarion-2", "Julia (daughter of Caesar)-1" ] ], [ [ "Alakhai Bekhi-1", "Tolui-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Julius Caesar-75" ] ], [ [ "Genghis Khan-17" ] ], [ "operation" ] ], [ [ [ "Gaius Julius Caesar-7" ] ], [ [ "Genghis Khan-15" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "4668264df84fb7c25b39", "term": "The Police", "description": "English rock band", "question": "Could the members of The Police perform lawful arrests?", "answer": false, "facts": [ "The members of The Police were musicians, not law enforcement officers.", "Only law enforcement officers can perform lawful arrests." ], "decomposition": [ "Who can perform lawful arrests?", "Are members of The Police also #1?" ], "evidence": [ [ [ [ "Arrest-2" ] ], [ [ "Citizen's arrest-2", "The Police-1" ], "operation" ] ], [ [ [ "Arrest-7", "Law enforcement officer-13" ] ], [ [ "Law enforcement officer-13", "The Police-8" ] ] ], [ [ [ "Lawful interception-28" ] ], [ "operation" ] ] ] }, { "qid": "41450dbd623437269f8f", "term": "Depression (mood)", "description": "state of low mood and fatigue", "question": "Would a Monoamine Oxidase candy bar cheer up a depressed friend?", "answer": false, "facts": [ "Depression is caused by low levels of serotonin, dopamine and norepinephrine.", "Monoamine Oxidase breaks down neurotransmitters and lowers levels of serotonin, dopamine and norepinephrine." ], "decomposition": [ "Depression is caused by low levels of what chemicals?", "Monoamine Oxidase has an effect on what chemicals?", "Of the chemicals listed in both #1 and #2, does Monoamine Oxidase raise their levels?" ], "evidence": [ [ [ [ "Monoamine oxidase-8" ] ], [ [ "Monoamine oxidase-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Depression (mood)-13" ] ], [ [ "Monoamine oxidase-8" ] ], [ [ "Serotonin-36" ], "operation" ] ], [ [ [ "Biology of depression-1" ] ], [ [ "Monoamine oxidase-7" ] ], [ "operation" ] ] ] }, { "qid": "7419826e4373eb1e83ff", "term": "Grey seal", "description": "species of seal", "question": "Would a dog respond to bell before Grey seal?", "answer": true, "facts": [ "Grey seals have no ear flaps and their ears canals are filled with wax.", "Grey seals hear better underwater when their ears open like a valve.", "Dogs have sensitive ears that can hear as far as a quarter of a mile away." ], "decomposition": [ "How sensitive is a grey seal's hearing on land?", "How sensitive is a dog's hearing on land?", "Is #2 better than #1?" 
], "evidence": [ [ [ [ "Pinniped-24" ] ], [ [ "Hearing range-11", "Hertz-5" ] ], [ "operation" ] ], [ [ [ "Grey seal-1" ], "no_evidence" ], [ [ "Dog-54" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Grey seal-1" ], "no_evidence" ], [ [ "Dog anatomy-114" ] ], [ "operation" ] ] ] }, { "qid": "1b29d402c3e17cb3b435", "term": "Pound sterling", "description": "Official currency of the United Kingdom and other territories", "question": "Is a pound sterling valuable?", "answer": false, "facts": [ "A pound sterling is fiat money.", "Fiat money is backed by government decree and has no intrinsic value.", "One pound sterling is worth about 1.24 US dollars by May of 2020." ], "decomposition": [ "What is the value of the Pound Sterling based on?", "Is #1 the material used in making it?" ], "evidence": [ [ [ [ "Pound sterling-16" ] ], [ [ "Pound sterling-16" ] ] ], [ [ [ "Pound sterling-1", "Pound sterling-12" ] ], [ [ "Pound sterling-71" ] ] ], [ [ [ "Pound sterling-16" ] ], [ [ "One pound (British coin)-3" ], "operation" ] ] ] }, { "qid": "c2d2b9ff5a1e682c88dc", "term": "Shrimp", "description": "Decapod crustaceans", "question": "Is shrimp scampi definitely free of plastic?", "answer": false, "facts": [ "Shrimp scampi is a dish made with shrimp.", "Shrimp have been found to contain microplastics.", "Microplastics are plastic material." ], "decomposition": [ "What protein is Shrimp scampi made out of?", "What have #1 been found to contain?", "Are #2 free from plastic?" ], "evidence": [ [ [ [ "Scampi-1" ] ], [ [ "Plastic pollution-31", "Plastic pollution-48" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Fish-92", "Scampi-2" ] ], [ [ "Microplastics-12" ] ], [ [ "Microplastics-1" ] ] ], [ [ [ "Scampi-8" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "265dd54c248f8b048851", "term": "Rede Globo", "description": "Brazilian commercial television network", "question": "Do the anchors on Rede Globo speak Chinese?", "answer": false, "facts": [ "Rede Globo is a Brazilian television network.", "The official language of Brazil is Portuguese." ], "decomposition": [ "What country broadcasts Rede Globo?", "What is the official language of #1?", "Is #2 Chinese?" ], "evidence": [ [ [ [ "Rede Globo-1" ] ], [ [ "Brazil-1" ] ], [ "operation" ] ], [ [ [ "Rede Globo-1" ] ], [ [ "Brazil-1" ] ], [ "operation" ] ], [ [ [ "Rede Globo-1" ] ], [ [ "Portuguese language-1" ] ], [ "operation" ] ] ] }, { "qid": "29bf9aa61ed4124cd84c", "term": "Albany, Georgia", "description": "City in Georgia, United States", "question": "Will the Albany in Georgia reach a hundred thousand occupants before the one in New York?", "answer": false, "facts": [ "Albany, GA has around 75,000 people", "Albany, NY has almost 100,000 people" ], "decomposition": [ "What is the population of Albany, Georgia?", "What is the population of Albany, New York?", "What is the difference between 100,000 and #1?", "What is the difference between 100,000 and #2?", "Is #3 smaller than #4?" 
], "evidence": [ [ [ [ "Albany, Georgia-1" ] ], [ [ "Albany, New York-2" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Albany, Georgia-35" ] ], [ [ "Albany, New York-2" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Albany, Georgia-1" ] ], [ [ "Albany, New York-2" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "f231532fe17fd971d1e6", "term": "Wonder Woman (2017 film)", "description": "American superhero film directed by Patty Jenkins", "question": "Is a Boeing 737 cost covered by Wonder Woman (2017 film) box office receipts?", "answer": true, "facts": [ "The average cost of a US Boeing 737 plane is 1.6 million dollars.", "Wonder Woman (2017 film) grossed over 800 million dollars at the box office." ], "decomposition": [ "How much does a Boeing 737 cost?", "How much did the 2017 movie Wonder Woman gross?", "Is #2 greater than #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Wonder Woman (2017 film)-3" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Wonder Woman (2017 film)-3" ] ], [ "operation" ] ], [ [ [ "Boeing 737-13" ], "no_evidence" ], [ [ "Wonder Woman (2017 film)-31" ] ], [ "operation" ] ] ] }, { "qid": "decb9886ca17f7af209d", "term": "Saint Vincent and the Grenadines", "description": "Country in the Caribbean", "question": "Is the language used in Saint Vincent and the Grenadines rooted in English?", "answer": true, "facts": [ "The primary language spoken in Saint Vincent and the Grenadines is Vincentian Creole.", "Vincentian Creole is English-based, with elements of French, Antillean Creole, and indigenous South American and African languages." ], "decomposition": [ "What language is used in Saint Vincent and the Grenadines?", "Is #1 based in English?" ], "evidence": [ [ [ [ "Demographics of Saint Vincent and the Grenadines-9" ] ], [ [ "Saint Vincent and the Grenadines-59" ], "operation" ] ], [ [ [ "Vincentian Creole-1" ] ], [ [ "Vincentian Creole-1" ] ] ], [ [ [ "Saint Vincent and the Grenadines-59" ] ], [ [ "Saint Vincent and the Grenadines-59" ] ] ] ] }, { "qid": "622c71f22ddb683c15a8", "term": "Casio", "description": "Japanese electronics company", "question": "Can you buy Casio products at Petco?", "answer": false, "facts": [ "Casio is a manufacturer of consumer electronics and watches", "Petco is a chain store that sells pet supplies like food, bowls, litter, toys, cages and grooming equipment" ], "decomposition": [ "What kind of products does Casio manufacture?", "What kind of products does Petco sell?", "Does #1 overlap with #2?" ], "evidence": [ [ [ [ "Casio-1" ] ], [ [ "Petco-1" ] ], [ "operation" ] ], [ [ [ "Casio-1" ] ], [ [ "Petco-1" ] ], [ "operation" ] ], [ [ [ "Casio-1" ] ], [ [ "Petco-1" ] ], [ "operation" ] ] ] }, { "qid": "b77d2efee37741e44c32", "term": "Space Race", "description": "Competition between the USSR and the USA to explore space", "question": "Did the Space Race use relay batons?", "answer": false, "facts": [ "The Space Race was a competition between the USA and USSR regarding spaceflight and exploration", "Relay batons are used in relay races", "Relay races are athletic track and field events" ], "decomposition": [ "What was the Space Race?", "What are relay batons used for?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Space Race-1" ] ], [ [ "Relay race-11" ] ], [ [ "Relay race-11", "Space Race-1" ], "operation" ] ], [ [ [ "Space Race-1" ] ], [ [ "Relay race-1" ] ], [ "operation" ] ], [ [ [ "Space Race-1" ] ], [ [ "Relay race-1" ] ], [ "operation" ] ] ] }, { "qid": "d697f6246a7d06e195ee", "term": "Deciduous", "description": "Trees or shrubs that lose their leaves seasonally", "question": "Are Christmas trees dissimilar to deciduous trees?", "answer": true, "facts": [ "Christmas trees are usually pine trees.", "Pine trees keep their needles all year round." ], "decomposition": [ "Which kind of trees are commonly used as Christmas trees?", "Are #1 dissimilar to deciduous trees?" ], "evidence": [ [ [ [ "Christmas tree-1" ] ], [ [ "Deciduous-1" ], "operation" ] ], [ [ [ "Christmas tree-56" ] ], [ [ "Fir-1" ], "operation" ] ], [ [ [ "Christmas tree-1" ] ], [ [ "Deciduous-1" ] ] ] ] }, { "qid": "98a30c58fdff7676076c", "term": "Biochemistry", "description": "study of chemical processes in living organisms", "question": "Does Biochemistry study gluons?", "answer": false, "facts": [ "Biochemistry studies role, function, and structure of biomolecules.", "Gluon, the so-called messenger particle of the strong nuclear force, which binds sub-atomic particles known as quarks within the protons and neutrons of stable matter as well as within heavier, short-lived particles created at high energies.", "biomolecules are comprised of atoms. " ], "decomposition": [ "What are gluons?", "What things are studied in biochemistry?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Gluon-1" ] ], [ [ "Biochemistry-1" ] ], [ "operation" ] ], [ [ [ "Gluon-1" ] ], [ [ "Biochemistry-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Gluon-1" ] ], [ [ "Biochemistry-1", "Biochemistry-4" ] ], [ "operation" ] ] ] }, { "qid": "a2a388a19b87a34d902b", "term": "Kingdom of Hungary", "description": "former Central European monarchy (1000–1946)", "question": "Did land owners elect their rulers in the Kingdom of Hungary?", "answer": false, "facts": [ "The Kingdom of Hungary was a monarchy.", "Monarchies do not allow citizens to elect their own rulers." ], "decomposition": [ "Which kind of government ruled over the Kingdom of Hungary?", "Does #1 allow citizens to elect their own rulers?" ], "evidence": [ [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Monarchy-1" ], "operation" ] ], [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Kingdom of Hungary-1" ] ] ], [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Monarchy-16" ], "operation" ] ] ] }, { "qid": "df84924c94e9e929ef10", "term": "Nancy Pelosi", "description": "52nd speaker of the United States House of Representatives", "question": "Would Nancy Pelosi publicly denounce abortion?", "answer": false, "facts": [ "Abortion is a topic that divides the Republican Party and the Democratic Party.", "Nancy Pelosi is a Democrat and appeals to that base.", "The Democratic base is largely pro-choice with abortion. " ], "decomposition": [ "Which US political party does Nancy Pelosi identify with?", "Do most people who identify with #1 oppose abortion?" 
], "evidence": [ [ [ [ "Nancy Pelosi-1" ] ], [ [ "Democratic Party (United States)-3" ], "operation" ] ], [ [ [ "Nancy Pelosi-1" ] ], [ [ "Democratic Party (United States)-3" ], "no_evidence" ] ], [ [ [ "Speaker of the United States House of Representatives-3" ] ], [ [ "Democratic Party (United States)-70" ] ] ] ] }, { "qid": "ad9bf53d84f83ebc9822", "term": "Dragon Ball", "description": "Japanese media franchise", "question": "Does Dragon Ball shows and movies fall short of Friday 13th number of projects?", "answer": true, "facts": [ "Dragon Ball has 6 TV series, 3 TV specials, and 2 direct to video spinoffs as of 2020.", "Friday the 13th has 12 movies in the franchise and 1 TV series as of 2020." ], "decomposition": [ "How many Dragon Ball series, TV specials and other features have been released?", "How many Friday the 13th franchise films and television series have been released?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Dragon Ball-2" ] ], [ [ "Friday the 13th (franchise)-1" ] ], [ "operation" ] ], [ [ [ "Dragon Ball-2" ] ], [ [ "Friday the 13th (franchise)-1", "Friday the 13th (franchise)-2" ] ], [ "operation" ] ], [ [ [ "Dragon Ball-28", "Dragon Ball-33" ], "no_evidence" ], [ [ "Friday the 13th (franchise)-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "58fd5574f3447c8f590f", "term": "Amnesia", "description": "Cognitive disorder where the memory is disturbed or lost", "question": "Would a student of the class of 2017 have amnesia about 9/11?", "answer": true, "facts": [ "Childhood amnesia is common, with most adults not remembering their lives before 2 or 3 years of age", "9/11 occurred in 2001", "Students graduating high-school in 2017 would have been born in 1999" ], "decomposition": [ "What year did 9/11 occur?", "In what year would students graduating high-school in 2017 have been born in?", "What age is childhood amnesia most common in?", "If someone was born in #2, how old would they have been in #1?", "Does #3 overlap with #4?" ], "evidence": [ [ [ [ "September 11 attacks-1" ] ], [ [ "Yara Shahidi-1" ], "no_evidence" ], [ [ "Childhood amnesia-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "September 11 attacks-1" ] ], [ [ "Secondary school-1" ], "no_evidence" ], [ [ "Childhood amnesia-1" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Inside 9/11-1" ] ], [ [ "Twelfth grade-54" ] ], [ [ "Childhood amnesia-3" ] ], [ [ "Infant-2" ] ], [ [ "Childhood amnesia-3", "Infant-2" ], "operation" ] ] ] }, { "qid": "8e01fbec3b932a3252ef", "term": "Greed", "description": "an inordinate or insatiable longing, especially for wealth, status, and power", "question": "Is greed the most prevalent of the Seven Deadly Sins?", "answer": false, "facts": [ "Greed is a longing for wealth and power.", "White collar crime involves businesses stealing money or assets from people. ", "5-10% of arrests per 100,000 arrests were for white collar crime according to Department of Justice statistics.", "Gluttony is the overindulgence in food or similar things.", "39.8% of US adults were classified as overweight according to the National Center for Health Statistics." ], "decomposition": [ "Is greed a deadly sin?", "Is gluttonly a deadly sin?", "What percent of crimes involved greed?", "What percent of people are overweight?", "If #1 and #2 are both yes is #3 a higher percentager than #4?" 
], "evidence": [ [ [ [ "Seven deadly sins-1" ] ], [ [ "Seven deadly sins-1" ] ], [ [ "Theft-52" ], "no_evidence" ], [ [ "Overweight-18" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Greed-10" ] ], [ [ "Gluttony-2" ] ], [ [ "Theft-1" ], "no_evidence" ], [ [ "Obesity-4" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Seven deadly sins-1" ] ], [ [ "Seven deadly sins-1" ] ], [ [ "White-collar crime-3" ], "no_evidence" ], [ [ "Obesity in the United States-41" ] ], [ "operation" ] ] ] }, { "qid": "d8649d55a6e1a22b92d9", "term": "Psychic", "description": "person who claims to use extrasensory perception to identify information hidden from the normal senses", "question": "Would a psychic who admits to hot reading be trustworthy?", "answer": false, "facts": [ "Hot reading is a technique used by people presenting themselves as psychics to acquire information about a subject prior to the psychic session.", "Hot reading is considered deception in the psychic community." ], "decomposition": [ "What do people pretend to be in order to successfully carry out hot reading?", "Do the 'real' #1 consider hot reading to be genuine?" ], "evidence": [ [ [ [ "Hot reading-1", "Hot reading-2" ] ], [ "operation" ] ], [ [ [ "Hot reading-1" ] ], [ [ "Hot reading-1", "Hot reading-2" ] ] ], [ [ [ "Hot reading-1" ] ], [ [ "Psychic-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "e9e78aa7b0c1ec3f9cf9", "term": "Comma", "description": "Punctuation mark", "question": "Is average number of peas in a pod enough commas for a billion?", "answer": true, "facts": [ "The average number of peas in a pod is 6 or 7.", "A billion is a number that has three commas in it." ], "decomposition": [ "How many peas are in the average pod?", "How many commas are needed for a billion?", "Is #1 at least equal to #2?" ], "evidence": [ [ [ [ "Pea-1" ], "no_evidence" ], [ [ "Billion-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Pea-1" ], "no_evidence" ], [ [ "Billion-2" ] ], [ "operation" ] ], [ [ [ "Pea-1" ] ], [ [ "1,000,000,000-1" ] ], [ "operation" ] ] ] }, { "qid": "903178c36c89e5f55a20", "term": "Model (person)", "description": "person employed to display, advertise and promote products, or to serve as a visual aid", "question": "Does actress Leila George lack the height to be a model?", "answer": false, "facts": [ "Actress Leila George, the daughter of Vincent D'onofrio, is 5'9\" tall.", "Model Cindy Crawford is 5'9\" tall.", "Model Agyness Deyn is 5'8\" tall.", "Model Sara Sampaio is 5'8\" tall." ], "decomposition": [ "How tall is Leila George?", "How tall is Cindy Crawford? ", "What is the height of model Sara Sampaio?", "Is #1 shorter than both #2 and #3?" ], "evidence": [ [ [ [ "Leila George-2" ], "no_evidence" ], [ [ "Cindy Crawford-9" ] ], [ [ "Sara Sampaio-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Leila George-1" ], "no_evidence" ], [ [ "Cindy Crawford-9" ] ], [ [ "Sara Sampaio-1" ], "operation" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Cindy Crawford-9" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "86ead3f15417204affc2", "term": "Dustin Hoffman", "description": "American actor and director", "question": "Is Dustin Hoffman one of the B'nei Yisrael?", "answer": true, "facts": [ "Dustin Hoffman was raised in a Jewish family.", "In modern Hebrew, b'nei yisrael (\"children of Israel\") can denote the Jewish people at any time in history." 
], "decomposition": [ "What does B'nei Yisrael refer to?", "What religion was Dustin Hoffman family as he was growing up?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Israelites-11" ] ], [ [ "Dustin Hoffman-7", "Dustin Hoffman-8" ] ], [ "operation" ] ], [ [ [ "Israelites-8" ] ], [ [ "Dustin Hoffman-7" ] ], [ "operation" ] ], [ [ [ "Indian Jews in Israel-7" ] ], [ [ "Dustin Hoffman-7" ] ], [ "operation" ] ] ] }, { "qid": "3e023027bb296245506a", "term": "McDonald's", "description": "American fast food restaurant chain", "question": "If you were on a diet, would you have to skip lunch at McDonald's?", "answer": false, "facts": [ "McDonald's offers low calorie brunch options like parfaits and egg white sandwiches. ", "McDonald's offers low calorie lunch options including basic hamburgers and salads." ], "decomposition": [ "What is characteristic of food eaten by someone on a diet?", "Are lunch options characterized by #1 unavailable at McDonald's?" ], "evidence": [ [ [ [ "Dieting-1" ] ], [ [ "McDonald's-2" ], "operation" ] ], [ [ [ "Healthy diet-2" ] ], [ [ "McDonald's-2" ] ] ], [ [ [ "Dieting-21" ], "no_evidence" ], [ [ "McDonald's-2" ], "operation" ] ] ] }, { "qid": "e56fdbdf08cdc8694536", "term": "The Powerpuff Girls", "description": "American animated television series", "question": "Could the Powerpuff Girls hypothetically attend the Camden Military Academy?", "answer": false, "facts": [ "The Powerpuff Girls are kindergarten aged girls.", "Camden Military Academy is a private, all-male, military boarding school located in Camden, South Carolina.", "Camden Military Academy accepts male students in grades 7 through 12." ], "decomposition": [ "What gender are the Powerpuff Girls?", "What gender is allowed to attend the Camden Military Academy?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Camden Military Academy-1" ] ], [ [ "Camden Military Academy-1", "The Powerpuff Girls-1" ] ] ], [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Camden Military Academy-1" ] ], [ "operation" ] ], [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Camden Military Academy-1" ] ], [ "operation" ] ] ] }, { "qid": "646d3503f3a3939e2e63", "term": "Sea of Japan", "description": "Marginal sea between Japan, Russia and Korea", "question": "Would the top of Mount Fuji stick out of the Sea of Japan? ", "answer": true, "facts": [ "The average depth of the Sea of Japan is 5,748 feet (1,752 metres) and its maximum depth is 12,276 feet (3,742 metres)", "Mount Fuji is 3,776.24 metres (12,389.2 ft) tall. " ], "decomposition": [ "How tall is Mount Fuji?", "What is the maximum depth of the Sea of Japan?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Mount Fuji-18" ] ], [ [ "Sea of Japan-15" ] ], [ "operation" ] ], [ [ [ "Mount Fuji-1" ] ], [ [ "Sea of Japan-15" ] ], [ "operation" ] ], [ [ [ "Mount Fuji-1" ] ], [ [ "Sea of Japan-15" ] ], [ "operation" ] ] ] }, { "qid": "a96d28e1221bfbcf50a9", "term": "CT scan", "description": "medical imaging procedure which uses X-rays to produce cross-sectional images", "question": "Would an uninsured person be more likely than an insured person to decline a CT scan?", "answer": true, "facts": [ "Without insurance, a CT scan can cost up to $5,000.", "Most insurance companies will cover or reimburse the cost of a CT scan." ], "decomposition": [ "Typically how much does it cost to get a CT scan without insurance?", "On average, how much does it cost to get a CT scan with insurance?", "Is #2 less than #1?" 
], "evidence": [ [ [ [ "Full-body CT scan-12" ] ], [ [ "Full-body CT scan-12" ] ], [ "operation" ] ], [ [ [ "CT scan-53" ], "no_evidence" ], [ [ "CT scan-53" ], "no_evidence" ], [ "operation" ] ], [ [ [ "CT scan-53" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c87e343f444912cc9680", "term": "Donatello", "description": "Italian painter and sculptor", "question": "Was the Donatello crucifix identified in 2020 life size?", "answer": false, "facts": [ "The crucifix discovered in the church of Sant’Angelo depicts an adult man.", "The crucifix discovered in the church of Sant’Angelo is 89 cm high.", "The crucifix discovered in the church of Sant'Angelo was identified as being a work of Donatello.", "The average height of an adult man has been at least 150 cm in historical times." ], "decomposition": [ "The crucifix sculpted by Donatello and identified in 2020 is a depiction of what?", "What is the average height of #1?", "What is the average height of a real, living, #1?", "Is #2 equal to #3?" ], "evidence": [ [ [ [ "Donatello-17" ] ], [ "no_evidence" ], [ [ "Instrument of Jesus' crucifixion-22" ] ], [ "operation" ] ], [ [ [ "Crucifix-1", "Donatello-17" ] ], [ "no_evidence" ], [ [ "Crucifixion of Jesus-65" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Donatello-17" ] ], [ [ "Crucifix-1" ], "no_evidence" ], [ [ "Short stature-2" ] ], [ "operation" ] ] ] }, { "qid": "8716b85472632d6e200e", "term": "Disneyland Paris", "description": "Theme park resort in France owned by The Walt Disney Company", "question": "Is Disneyland Paris the largest Disney resort?", "answer": false, "facts": [ "Disneyland Paris contains two parks, several hotels, and a shopping district.", "By comparison, Walt Disney World in Florida contains four parks, two waterparks, a shopping district, and many hotels.", "Disney World is bigger than Disneyland Paris Resort and Disneyland California Resort combined." ], "decomposition": [ "How big is Disneyland Paris in square miles?", "How big is Walt Disney World in square miles?", "Is #1 larger than #2?" ], "evidence": [ [ [ [ "Disneyland Paris-40" ] ], [ [ "Walt Disney World-1" ] ], [ "operation" ] ], [ [ [ "Disneyland Paris-6" ] ], [ [ "Walt Disney World-1" ] ], [ "operation" ] ], [ [ [ "Disneyland Paris-40" ] ], [ [ "Walt Disney World-1" ] ], [ "operation" ] ] ] }, { "qid": "8ab96f428e06984bf0c6", "term": "Keyboard layout", "description": "any specific mechanical, visual, or functional arrangement of the keys of a keyboard or typewriter", "question": "Could someone with fine motor control issues benefit from an altered keyboard layout?", "answer": true, "facts": [ "Fine motor control involves making small, precise movements like painting or typing. ", "The standard keyboard layout is designed to be used by someone without any motor control issues.", "There are modified keyboards for multiple types of disability." ], "decomposition": [ "What types of keyboard layouts exist?", "Among #1, which keyboard layouts are optimized for disabilities?", "Are any of #2 better for those with limited fine motor control?" 
], "evidence": [ [ [ [ "Keyboard layout-170", "Keyboard layout-43", "Keyboard layout-52" ] ], [ [ "Keyboard layout-170" ] ], [ [ "Keyboard layout-170" ], "no_evidence" ] ], [ [ [ "Keyboard layout-33" ] ], [ [ "Keyboard layout-170" ] ], [ [ "Keyboard layout-170" ] ] ], [ [ [ "Computer keyboard-27" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7870b1cef39a4f685911", "term": "Adam Sandler", "description": "American actor, comedian, screenwriter, and producer", "question": "Would Adam Sandler get a reference to Cole Spouse and a scuba man doll?", "answer": true, "facts": [ "Adam Sandler and Cole Sprouse starred together in \"Big Daddy\".", "A prop used throughout \"Big Daddy\" was a scuba diving action figure that Cole's character called 'Scuba Steve'." ], "decomposition": [ "What did Adam Sandler and Cole Sprouse star together in?", "Was a scuba man doll used as a prop by Cole in #1?" ], "evidence": [ [ [ [ "Big Daddy (1999 film)-1" ] ], [ "no_evidence" ] ], [ [ [ "Big Daddy (1999 film)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Big Daddy (1999 film)-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "1f0e2597327ddd338fc2", "term": "Common warthog", "description": "Wild member of the pig family", "question": "Is there a warthog on Broadway?", "answer": true, "facts": [ "Disney's The Lion King is a popular Broadway musical.", "One of the characters is named Pumbaa.", "Pumbaa is a warthog." ], "decomposition": [ "Which animals did Disney movie 'Lion King' feature?", "Is a warthog included in #1?", "Was Lion King adapted for a Broadway musical?", "Considering #2 and #3, would the warthog appear on Broadway?" ], "evidence": [ [ [ [ "The Lion King-10" ] ], [ [ "The Lion King-10" ] ], [ [ "The Lion King-24" ] ], [ [ "The Lion King-10", "The Lion King-24" ], "operation" ] ], [ [ [ "The Lion King-10" ] ], [ "operation" ], [ [ "The Lion King (musical)-1" ] ], [ "operation" ] ], [ [ [ "The Lion King-10" ] ], [ "operation" ], [ [ "The Lion King (musical)-2" ] ], [ "operation" ] ] ] }, { "qid": "50c3af76816a5c2f07d2", "term": "Ice", "description": "water frozen into the solid state", "question": "Does Disney have an ice princess?", "answer": true, "facts": [ "In 2013, Disney released Frozen.", "Frozen features Elsa, a princess with magical ice powers." ], "decomposition": [ "What are some popular Disney characters?", "Is any of #1 an ice princess?" ], "evidence": [ [ [ [ "Disney Princess-1" ], "no_evidence" ], [ [ "Elsa (Frozen)-2" ] ] ], [ [ [ "Elsa (Frozen)-2" ] ], [ "operation" ] ], [ [ [ "Disney Princess-33" ] ], [ [ "Disney Princess-33", "Elsa (Frozen)-3" ], "no_evidence" ] ] ] }, { "qid": "3a7d825a0fcb4211742f", "term": "Biochemistry", "description": "study of chemical processes in living organisms", "question": "Would a geographer use biochemistry in their work?", "answer": false, "facts": [ "A geographer is a scientist whose area of study is geography, the study of Earth's natural environment and human society.", "Biochemistry is the study of chemical processes within and relating to living organisms." ], "decomposition": [ "What is the area of study of a geographer?", "What is the area of study of Biochemistry", "Is any of #1 in #2?" 
], "evidence": [ [ [ [ "Geography-22" ] ], [ [ "Biochemistry-1" ] ], [ "operation" ] ], [ [ [ "Geographer-1" ] ], [ [ "Biochemistry-1" ] ], [ "operation" ] ], [ [ [ "Geographer-1" ] ], [ [ "Biochemistry-1", "Biochemistry-4" ] ], [ "operation" ] ] ] }, { "qid": "ed6d9f875c9787e1817c", "term": "Compact disc", "description": "Optical disc for storage and playback of digital audio", "question": "Could George Washington's own speeches have been recorded live to a compact disc?", "answer": false, "facts": [ "George Washington died in 1799.", "CDs weren't invented until 1982." ], "decomposition": [ "When did George Washington die?", "When were compact discs introduced?", "Is #1 after #2?" ], "evidence": [ [ [ [ "George Washington-1" ] ], [ [ "Compact disc-1" ] ], [ "operation" ] ], [ [ [ "George Washington-121" ] ], [ [ "Compact disc-7" ] ], [ "operation" ] ], [ [ [ "George Washington-1" ] ], [ [ "Compact disc-1" ] ], [ "operation" ] ] ] }, { "qid": "93f0f18ac8c96a44b849", "term": "Lil Jon", "description": "American rapper, record producer and DJ from Georgia", "question": "Was Lil Jon's top ranked Billboard song a collaboration with a member of The Lox?", "answer": false, "facts": [ "Lil Jon's highest ranked billboard song was Yeah.", "Yeah was a collaboration between Lil Jon, Usher, and Ludacris.", "The Lox is a rap trio consisting of: Styles P, Sheek Louch, and Jadakiss." ], "decomposition": [ "What is Lil Jon's top ranked Billboard song?", "What artists contributed to #1?", "Who makes up the group The Lox?", "Is any element of #3 also an element of #2?" ], "evidence": [ [ [ [ "Yeah! (Usher song)-1" ] ], [ [ "Yeah! (Usher song)-1" ] ], [ [ "The Lox-1" ] ], [ [ "The Lox-1", "Yeah! (Usher song)-7" ] ] ], [ [ [ "Lil Jon-1", "Yeah! (Usher song)-2" ] ], [ [ "Yeah! (Usher song)-10" ] ], [ [ "The Lox-1" ] ], [ "operation" ] ], [ [ [ "Lil Jon-1" ] ], [ [ "Yeah! (Usher song)-1" ] ], [ [ "The Lox-1" ] ], [ "operation" ] ] ] }, { "qid": "9a87e2e0f53e5d8689f2", "term": "Sulfur", "description": "Chemical element with atomic number 16", "question": "Would food made with black salt smell of sulfur?", "answer": true, "facts": [ "Black Salt has is a kiln-fired rock salt that contains sulfur.", "When black salt is cooked with, it smells similar to rotten eggs. ", "Rotten eggs smell like sulfur." ], "decomposition": [ "What does black salt smell like when cooked?", "Does #1 smell similar to sulfur?" ], "evidence": [ [ [ [ "Kala namak-8" ] ], [ [ "Kala namak-8" ] ] ], [ [ [ "Kala namak-1" ] ], [ [ "Kala namak-1" ] ] ], [ [ [ "Sea salt-9" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "83717baab39233083d34", "term": "Spirit (rover)", "description": "NASA Mars rover", "question": "Would a broadcast from Spirit make the news in 2020?", "answer": true, "facts": [ "In 2010, the Spirit rover sent what is recorded as its final transmission.", "In 2019, another rover made the news with its viral \"final words\"" ], "decomposition": [ "When did the martian rover 'Spirit' send its last transmission?", "Which rover sent another 'final message' in 2019?", "Is #1 before 2020 and did #2 make the news?" 
], "evidence": [ [ [ [ "Spirit (rover)-1" ] ], [ [ "Opportunity (rover)-1", "Opportunity (rover)-15" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Spirit (rover)-1" ] ], [ [ "Opportunity (rover)-1" ] ], [ [ "Opportunity (rover)-1", "Spirit (rover)-1" ], "operation" ] ], [ [ [ "Mars Exploration Rover-5" ] ], [ [ "Opportunity (rover)-15" ] ], [ [ "Opportunity (rover)-16" ], "operation" ] ] ] }, { "qid": "d24514ae216c7a55f4f0", "term": "Miami", "description": "City in Florida, United States", "question": "Is Miami a city on the American West Coast?", "answer": false, "facts": [ "Miami is a city in the state of Florida.", "Florida is the southernmost state on the American East Coast." ], "decomposition": [ "What state is Miami located in?", "Which states are part of the American West Coast?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Miami-1" ] ], [ [ "West Coast of the United States-1" ] ], [ "operation" ] ], [ [ [ "Miami-2" ] ], [ [ "Pacific states-1" ] ], [ [ "Pacific states-1" ], "operation" ] ], [ [ [ "Miami-1" ] ], [ [ "West Coast of the United States-1" ] ], [ "operation" ] ] ] }, { "qid": "7f79c8faf724cc8f0e72", "term": "Frankenstein", "description": "1818 novel by Mary Shelley", "question": "Could Robert Wadlow hypothetically see Frankenstein's monster's bald spot from above?", "answer": true, "facts": [ "The monster in Mary Shelley's novel, Frankenstein, was said to be 8 feet tall.", "Robert Wadlow was the world's tallest man.", "Robert Wadlow was 8 feet 11.1 inches tall." ], "decomposition": [ "How tall is Frankenstein?", "How tall is Robert Wadlow?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Frankenstein-8" ] ], [ [ "Robert Wadlow-2" ] ], [ "operation" ] ], [ [ [ "Frankenstein-8" ] ], [ [ "Robert Wadlow-2" ] ], [ "operation" ] ], [ [ [ "Frankenstein-8" ], "no_evidence" ], [ [ "Robert Wadlow-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "427fe3968e32005479b9", "term": "Tibia", "description": "larger of the two bones of the leg below the knee for vertebrates", "question": "Is the tibia necessary to win the Stanley Cup?", "answer": true, "facts": [ "The Stanley Cup is the championship trophy of the National Hockey League", "Ice hockey is a game played by individuals wearing ice skates to move around a frozen playing field", "The tibia is a leg bone", "Legs are required in order to use ice skates" ], "decomposition": [ "Which achievement leads to the award of the Stanley Cup?", "Which sport does #1 involve?", "Which body parts are actively involved in playing #2", "Which part of the body is the tibia found in?", "Is #4 included in #3?" ], "evidence": [ [ [ [ "Stanley Cup-1" ] ], [ [ "Ice hockey-1" ] ], [ [ "Ice hockey-55" ] ], [ [ "Tibia-1" ] ], [ "operation" ] ], [ [ [ "Stanley Cup-1" ] ], [ [ "Ice hockey-1" ] ], [ [ "Ice skate-1" ] ], [ [ "Tibia-1" ] ], [ "operation" ] ], [ [ [ "Stanley Cup-1" ] ], [ [ "Stanley Cup-1" ] ], [ [ "Ice hockey-43" ], "no_evidence" ], [ [ "Tibia-1" ] ], [ "operation" ] ] ] }, { "qid": "27368c21e50b6af694ab", "term": "Ethics", "description": "branch of philosophy that systematizes, defends, and recommends concepts of right and wrong conduct", "question": "Would an ethics professor teach a class on Cezanne?", "answer": false, "facts": [ "Cezanne was an Impressionist painter", "Aesthetics is the branch of philosophy that deals with the arts" ], "decomposition": [ "What was Cezanne known for?", "What branch of philosophy would deal with #1?", "Is #2 the same as ethics? 
" ], "evidence": [ [ [ [ "Paul Cézanne-1" ] ], [ [ "Paul Cézanne-33" ], "no_evidence" ], [ [ "Ethics-1" ], "operation" ] ], [ [ [ "Paul Cézanne-1" ] ], [ [ "Aesthetics-1" ] ], [ [ "Ethics-1" ], "operation" ] ], [ [ [ "Paul Cézanne-1" ] ], [ [ "Paul Cézanne-33" ] ], [ "operation" ] ] ] }, { "qid": "88b682482647a80a080d", "term": "Rice pudding", "description": "Dish made from rice mixed with water or milk", "question": "If you add water to rice pudding is it horchata?", "answer": false, "facts": [ "Horchata is a drink made from soaking dry rice in water for hours, then pulverizing and straining the mixture and adding spices and sweetener.", "Rice pudding includes ingredients like eggs, and whole grains of cooked rice. " ], "decomposition": [ "What ingredients are in horchata?", "What ingredients are in rice pudding?", "If you add water to #2, is it the same as #1?" ], "evidence": [ [ [ [ "Horchata-1" ] ], [ [ "Rice pudding-1" ] ], [ "operation" ] ], [ [ [ "Horchata-9" ] ], [ [ "Rice pudding-4" ] ], [ "operation" ] ], [ [ [ "Horchata-1" ] ], [ [ "Rice pudding-1" ] ], [ "operation" ] ] ] }, { "qid": "267609f49ccd3c0839c3", "term": "Swiss Guard", "description": "Military of Vatican City", "question": "Can the Swiss Guard fill the Virginia General Assembly chairs?", "answer": false, "facts": [ "The Virginia General Assembly has 140 seats.", "The Swiss Guard is an honour guard of Vatican City that consists of 135 men." ], "decomposition": [ "What is the size of the Swiss Guard?", "What is the seating capacity of the Virginia General Assembly?", "Is #1 equal to or greater than #2?" ], "evidence": [ [ [ [ "Swiss Guards-18" ], "no_evidence" ], [ [ "Virginia General Assembly-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Swiss Guard-31" ] ], [ [ "Virginia General Assembly-1" ] ], [ "operation" ] ], [ [ [ "Swiss Guard-9" ] ], [ [ "Virginia General Assembly-1" ] ], [ "operation" ] ] ] }, { "qid": "88016d1d8b284aa9113c", "term": "Nickel", "description": "Chemical element with atomic number 28", "question": "If your skin was turning the color of a zombie, could it be because of nickel?", "answer": true, "facts": [ "Zombies are often depicted as green in pallor. ", "Nickel in jewelry often turns skin a greenish color." ], "decomposition": [ "What color skin are zombies typically depicted with?", "Does Nickel turn a person's skin #1?" ], "evidence": [ [ [ [ "Zombie-3" ] ], [ [ "Nickel allergy-12" ] ] ], [ [ "no_evidence" ], [ [ "Glass coloring and color marking-3" ], "no_evidence" ] ], [ [ [ "Zombie-3" ], "no_evidence" ], [ [ "Pallor mortis-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "5c969a7fccde48210ec8", "term": "Dolce & Gabbana", "description": "Italian fashion house", "question": "Would a Dolce & Gabbana suit wearer be shunned by their Amish cousins?", "answer": true, "facts": [ "Dolce & Gabbana is an Italian luxury fashion design company.", "The Amish, who value plain clothes, frown upon buttons and have banned velcro and zippers", "The Two Tone Dolce & Gabbana suit has several buttons.", "The Amish cease interactions with sinners by avoiding, or shunning them." ], "decomposition": [ "What type of clothing do the Amish prefer?", "What happens if an Amish person wears clothes going against #1?", "What clothing pieces are Dolce & Gabbana known for?", "If Amish cousins wore #3, would #2 happen to them?" 
], "evidence": [ [ [ [ "Amish-27" ] ], [ [ "Excommunication-39" ] ], [ [ "Dolce & Gabbana-1" ] ], [ "operation" ] ], [ [ [ "Plain dress-3" ] ], [ [ "Amish-6" ] ], [ [ "Dolce & Gabbana-1" ] ], [ "operation" ] ], [ [ [ "Amish-1" ] ], [ [ "Amish-6" ] ], [ [ "Dolce & Gabbana-1", "Dolce & Gabbana-32" ] ], [ [ "Amish-6" ], "operation" ] ] ] }, { "qid": "30e2cf44640c4fe81d80", "term": "Illuminati", "description": "A name given to several groups, both real and fictitious", "question": "Is the Illuminati card game still popular?", "answer": false, "facts": [ "The original version of the game was released in 1982.", "A collectible card game version was released in 1995 but only had one set.", "The most recent edition of the base game was published in 2007." ], "decomposition": [ "When was the last Illuminati card game published?", "Was #1 with the last few years?" ], "evidence": [ [ [ [ "Illuminati (game)-1" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Illuminati (game)-2", "Illuminati (game)-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Illuminati (game)-13" ] ], [ "operation" ] ] ] }, { "qid": "c2f573c79ceab25e8fcd", "term": "Strawberry", "description": "edible fruit", "question": "Can a strawberry get worms similar to dogs?", "answer": true, "facts": [ "Strawberry can suffer from black root rot and nematodes.", "Dogs can suffer from a variety of worms including roundworms that lay eggs on them.", "Nematodes are parasites that are also called roundworms and ascarids.", "Nematodes are parasites that feed off of strawberry plants." ], "decomposition": [ "What types of worms can strawberries become infected with?", "What types of worms can dogs become infected with?", "Are any of #1 present in #2?" ], "evidence": [ [ [ [ "Ditylenchus dipsaci-6" ], "no_evidence" ], [ [ "Dog-18" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Strawberry-26" ] ], [ [ "Worm-7" ] ], [ [ "Worm-7" ] ] ], [ [ "no_evidence" ], [ [ "Dog-18" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "af531b69039676567cb9", "term": "Menthol", "description": "chemical compound", "question": "Is Menthol associated with Thanksgiving?", "answer": false, "facts": [ "Menthol is the main component of peppermint oil and is responsible for the noticeable cooling sensation. ", "During Thanksgiving, turkey, potatoes, gravy, and pie are common dishes. None of which have menthol in it." ], "decomposition": [ "What are some common dishes served during Thanksgiving?", "Does any of #1 contain menthol?" ], "evidence": [ [ [ [ "Thanksgiving (United States)-1" ] ], [ "operation" ] ], [ [ [ "Thanksgiving-6" ] ], [ [ "Menthol-1" ] ] ], [ [ [ "Thanksgiving dinner-10", "Thanksgiving dinner-16" ] ], [ [ "Menthol-23", "Thanksgiving dinner-10", "Thanksgiving dinner-16" ] ] ] ] }, { "qid": "3d01af5db202bc7d33b9", "term": "Eggplant", "description": "plant species Solanum melongena", "question": "Would someone in Mumbai refer to Solanum melongena as an eggplant?", "answer": false, "facts": [ "Mumbia is a city in India.", "India is a country located in South Asia.", "In South Asia the Solanum melongena plant is referred to as a brinjal." ], "decomposition": [ "In what country Mumbai located?", "In what region is #1 located?", "What is Solanum melongena referred to as in #2?", "Is #3 the word \"eggplant\"?" 
], "evidence": [ [ [ [ "Mumbai-1" ] ], [ [ "India-1" ] ], [ [ "Eggplant-1" ] ], [ "operation" ] ], [ [ [ "Mumbai-3" ] ], [ [ "India-1" ] ], [ [ "Eggplant-16" ] ], [ [ "Eggplant-16" ] ] ], [ [ [ "Mumbai-1" ] ], [ [ "India-1" ] ], [ [ "Eggplant-1" ] ], [ "operation" ] ] ] }, { "qid": "f318d0f8f873ce921ac9", "term": "Lie", "description": "intentionally false statement to a person or group made by another person or group who knows it is not wholly the truth", "question": "Is it okay to lie after taking an oath in a court of law?", "answer": false, "facts": [ "In a court of law, lying under oath is considered perjury. ", "Perjury is considered a crime." ], "decomposition": [ "When you lie in court, what is that considered?", "Is #1 legal?" ], "evidence": [ [ [ [ "Perjury-1" ] ], [ [ "Perjury-2" ] ] ], [ [ [ "Perjury-1" ] ], [ "operation" ] ], [ [ [ "Perjury-1" ] ], [ [ "Perjury-2" ] ] ] ] }, { "qid": "e7327e171bd965f6a3cf", "term": "Art", "description": "Creative work to evoke emotional response", "question": "Is art prioritized in the US education system?", "answer": false, "facts": [ "Art classes are often the first to be cut during times of low school funds. ", "Between 1999 and 2009, schools offering visual arts curriculum decreased by 7%. " ], "decomposition": [ "When funding is low in schools, what subjects are typically cut first?", "What types of classes are #1 classified as?", "Is art not classified as #2?" ], "evidence": [ [ [ [ "North Andover High School-14" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Art education in the United States-2" ] ], [ [ "The arts-3" ] ], [ "operation" ] ], [ [ [ "Public school funding in the United States-9" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d07d517db865174dcee4", "term": "Soup", "description": "primarily liquid food", "question": "While on a liquid diet, are there some types of soup you cannot eat?", "answer": true, "facts": [ "Italian wedding soup has large chunks including meatballs and pasta which require chewing.", "Chicken Noodle soup has chunks of chicken and large noodles in it that require chewing." ], "decomposition": [ "Are there any soups that contain substantially solid portions?" ], "evidence": [ [ [ [ "Gumbo-1", "Menudo (soup)-1" ], "no_evidence" ] ], [ [ [ "Soup-1", "Stew-1" ], "operation" ] ], [ [ [ "Chicken soup-1" ] ] ] ] }, { "qid": "136496e0b5e8bed459da", "term": "Twelve-tone technique", "description": "method of musical composition devised by Arnold Schönberg to ensure that all 12 notes of the chromatic scale are equally often, so that the music avoids being in a key", "question": "Would it be impossible to use an Iwato scale for a twelve-tone technique composition?", "answer": true, "facts": [ "The Iwato scale has 5 pitch cases.", "The chromatic scale has 12 pitch cases." ], "decomposition": [ "How many pitch cases are used for the Iwato scale?", "Is #1 not equal to the number of pitch cases needed for the twelve-tone technique?" ], "evidence": [ [ [ [ "Iwato scale-1" ] ], [ [ "Twelve-tone technique-1" ], "operation" ] ], [ [ [ "Iwato scale-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Iwato scale-1" ] ], [ [ "Twelve-tone technique-1" ], "operation" ] ] ] }, { "qid": "ca09aea1e9589ef40c61", "term": "Gulf of Finland", "description": "arm of the Baltic Sea", "question": "Would the Titanic be well preserved at the bottom of the Gulf of Finland?", "answer": true, "facts": [ "The bottom of the gulf is one of the world's largest ship cemeteries. 
Because of the low salinity and cold waters, and no shipworms, the ships are relatively well preserved.", "RMS Titanic was a British passenger ship." ], "decomposition": [ "Are ships well-preserved in the Gulf of Finland?", "Was the RMS Titanic a ship?", "Are both #1 and #2 \"yes\"?" ], "evidence": [ [ [ [ "Gulf of Finland-28" ] ], [ [ "RMS Titanic-1" ] ], [ "operation" ] ], [ [ [ "Gulf of Finland-28" ] ], [ [ "RMS Titanic-1" ] ], [ "operation" ] ], [ [ [ "Gulf of Finland-28" ] ], [ [ "RMS Titanic-1" ] ], [ "operation" ] ] ] }, { "qid": "66b9bb0849ceff60cfde", "term": "Hepatitis", "description": "inflammation of the liver tissue", "question": "Could a dandelion suffer from hepatitis?", "answer": false, "facts": [ "Only creatures that contain a liver can suffer from hepatitis.", "The liver is an organ only found in vertebrates.", "Vertebrates exist in the kingdom Animalia.", "Dandelions are plants in the kingdom Plantae." ], "decomposition": [ "Hepatitis is the inflammation of what?", "In which kingdom is #1 found?", "In what kingdom are dandelions found?", "Is #3 the same as #2?" ], "evidence": [ [ [ [ "Hepatitis-1" ] ], [ [ "Animal-1", "Animal-2", "Liver-1", "Vertebrate-1" ] ], [ [ "Plant-1", "Taraxacum-1" ] ], [ "operation" ] ], [ [ [ "Hepatitis-1" ] ], [ [ "Liver-1", "Vertebrate-1" ] ], [ [ "Taraxacum-1" ] ], [ "operation" ] ], [ [ [ "Hepatitis-4" ] ], [ [ "Liver-98" ] ], [ [ "Chondrilla (plant)-3" ] ], [ "operation" ] ] ] }, { "qid": "e51ea4cf89bc91a77f3c", "term": "Portuguese Colonial War", "description": "1961–1974 armed conflicts in Africa between Portugal and independence movements", "question": "Did any country in the Portuguese Colonial War share Switzerland's role in WWII?", "answer": true, "facts": [ "The Portuguese Colonial War was between Portugal and several groups including People's Movement for Liberation of Angola.", "Switzerland remained neutral in World War II and did not get involved.", "Portugal stayed out of world affairs during World War II." ], "decomposition": [ "What was Switzerland's position in World War II?", "Which countries were involved in the Portuguese Colonial War?", "Did any of #2 maintain a #1 position through World War II?" ], "evidence": [ [ [ [ "Switzerland-33" ] ], [ [ "Portuguese Colonial War-2" ] ], [ [ "The Two Faces of War-11" ] ] ], [ [ [ "Switzerland during the World Wars-20" ] ], [ [ "Liberal Wars-1" ] ], [ [ "Neutral powers during World War II-6" ], "operation" ] ], [ [ [ "Switzerland during the World Wars-1" ] ], [ [ "Portuguese Colonial War-1" ] ], [ "operation" ] ] ] }, { "qid": "de62d38623fde2c2285a", "term": "Week", "description": "unit of time", "question": "Would a week be enough time to watch every episode of Ugly Betty?", "answer": true, "facts": [ "There are 168 hours in a week.", "The entire Ugly Betty series is 85 hours long." ], "decomposition": [ "How many hours are in a week?", "How long is the entire Ugly Betty series?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Week-9" ] ], [ [ "Ugly Betty-10" ] ], [ [ "Ugly Betty-10" ], "operation" ] ], [ [ "no_evidence" ], [ [ "Ugly Betty-16" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Week-9" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "60c525b944e991fb9821", "term": "Old English", "description": "Early form of English; Anglo-Saxon", "question": "Would a Pict be confused by Old English?", "answer": true, "facts": [ "Old English was spoken by the Anglo-Saxons, a Germanic tribe that inhabited England.", "The Picts were a Celtic-speaking people that lived in what is now Scotland.", "The Pictish language died out by 1100AD and was replaced by Gaelic.", "Gaelic and Old English are completely different languages from different branches of the Indo-European language family.", "Gaelic vocabulary is very different from Old English and verbs are also conjugated differently." ], "decomposition": [ "What language was spoken by the Picts?", "In what language family is Old English?", "Is #2 not closely related to #1?" ], "evidence": [ [ [ [ "Picts-1" ] ], [ [ "Old English-2" ] ], [ [ "Pictish language-2" ], "operation" ] ], [ [ [ "Pictish language-1", "Picts-1" ] ], [ [ "Old English-1" ] ], [ [ "English language-2", "Picts-36" ], "operation" ] ], [ [ [ "Picts-1" ] ], [ [ "Old English-3" ] ], [ [ "Celtic languages-1", "West Germanic languages-2" ] ] ] ] }, { "qid": "0b3c9c9aea94adef6e3a", "term": "Bitcoin", "description": "decentralized cryptocurrency", "question": "Was the Louisiana Purchase made with bitcoin?", "answer": false, "facts": [ "Bitcoin was launched as a currency in 2009.", "The Louisiana Purchase was in 1803." ], "decomposition": [ "When was Bitcoin launched?", "When did the Louisiana Purchase take place?", "Is #1 prior to #2?" ], "evidence": [ [ [ [ "Bitcoin-2" ] ], [ [ "Louisiana Purchase-1" ] ], [ "operation" ] ], [ [ [ "Bitcoin-2" ] ], [ [ "Louisiana Purchase-1" ] ], [ "operation" ] ], [ [ [ "Bitcoin-4" ] ], [ [ "Louisiana Purchase-1" ] ], [ "operation" ] ] ] }, { "qid": "4a915ea5d025292cd7ec", "term": "Serfdom", "description": "status of peasants under feudalism", "question": "Did Japanese serfdom have higher status than English counterpart?", "answer": true, "facts": [ "Serfs in Medieval England were peasants that were indentured servants to their lords.", "Serfs were often harshly treated and had little legal redress against the actions of their lords.", "Japanese serfs were farmers and fishermen.", "Japanese believed that serfs produced food, which was depended on by all classes, therefore, they worked harder." ], "decomposition": [ "How did English lords treat their serfs?", "What did the Japanese recognize serfs as?", "Is #2 higher in importance than #1?" ], "evidence": [ [ [ [ "Serfdom-2" ] ], [ [ "Serfdom-5" ] ], [ "operation" ] ], [ [ [ "Serfdom-2" ] ], [ [ "Manorialism-17" ] ], [ "operation" ] ], [ [ [ "Serfdom-2" ], "no_evidence" ], [ [ "Shōen-8", "Shōen-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "49f6cc28a8385f71d7ca", "term": "Papaya", "description": "species of plant, use Q12330939 for the papaya (the fruit)", "question": "Is the best tasting part of the papaya in the center?", "answer": false, "facts": [ "The center of a papaya contains a cluster of seeds.", "Papaya seeds, while edible, are often discarded for the soft flesh that surrounds them." ], "decomposition": [ "What is usually located in the center of papayas?", "Is #1 usually consumed by people when eating papayas?" 
], "evidence": [ [ [ [ "Avocado-26" ], "no_evidence" ], [ [ "Fruit pit carving-1" ], "no_evidence", "operation" ] ], [ [ [ "Papaya-3" ] ], [ [ "Papaya-30" ] ] ], [ [ [ "Papaya-3" ], "no_evidence" ], [ [ "Papaya-21" ], "operation" ] ] ] }, { "qid": "50d09b10f985b4763225", "term": "Lil Wayne", "description": "American rapper, record executive and actor from Louisiana", "question": "Could Lil Wayne's children ride in a Chevrolet Corvette ZR1 together?", "answer": false, "facts": [ "Lil Wayne has four children.", "A Chevrolet Corvette ZR1 has 2 seats." ], "decomposition": [ "How many people can a Chevrolet Corvette ZR1 seat at a time?", "How many children does Lil Wayne have?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Chevrolet Corvette-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Chevrolet Corvette-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Chevrolet Corvette-1" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "b3b209d7fe1a38ad7844", "term": "Dancing with the Stars", "description": "several international television series based on the format of the British TV series Strictly Come Dancing", "question": "Is double duty an incorrect phrase for host of Dancing With The Stars?", "answer": false, "facts": [ "Double duty refers to having more than one job at the same time.", "The host of Dancing WIth The Stars is Tom Bergeron.", "Tom Bergeron is the host of America's Funniest Home Videos." ], "decomposition": [ "Who is the host of TV series 'Dancing WIth The Stars'?", "Who hosts America's Funniest Home Videos?", "Do #1 and #2 being the same fail to meet the definition of double duty?" ], "evidence": [ [ [ [ "Dancing with the Stars (American TV series)-1" ] ], [ [ "America's Funniest Home Videos-16" ] ], [ "operation" ] ], [ [ [ "Dancing with the Stars-18" ] ], [ [ "America's Funniest Home Videos-23" ] ], [ "operation" ] ], [ [ [ "Dancing with the Stars-18" ] ], [ [ "America's Funniest Home Videos-16" ] ], [ [ "Double Duty-9" ], "operation" ] ] ] }, { "qid": "22b385fc4948e6ff5795", "term": "Lil Wayne", "description": "American rapper, record executive and actor from Louisiana", "question": "Could Lil Wayne legally operate a vehicle on his own at the beginning of his career?", "answer": false, "facts": [ "Lil Wayne's career began in 1995, at the age of 12, when he was signed by Birdman and joined Cash Money Records as the youngest member of the label.", "A driver's license is required to legally operate a vehicle by yourself in the USA.", "The eligible age to first obtain a driver's license varies substantially from state to state, from 14 years, three months, in South Dakota to 17 in New Jersey." ], "decomposition": [ "How old was Lil Wayne when he started his career in music?", "What is the minimum age required to obtain a valid driver's license in the US?", "Is #1 at least equal to #2?" 
], "evidence": [ [ [ [ "Lil Wayne-1" ] ], [ [ "Driver's licenses in the United States-9" ] ], [ "operation" ] ], [ [ [ "Lil Wayne-7" ] ], [ [ "Driver's licenses in the United States-9" ] ], [ "operation" ] ], [ [ [ "Lil Wayne-1" ] ], [ [ "Driver's licenses in the United States-12" ] ], [ "operation" ] ] ] }, { "qid": "93eafc413d86df9c0755", "term": "Ocelot", "description": "Small wild cat", "question": "Could an ocelot subsist on a single bee hummingbird per day?", "answer": false, "facts": [ "An ocelot requires 600–800 grams (21–28 oz) of food every day to satisfy its energy requirements.", "Bee hummingbirds are the smallest living birds, with an average weight of around 2-3 grams." ], "decomposition": [ "How much food does an ocelot need to live per day?", "How much does a bee hummingbird weigh?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Ocelot-25" ], "no_evidence" ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ], [ [ [ "Ocelot-25" ] ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ], [ [ [ "Ocelot-26" ], "no_evidence" ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ] ] }, { "qid": "97d147b2e3e29fa85a71", "term": "Ukrainian Greek Catholic Church", "description": "Byzantine Rite Eastern Catholic Church", "question": "Does Ukrainian Greek Catholic Church recognize Alexander Nevsky as a saint?", "answer": false, "facts": [ "Alexander Nevsky was a Prince of Novgorod that fought against German and Swiss Invaders.", "The Russian Orthodox Church named Alexander Nevsky.a saint in 1547.", "The Russian Orthodox Church is a member of the Eastern Orthodox Church and has their own list of saints.", "The Catholic Church and the Eastern Orthodox Church have been in a state of official schism since the East–West Schism of 1054.", "The Ukrainian Greek Catholic Church is a branch of the Catholic Church." ], "decomposition": [ "In which religion is Alexander Nevsky considered a saint?", "What religion is the Ukrainian Greek Catholic Church a part of?", "Is #1 and #2 the same?" ], "evidence": [ [ [ [ "Alexander Nevsky-2" ] ], [ [ "Ukrainian Greek Catholic Church-1" ] ], [ [ "Russian Orthodox Church-73" ], "operation" ] ], [ [ [ "Alexander Nevsky-2" ] ], [ [ "Ukrainian Greek Catholic Church-1" ] ], [ "operation" ] ], [ [ [ "Alexander Nevsky-2" ] ], [ [ "Ukrainian Greek Catholic Church-31" ] ], [ "operation" ] ] ] }, { "qid": "cc233b6426cb4e8dffb6", "term": "James Bond", "description": "Media franchise about a British spy", "question": "Was the original James Bond actor born near the Washington Monument?", "answer": false, "facts": [ "The original James Bond actor was Sean Connery.", "Sean Connery was born in Scotland.", "The Washington Monument is located in Washington, D.C.", "Washington, D.C. and Scotland are nearly 3,500 miles apart." ], "decomposition": [ "Who originally played James Bond?", "Where was #1 born?", "Where is the Washington Monument located?", "What is the distance between #2 and #3?", "Is #4 a short enough of a distance to be considered \"close\"?" ], "evidence": [ [ [ [ "Portrayal of James Bond in film-3" ] ], [ [ "Barry Nelson-2" ] ], [ [ "Washington Monument-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Portrayal of James Bond in film-8" ] ], [ [ "Sean Connery-4" ] ], [ [ "Washington Monument-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "James Bond-28" ] ], [ [ "Sean Connery-4" ] ], [ [ "Washington Monument-1" ] ], [ [ "Atlantic Ocean-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9cfdec34fa9bd1ed9d3f", "term": "J. P. 
Morgan", "description": "American financier, banker, philanthropist and art collector", "question": "Did J. P. Morgan have healthy lungs?", "answer": false, "facts": [ "J. P. Morgan smoked dozens of cigars per day.", "Smoking tobacco damages the lungs and increases the risk of lung cancer significantly." ], "decomposition": [ "What are the causes of unhealthy lungs?", "Did J. P. Morgan engage in any part of #1?" ], "evidence": [ [ [ [ "Lung-3" ] ], [ [ "J. P. Morgan-29" ] ] ], [ [ [ "Smoking-5" ] ], [ "operation" ] ], [ [ [ "Rheumatoid lung disease-6" ] ], [ [ "J. P. Morgan-4" ], "operation" ] ] ] }, { "qid": "935002dcfa70239e715f", "term": "Firefighter", "description": "rescuer trained to extinguish hazardous fires", "question": "Would Firefighters be included in a September 11th memorial?", "answer": true, "facts": [ "September 11th is remembered as a day of mourning for the lives lost during a terrorist attack in NYC.", "Firefighters were among the first responders to the crisis, and many died. " ], "decomposition": [ "Who gets remembered on September 11th?", "Were firefighters among #1?" ], "evidence": [ [ [ [ "9/11 Tribute Museum-1" ] ], [ [ "9/11 Tribute Museum-7" ], "operation" ] ], [ [ [ "September 11 attacks-2" ] ], [ "operation" ] ], [ [ [ "September 11 attacks-118", "September 11 attacks-63" ] ], [ "operation" ] ] ] }, { "qid": "f07a6acf27e25dedbb8d", "term": "Andrew Johnson", "description": "17th president of the United States", "question": "Does Andrew Johnson's presidential number exceed Elagabalus's Emperor number?", "answer": false, "facts": [ "Andrew Johnson was the 17th president of the United States.", "Elagabalus was the 25th Roman Emperor." ], "decomposition": [ "What number president was Andrew Johnson?", "What number emperor was Elagabalus?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Andrew Johnson-1" ] ], [ [ "Elagabalus-1" ] ], [ "operation" ] ], [ [ [ "Andrew Johnson-1" ] ], [ [ "Elagabalus-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Andrew Johnson-1" ] ], [ [ "Elagabalus-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "58e7eb38d889d4252123", "term": "Chinese calendar", "description": "Lunisolar calendar from China", "question": "Are any animals in Chinese calendar Chordata?", "answer": true, "facts": [ "The chinese zodiac based on the Chinese calendar has a number of animals including dogs and pigs.", "Chordata is a scientific classification of an animals phylum.", "The phylum of pigs is Chordata." ], "decomposition": [ "What animals are on the Chinese calendar?", "Which animals in #1 have a notochord and dorsal neural tube?", "Which animals in #2 have pharyngeal slits and an endostyle at some stage of development?", "Which animals in #3 have a post-anal tail?", "Is there at least one animal listed in #4?" 
], "evidence": [ [ [ [ "Chinese astrology-10", "Chordate-1" ] ], [ [ "Tiger-27" ], "no_evidence" ], [ [ "Tiger-27" ], "no_evidence" ], [ [ "Tiger-27" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chinese zodiac-5" ] ], [ [ "Chordate-1" ], "no_evidence" ], [ [ "Chordate-2" ], "no_evidence" ], [ [ "Chordate-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chinese zodiac-5" ] ], [ [ "Chinese zodiac-5", "Notochord-1" ] ], [ [ "Chinese zodiac-5", "Pharyngeal slit-1" ] ], [ [ "Chinese zodiac-5", "Chordate-1" ] ], [ "operation" ] ] ] }, { "qid": "a3416e6b8a34d389eb0c", "term": "Yeti", "description": "Folkloric ape-like creature from Asia", "question": "Would a hypothetical Yeti be towered over by Andre the Giant?", "answer": true, "facts": [ "The mythical Yeti is said to be between 200 and 400 pounds.", "The mythical Yeti is said to be around 6 feet tall.", "Andre the Giant was an imposing 7'4\" and 529 pounds." ], "decomposition": [ "How tall was Andre the Giant?", "How tall are Yeti thought to be?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "André the Giant-2" ] ], [ [ "Chuchuna-3" ] ], [ "operation" ] ], [ [ [ "André the Giant-2" ] ], [ [ "Expedition Everest-12", "Yeti-8" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Andre the Giant Has a Posse-2" ] ], [ [ "Yeti-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "fdeec181aeee06303113", "term": "Karachi", "description": "Megacity in Sindh, Pakistan", "question": "Are you likely to find a crucifix in Karachi?", "answer": false, "facts": [ "The crucifix is a symbol of Christianity", "The vast majority of Pakistan's population is Muslim" ], "decomposition": [ "What religion does a crucifix symbolize?", "What is the main religion observed in Karachi, Pakistan?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Crucifix-2" ] ], [ [ "Karachi-66" ] ], [ "operation" ] ], [ [ [ "Crucifix-2" ] ], [ [ "Karachi-66", "Muslims-1" ] ], [ "operation" ] ], [ [ [ "Christian cross-1" ] ], [ [ "Religion in Karachi-6" ] ], [ "operation" ] ] ] }, { "qid": "ba691c1bb1c760b1edf8", "term": "The Little Prince", "description": "Novella by Antoine de Saint-Exupéry", "question": "Was The Little Prince's titular character allergic to flowers?", "answer": false, "facts": [ "The Little Prince tends to and cares for a rose.", "The Little Prince falls in love with a rose. " ], "decomposition": [ "Who is the titular character of The Little Prince?", "Does #1 avoid interacting with flowers?" ], "evidence": [ [ [ [ "The Little Prince-1", "The Little Prince-5" ] ], [ [ "The Little Prince-11" ], "operation" ] ], [ [ [ "The Little Prince-1" ] ], [ [ "Rose-1", "The Little Prince-11" ] ] ], [ [ [ "The Little Prince-5" ] ], [ [ "The Little Prince-16", "The Little Prince-20" ], "operation" ] ] ] }, { "qid": "f3d045e08844bf0d4e53", "term": "Creative Commons license", "description": "license allowing free use of a work", "question": "Was a person sold a Creative Commons License for Boticelli's The Birth of Venus ripped off?", "answer": true, "facts": [ "A Creative Commons license allows for the free distribution of an otherwise copyrighted piece of work.", "Works that are deemed in the public domain do not require a copyright or permission to use.", "Any work created before 1923 is in the public domain.", "Sandro Boticelli's The Birth of Venus painting was from 1486." 
], "decomposition": [ "What is the purpose of a Creative Commons license?", "Do works in the public domain need #1?", "Works created before what year are presently in the public domain?", "Was Boticelli's The Birth of Venus created before #3?", "Considering #2 and #4, is #1 unnecessary for Boticelli's The Birth of Venus?" ], "evidence": [ [ [ [ "Creative Commons license-1" ] ], [ [ "Public domain-1" ] ], [ [ "Public domain-10" ], "no_evidence" ], [ [ "The Birth of Venus-1" ], "operation" ], [ "operation" ] ], [ [ [ "Creative Commons license-1" ] ], [ [ "Public domain-1" ] ], [ [ "Public domain-2" ] ], [ [ "The Birth of Venus-17" ] ], [ "operation" ] ], [ [ [ "Creative Commons license-1" ] ], [ [ "Public domain-1" ] ], [ [ "Copyright term-2", "Public domain-6" ], "no_evidence" ], [ [ "The Birth of Venus-1" ] ], [ "operation" ] ] ] }, { "qid": "b5b110367bb88821d9d0", "term": "University of Pittsburgh", "description": "American state-related research university located in Pittsburgh, Pennsylvania", "question": "Did Millard Fillmore help to establish the University of Pittsburgh?", "answer": false, "facts": [ "The University of Pittsburgh was established in 1787.", "Millard Fillmore was born in 1800." ], "decomposition": [ "When was the University of Pittsburgh established?", "When was Millard Fillmore born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "University of Pittsburgh-1" ] ], [ [ "Millard Fillmore-1" ] ], [ "operation" ] ], [ [ [ "University of Pittsburgh-5" ] ], [ [ "Millard Fillmore-5" ] ], [ "operation" ] ], [ [ [ "University of Pittsburgh-1" ] ], [ [ "Millard Fillmore-1" ] ], [ "operation" ] ] ] }, { "qid": "80ca095e38ab73b908ff", "term": "Northern fur seal", "description": "The largest fur seal in the northern hemisphere", "question": "Does Northern fur seal make good pet for six year old?", "answer": false, "facts": [ "An average six year old weighs 45 pounds.", "An adult Northern fur seal can weigh up to 120 pounds.", "Northern fur seals have sharp canine teeth for tearing.", "Northern fur seals live near rivers to feed off of fish populations." ], "decomposition": [ "What environment do Northern fur seals thrive in?", "Do six-years olds have safe access to #1?" ], "evidence": [ [ [ [ "Northern fur seal-9" ] ], [ "no_evidence" ] ], [ [ [ "Northern fur seal-9" ] ], [ "operation" ] ], [ [ [ "Northern fur seal-9" ] ], [ "no_evidence" ] ] ] }, { "qid": "e0a9f1ef5fae44427b26", "term": "Pear", "description": "genus of plants", "question": "Would a pear sink in water?", "answer": false, "facts": [ "The density of a raw pear is about 0.59 g/cm^3.", "The density of water is about 1 g/cm^3.", "Objects only sink if they are denser than the surrounding fluid." ], "decomposition": [ "What is the density of a pear?", "What is the density of water?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Density-12", "Pear-8" ] ], [ [ "Density-12", "Density-5" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Water-7" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Properties of water-14" ] ], [ "operation" ] ] ] }, { "qid": "7b84d2bc643ddc2085f0", "term": "Noah's Ark", "description": "the vessel in the Genesis flood narrative", "question": "WIll Noah's Ark hypothetically sail through flooded Lincoln Tunnel?", "answer": false, "facts": [ "Scholars have determined Noah's Ark to be 75 feet wide.", "Each lane of the Lincoln Tunnel is 21 feet wide." ], "decomposition": [ "What is the width of the Lincoln tunnel?", "What is the width of the Noah's ark?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Cubit-1", "Noah's Ark-3" ] ], [ "operation" ] ], [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Cubit-13", "Noah's Ark-3" ] ], [ "operation" ] ], [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Noah's Ark-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "e0e842a78f59698d0cfd", "term": "Haiku", "description": "very short form of Japanese poetry", "question": "Can you write a whole Haiku in a single tweet?", "answer": true, "facts": [ "A Haiku is a Japanese poetry in three phrases.", "The average Haiku is composed of 60 to 70 characters.", "A tweet is a short message sent on Twitter.", "The character limit of a single tweet on Twitter is 140." ], "decomposition": [ "How many characters can be expected to be in an average haiku?", "What is the current character limit of a single tweet?", "Is #1 reasonably less than #2?" ], "evidence": [ [ [ [ "Haiku-2" ], "no_evidence" ], [ [ "Twitter-1" ] ], [ "operation" ] ], [ [ [ "Haiku-2" ] ], [ [ "Twitter-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Haiku-2" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e8b311139f387c983996", "term": "Saltwater crocodile", "description": "species of reptile", "question": "Would you take a photo of a Saltwater crocodile in Memphis?", "answer": false, "facts": [ "The saltwater crocodile is native to saltwater habitats and brackish wetlands from India's east coast across Southeast Asia and the Sundaic region to northern Australia and Micronesia.", "Memphis is a city in the United States." ], "decomposition": [ "Where can saltwater crocodiles be found?", "Is Memphis located in any of #1?" ], "evidence": [ [ [ [ "Saltwater crocodile-1" ] ], [ "operation" ] ], [ [ [ "Saltwater crocodile-1" ] ], [ [ "Memphis, Tennessee-1" ] ] ], [ [ [ "Saltwater crocodile-20" ] ], [ "operation" ] ] ] }, { "qid": "4e598d222fe1001cf4f8", "term": "Kangaroo", "description": "сommon name of family of marsupials", "question": "Could Scooby Doo fit in a kangaroo pouch?", "answer": false, "facts": [ "Scooby Doo is a fictional cartoon Great Dane.", "Great Danes can be 30-34 inches in height.", "Kangaroo babies can fit in their mother's pouch until they're 10 weeks of age.", "A 10 week old kangaroo is much smaller than a Great Dane." ], "decomposition": [ "What type of creature was Scooby-Doo?", "How large are #1?", "What resides in a kangaroo pouch?", "How large are #3?", "Is #2 approximately equal to #4?" ], "evidence": [ [ [ [ "Scooby-Doo (character)-1" ] ], [ [ "Great Dane-3" ] ], [ [ "Pouch (marsupial)-2" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Scooby-Doo-1" ] ], [ [ "Great Dane-10" ] ], [ [ "Kangaroo-34" ] ], [ [ "Red kangaroo-13" ] ], [ "operation" ] ], [ [ [ "Scooby-Doo-1" ] ], [ [ "Great Dane-10" ] ], [ [ "Marsupial-26" ] ], [ [ "Red kangaroo-13" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "db70d15b07a9f288ffdc", "term": "Manta ray", "description": "genus of fishes", "question": "Do manta rays live in water above the safe temperature for cold food storage?", "answer": true, "facts": [ "For cold foods, the food safe temperature is 40 degrees Fahrenheit and below.", "Manta rays prefer water temperatures above 68 °F (20 °C)." ], "decomposition": [ "What temperature should cold food be stored at?", "What kind of water do manta rays live in?", "What is the normal temperature of #2?", "Is #3 higher than #1?" 
], "evidence": [ [ [ [ "Food storage-8" ] ], [ [ "Manta ray-2" ] ], [ [ "Manta ray-21" ] ], [ "operation" ] ], [ [ [ "Refrigeration-23" ], "no_evidence" ], [ [ "Manta ray-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Food storage-4", "Food storage-8" ] ], [ [ "Manta ray-21" ] ], [ [ "Manta ray-21" ] ], [ "operation" ] ] ] }, { "qid": "f240c88676b6ac6896ad", "term": "Kayak", "description": "small boat propelled with a double-bladed paddle", "question": "Is the kayak a traditional boat in New Zealand?", "answer": false, "facts": [ "Kayaks were developed by native peoples to hunt in northern waters of the Arctic Ocean, North Atlantic, Bering Sea and North Pacific. ", "New Zealand is in the Southern Hemisphere.", "The native Maori people of New Zealand arrived there in canoes." ], "decomposition": [ "What cultures invented the kayak?", "What cultures are native to New Zealand?", "Is there overlap between #1 and #2?" ], "evidence": [ [ [ [ "Kayak-5" ] ], [ [ "New Zealand-7" ] ], [ "operation" ] ], [ [ [ "Kayak-5" ] ], [ [ "Culture of New Zealand-1" ] ], [ "operation" ] ], [ [ [ "Kayak-4" ] ], [ [ "Māori people-1" ] ], [ "operation" ] ] ] }, { "qid": "1f550df826ae448ff082", "term": "Subway (restaurant)", "description": "American fast food chain", "question": "Has the Subway restaurant franchise had any connections with child abusers?", "answer": true, "facts": [ "Subway hired Jared Fogle as a spokesman for their sandwich shops.", "Jared Fogle was convicted for having sex with minors and for possessing child pornography. " ], "decomposition": [ "Was Jared Fogle a spokesman for Subway?", "Is Jared Fogle a sexual abuser of children?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Jared Fogle-2" ] ], [ [ "Jared Fogle-40" ] ], [ "operation" ] ], [ [ [ "Jared Fogle-2" ] ], [ [ "Jared Fogle-15" ] ], [ "operation" ] ], [ [ [ "Jared Fogle-1" ] ], [ [ "Jared Fogle-3" ] ], [ "operation" ] ] ] }, { "qid": "00f951d01196c2e77fe6", "term": "Presidency of Richard Nixon", "description": "American cabinet", "question": "Would the high school class of 2010 have lived through the Presidency of Richard Nixon?", "answer": false, "facts": [ "People in the high school class of 2010 were born between 1991 and 1993.", "Richard Nixon was President of the United States until 1974." ], "decomposition": [ "When was Richard Nixon president of the US until?", "What year range would the high school class of 2010 be born in?", "Is #1 in #2?" ], "evidence": [ [ [ [ "Richard Nixon-1" ] ], [ [ "Secondary education in the United States-36" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Richard Nixon-1" ] ], [ [ "Secondary education-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Richard Nixon-46" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "4810297f474b0e97ed78", "term": "Gallon", "description": "general topic for different units of volume called gallon", "question": "Could ten gallons of seawater crush a six year old?", "answer": true, "facts": [ "The average weight of a six year old is 45 pounds.", "One gallon of seawater weighs slightly over 8 pounds." ], "decomposition": [ "What is the average weight of a six year old?", "What is the weight of a gallon of seawater?", "Is ten times #2 more than #1?" 
], "evidence": [ [ [ [ "Weigh station-3" ], "no_evidence" ], [ [ "Seawater-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Child-7" ], "no_evidence" ], [ [ "Seawater-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Early childhood-4" ], "no_evidence" ], [ [ "Seawater-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "43e4aea70f7e53d1d807", "term": "Deacon", "description": "ministry in the Christian Church", "question": "Would a Deacon be likely to be a fan of the podcast 'God Awful Movies'?", "answer": false, "facts": [ "God Awful Movies is a podcast in which people review and mock religious films. ", "The hosts of God Awful Movies take a disrespectful approach to their film critique." ], "decomposition": [ "What is the main topic of God Awful Movies?", "What is God Awful Movies position on #1?", "What are deacon's positions on #1?", "Are #2 and #3 the same or similar?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Deacon-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Deacon-1" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "a98c29881e1929a25eb4", "term": "Giant squid", "description": "Deep-ocean dwelling squid in the family Architeuthidae", "question": "Could a giant squid fit aboard the deck of the titanic?", "answer": true, "facts": [ "Decks on the Titanic were as long as 500ft.", "Giant Squid grow to be around 59ft in length." ], "decomposition": [ "What is the length of a giant squid?", "What was the length of a deck on the Titanic?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Giant squid-1" ] ], [ [ "RMS Titanic-11" ] ], [ "operation" ] ], [ [ [ "Giant squid-1" ] ], [ [ "First-class facilities of the RMS Titanic-12" ] ], [ "operation" ] ], [ [ [ "Giant squid-1" ] ], [ [ "RMS Titanic-11" ] ], [ "operation" ] ] ] }, { "qid": "901156d5fcaf260eb4a7", "term": "Anchovy", "description": "Family of fishes", "question": "Do more anchovy live in colder temperature waters than warmer?", "answer": false, "facts": [ "Anchovy are a type of small fish.", "Anchovy are concentrated in the temperate waters of the Atlantic, Indian, and Pacific Oceans.", "Anchovy are rarely found in colder waters." ], "decomposition": [ "Which oceans do Anchovy live in?", "Which seas do Anchovy live in?", "Are #1 and #2 cold waters?" ], "evidence": [ [ [ [ "Anchovy-5" ] ], [ [ "Anchovy-6" ] ], [ [ "Anchovy-6" ], "no_evidence" ] ], [ [ [ "Anchovy-2" ] ], [ [ "Anchovy-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Anchovy-5" ] ], [ [ "Anchovy-5" ] ], [ "operation" ] ] ] }, { "qid": "998132ad8f805c90f31e", "term": "Groundhog Day", "description": "Traditional method of weather prediction", "question": "Is Antarctica a good location for Groundhog Day?", "answer": false, "facts": [ "Groundhog Day relies on a groundhog seeing their shadow.", "Antarctica has an irregular sun pattern and some days have no sun rise or 24 hour sunlight.", "Antarctica has temperatures can range from -10C to -60C.", "Groundhogs live in forests or woodlands with plenty of sunlight." ], "decomposition": [ "What does a groundhog have to see in order for a prediction to be made on Groundhog Day?", "Which particular light source is responsible for casting #1?", "How is the #2 pattern like on Antarctica?", "Is #3 is irregular?" 
], "evidence": [ [ [ [ "Groundhog Day-1" ] ], [ [ "Shadow-1" ] ], [ [ "Antarctica-44" ] ], [ [ "Antarctica-44" ] ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "Shadow-16" ] ], [ [ "Antarctica-44" ] ], [ [ "Midnight sun-3" ], "operation" ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "Sunlight-1" ] ], [ [ "Antarctica-44", "Antarctica-46" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b7c2bfdfc96be70f3f60", "term": "The Powerpuff Girls", "description": "American animated television series", "question": "Can the Powerpuff Girls form a complete tag team wrestling match?", "answer": false, "facts": [ "A tag team wrestling match is contested between at least two teams of at least two wrestlers each", "There are only three people in the Powerpuff Girls" ], "decomposition": [ "What is the minimum number of people that can participate in a tag team match in professional wrestling?", "The Powerpuff girls are how many in number?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Tag team-7" ] ], [ [ "The Powerpuff Girls-1" ] ], [ "operation" ] ], [ [ [ "Professional wrestling match types-4" ] ], [ [ "The Powerpuff Girls-1" ] ], [ "operation" ] ], [ [ [ "Tag team-7" ] ], [ [ "The Powerpuff Girls-1" ] ], [ "operation" ] ] ] }, { "qid": "d9987847af1ab751c75a", "term": "White blood cell", "description": "type of cells of the immunological system", "question": "Will someone die without white blood cells?", "answer": true, "facts": [ "White blood cells protect people against disease.", "Disease kills people." ], "decomposition": [ "What function do white blood cells serve in the body?", "Can a human live without #1?" ], "evidence": [ [ [ [ "White blood cell-1" ] ], [ [ "White blood cell-15" ], "no_evidence", "operation" ] ], [ [ [ "Blood cell-7" ] ], [ [ "Blood cell-9" ] ] ], [ [ [ "Innate immune system-11" ] ], [ "operation" ] ] ] }, { "qid": "a60e5f73700b47a5f34a", "term": "Boat", "description": "vessel for transport by water", "question": "Does rock star Keith Richards play a captain of a boat in a movie?", "answer": true, "facts": [ "Keith Richards has a cameo appearance in two of the Pirates of the Caribbean movies.", "He plays Captain Teague, the elderly father of famous pirate Captain Jack Sparrow.", "In At World's End, he is the member of the council of Pirate Lords who is responsible for keeping the Pirate Code, and there is a brief shot of him and his crew aboard their ship during the sequence where the pirates are raising their banners in preparation to fight." ], "decomposition": [ "What role did Keith Richards play in the Pirates of the Caribbean movies?", "Can #1 be considered a captain of a boat?" ], "evidence": [ [ [ [ "Keith Richards-47" ] ], [ [ "Captain-1" ], "operation" ] ], [ [ [ "Keith Richards-47" ] ], [ [ "Captain-1" ] ] ], [ [ [ "Keith Richards-47" ] ], [ "operation" ] ] ] }, { "qid": "a7c566e2adbb58be2da8", "term": "3D printing", "description": "Additive process used to make a three-dimensional object", "question": "Is 3D printing able to make adenovirus?", "answer": false, "facts": [ "3D printers come with limitations in terms of precision. The standard nozzle output is about 0.4 mm. Therefore, if you are planning to print something that is below 0.4mm, you will not get a useful item.", "Adenoviruses are medium-sized (90–100 nm), nonenveloped (without an outer lipid bilayer) viruses with an icosahedral nucleocapsid containing a double stranded DNA genome." 
], "decomposition": [ "What is the size of a 3D printer's standard nozzle output?", "What size range do adenoviruses fall in?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Adenoviridae-10" ] ], [ "operation" ] ], [ [ [ "Fused filament fabrication-7" ] ], [ [ "Adenoviridae-1" ] ], [ [ "Nanometre-1" ], "operation" ] ], [ [ [ "3D printing-44" ], "no_evidence" ], [ [ "Adenoviridae-1", "Nanometre-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3ac14678dd395d41639c", "term": "Eleventh grade", "description": "educational year", "question": "Would an eleventh-grader be eligible for Medicare?", "answer": false, "facts": [ "Students in the 11th grade are typically between 16-17 years old.", "The age requirement for most Medicare recipients is 65 or older." ], "decomposition": [ "What ages are people in eleventh grade?", "What ages are most medicare recipients?", "Is there an overlap between #1 and #2?" ], "evidence": [ [ [ [ "Eleventh grade-1" ] ], [ [ "Medicare (United States)-1" ] ], [ "operation" ] ], [ [ [ "Eleventh grade-1" ], "operation" ], [ [ "Medicare Advantage-26" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Eleventh grade-1" ] ], [ [ "Medicare (United States)-1" ] ], [ "operation" ] ] ] }, { "qid": "b3443bf3c312c9d144e4", "term": "Panthéon", "description": "mausoleum in Paris", "question": "Is there a full Neptunian orbit between the first two burials of women in the Panthéon?", "answer": false, "facts": [ "In 1907, Sophie Berthelot is the first woman to be interred in the Panthéon ", "In 1995, Marie Curie is the second woman to be interred there", "Neptune takes 165 years to go around the sun" ], "decomposition": [ "In what year was the first woman buried in the Panthéon?", "In what year was the second woman buried in the Panthéon?", "How many years are between #1 and #2?", "How many years does it take for Neptune to orbit the Sun?", "Is #4 less than or equal to #3?" ], "evidence": [ [ [ [ "Panthéon-34" ] ], [ [ "Panthéon-34" ] ], [ "operation" ], [ [ "Neptune-1" ] ], [ "operation" ] ], [ [ [ "Panthéon-34" ] ], [ [ "Panthéon-34" ] ], [ "operation" ], [ [ "Neptune-1" ] ], [ "operation" ] ], [ [ [ "Panthéon-34" ] ], [ [ "Panthéon-34" ] ], [ "operation" ], [ [ "Neptune-1" ] ], [ "operation" ] ] ] }, { "qid": "3bba4ea27500361cb141", "term": "Boat", "description": "vessel for transport by water", "question": "Will Oasis cruise boat traverse the Lincoln Tunnel?", "answer": false, "facts": [ "The Lincoln Tunnel has a maximum height clearance of 13 feet.", "The height of the Oasis cruise boat is 236 feet." ], "decomposition": [ "What is the maximum height clearance of the Lincoln Tunnel?", "How tall is the Oasis cruise ship?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Oasis-class cruise ship-2" ] ], [ "operation" ] ], [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Oasis of the Seas-6" ] ], [ "operation" ] ], [ [ [ "Lincoln Tunnel-5" ] ], [ [ "Oasis-class cruise ship-2", "Oasis-class cruise ship-3" ] ], [ "operation" ] ] ] }, { "qid": "71d5af08c0cfb9565a54", "term": "Motor vehicle", "description": "self-propelled wheeled vehicle", "question": "Could Oscar Wilde have operated a motor vehicle?", "answer": true, "facts": [ "Motor vehicles were in use by the 1890s", "Oscar Wilde lived until 1900" ], "decomposition": [ "When were cars first used?", "When did Oscar Wilde pass away?", "Did #2 happen after #1?" 
], "evidence": [ [ [ [ "Car-2" ] ], [ [ "Oscar Wilde-1" ] ], [ "operation" ] ], [ [ [ "Car-14" ] ], [ [ "Oscar Wilde-1" ] ], [ "operation" ] ], [ [ [ "Car-2" ] ], [ [ "Oscar Wilde-1" ] ], [ "operation" ] ] ] }, { "qid": "806a13f37055bd21fddc", "term": "Stoning", "description": "execution method", "question": "Would George Fox support stoning?", "answer": false, "facts": [ "George Fox was the founder of the Religious Society of Friends, commonly known as the Quakers or Friends.", "The Quakers advocate for peace and nonviolence.", "Stoning is a particularly violent and brutal method of capital punishment." ], "decomposition": [ "What was George Fox the founder of?", "What did #1 advocate for?", "Is stoning an example of #2?" ], "evidence": [ [ [ [ "George Fox-1" ] ], [ [ "Quakers-1" ] ], [ "no_evidence" ] ], [ [ [ "George Fox-1" ] ], [ [ "George Fox-23" ] ], [ "operation" ] ], [ [ [ "George Fox-1" ] ], [ [ "Quakers-63" ] ], [ [ "Stoning-1" ], "operation" ] ] ] }, { "qid": "9b77992543134de72b4e", "term": "Tokyo Tower", "description": "observation tower", "question": "Will Tokyo Tower be repainted only once during President Trump's first term?", "answer": true, "facts": [ "Tokyo Tower is repainted every five years ", "The last repainting began in 2018", "Trump's first presidential term is from 2017 to 2021" ], "decomposition": [ "How long (in years) is President Trump's first term?", "How often (interval in years) is the Tokyo Tower repainted?", "Is #2 divided by #1 less than two?" ], "evidence": [ [ [ [ "Term of office-11" ], "no_evidence" ], [ [ "Tokyo Tower-10" ] ], [ "operation" ] ], [ [ [ "President of the United States-4" ] ], [ [ "Tokyo Tower-10" ] ], [ "operation" ] ], [ [ [ "President of the United States-5" ] ], [ [ "Tokyo Tower-3" ] ], [ "operation" ] ] ] }, { "qid": "a1a92c378396180e190c", "term": "Supreme Court of the United States", "description": "Highest court in the United States", "question": "Has a neanderthal ever served on the Supreme Court of the United States?", "answer": false, "facts": [ "The Supreme Court was established in 1789.", "Neanderthals are primitive humans that lived 40,000 years ago." ], "decomposition": [ "How long ago did Neanderthals live?", "How long ago was the Supreme Court of the United States formed?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Neanderthal-1" ] ], [ [ "Supreme Court of the United States-2" ] ], [ "operation" ] ], [ [ [ "Neanderthal-1" ] ], [ [ "Supreme Court of the United States-2" ] ], [ "operation" ] ], [ [ [ "Neanderthal-1" ] ], [ [ "Supreme Court of the United States-8" ] ], [ "operation" ] ] ] }, { "qid": "03c467f12c3fcc1d91fe", "term": "Clouded leopard", "description": "species of mammal found from the Himalayan foothills through mainland Southeast Asia into China", "question": "Can Clouded leopards chase down many Pronghorn antelopes?", "answer": false, "facts": [ "The top speed of a Clouded leopard is 40 MPH.", "The top speed of a Pronghorn antelope is 61 MPH." ], "decomposition": [ "What is the top speed for a Clouded leopard ?", "What is the top speed for a Pronghorn antelope ?", "Is #1 greater then or equal to #2?" 
], "evidence": [ [ [ [ "Clouded leopard-31" ], "no_evidence" ], [ [ "Pronghorn-12" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Pronghorn-12" ] ], [ "operation" ] ], [ [ [ "Leopard-4" ], "no_evidence" ], [ [ "Pronghorn-12" ] ], [ "operation" ] ] ] }, { "qid": "d4546cb00bd8a7f0e041", "term": "Wednesday", "description": "Day of the week", "question": "Does the anatomy of a camel lend itself to jokes on Wednesdays?", "answer": true, "facts": [ "Wednesday is often referred to as 'hump day' as a joke.", "Camels are known for having a significant hump. " ], "decomposition": [ "As a joke, what is Wednesday otherwise known as?", "What are camels known for having?", "Is there overlap between #1 and #2?" ], "evidence": [ [ [ [ "Wednesday-25" ] ], [ [ "Camel-1" ] ], [ "operation" ] ], [ [ [ "Wednesday-25" ] ], [ [ "Camel-1" ] ], [ "operation" ] ], [ [ [ "Wednesday-25" ] ], [ [ "Camel-1" ] ], [ "operation" ] ] ] }, { "qid": "ac61d110f57cd7855686", "term": "Goofy", "description": "Disney cartoon character", "question": "Can voice actors for Goofy and Bugs Bunny each get one stripe from American flag?", "answer": true, "facts": [ "The American flag has 13 stripes on it.", "Since the role originated in 1932, six people have voiced the character of Goofy.", "Since 1940, seven people have voiced the character of Bugs Bunny." ], "decomposition": [ "How many stripes does the American flag have?", "How many people have been the voice of Goofy?", "How many people have been the voice of Bugs Bunny?", "What is #2 plus #3?", "Is #1 equal to or greater than #4?" ], "evidence": [ [ [ [ "Flag of the United States-1" ] ], [ [ "Bill Farmer-1", "Hal Smith (actor)-16", "Pinto Colvig-1", "Stuart Buchanan-1", "Tony Pope-2" ], "no_evidence" ], [ [ "Bugs Bunny-26", "Bugs Bunny-41" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Flag of the United States-1" ] ], [ [ "Goofy-43" ] ], [ [ "Billy West-1", "Eric Bauza-1", "Greg Burson-2", "Jeff Bergman-1", "Joe Alaskey-2", "Mel Blanc-1", "Sam Vincent (voice actor)-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Flag of the United States-1" ] ], [ [ "Goofy-43" ] ], [ [ "Bugs Bunny-41" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "52f9fdeab2e51f01f3dd", "term": "Clark Gable", "description": "American actor", "question": "Did Clark Gable appear in any movies scored by John Williams?", "answer": false, "facts": [ "Clark Gable died in 1960.", "John Williams scored his first movie in 1961." ], "decomposition": [ "When did Clark Gable die?", "When did John Williams begin creating movie scores?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Clark Gable-1" ] ], [ [ "John Williams-13" ] ], [ "operation" ] ], [ [ [ "Clark Gable-1" ] ], [ [ "John Williams-11" ] ], [ "operation" ] ], [ [ [ "Clark Gable-1" ] ], [ [ "John Williams-14" ] ], [ "operation" ] ] ] }, { "qid": "9dff1d5fe4c07cdb0ccb", "term": "Linus Torvalds", "description": "Creator and lead developer of Linux kernel", "question": "Is Linus Torvalds' wife unable to physically defend herself?", "answer": false, "facts": [ "Linus Torvalds is married to Tove Torvalds.", "Tove Torvalds is a six-time Finnish national karate champion.", "Karate is now predominantly a striking art using punching, kicking, knee strikes, elbow strikes and open-hand techniques such as knife-hands, spear-hands and palm-heel strikes." ], "decomposition": [ "Who is Linus Torvalds' wife?", "What is #1 well known for?", "Can #2 not be used as a form of self defense?" 
], "evidence": [ [ [ [ "Linus Torvalds-21" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Linus Torvalds-20" ] ], [ [ "Linus Torvalds-20" ] ], [ [ "Karate-22" ] ] ], [ [ [ "Linus Torvalds-20" ] ], [ [ "Linus Torvalds-20" ] ], [ [ "Karate-1" ], "operation" ] ] ] }, { "qid": "48628a79ac6d18460f36", "term": "Eric Clapton", "description": "English musician, singer, songwriter, and guitarist", "question": "Did Eric Clapton have similar taste in women to one of the Beatles?", "answer": true, "facts": [ "The Beatles consisted of John Lennon, Paul McCartney, George Harrison, and Ringo Starr.", "George Harrison was married to Pattie Boyd from 1966-1977.", "Eric Clapton married Pattie Boyd in 1979." ], "decomposition": [ "Who are the spouses Eric Clapton has had?", "Who are the spouses the members of the Beatles have had?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Eric Clapton-78" ] ], [ [ "George Harrison-4" ] ], [ "operation" ] ], [ [ [ "Eric Clapton-78" ] ], [ [ "George Harrison-4" ] ], [ "operation" ] ], [ [ [ "Eric Clapton-78" ] ], [ [ "Pattie Boyd-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ee95615e3dafa5e19020", "term": "Darth Vader", "description": "fictional character in the Star Wars franchise", "question": "Is watching Star Wars necessary to know who Darth Vader is?", "answer": false, "facts": [ "Star Wars is one of the most widely parodied film series to be produced.", "Star Wars merchandise, from tees to Halloween costumes, is widely available and is plentiful. " ], "decomposition": [ "Has Star Wars inspired many parody films?", "Are Star Wars merchandise depicting characters from the movie available?", "Considering #1 and #2, are there no depictions of characters outside the movie?" ], "evidence": [ [ [ [ "Robot Chicken: Star Wars-8" ] ], [ [ "Lego Star Wars-1" ] ], [ [ "Lego Star Wars-1", "Robot Chicken: Star Wars-8" ] ] ], [ [ [ "Cultural impact of Star Wars-1", "Star Wars: The Vintage Collection-1" ], "no_evidence" ], [ [ "Star Wars-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Lego Star Wars-9", "Spaceballs-2" ] ], [ [ "Walker (Star Wars)-33" ] ], [ "operation" ] ] ] }, { "qid": "4e4b9fc379547c40adf5", "term": "Eddie Murphy", "description": "American stand-up comedian and actor", "question": "Did Eddie Murphy's father see his first stand up show?", "answer": false, "facts": [ "Eddie Murphy's father died when Eddie Murphy was 8 years old.", "Eddie Murphy's stand up career began when he was 15 years old." ], "decomposition": [ "How old was Eddie Murphy when he released his first stand up show?", "How old was Eddie Murphy when his father died?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Eddie Murphy-9" ], "no_evidence" ], [ [ "Eddie Murphy-7" ] ], [ [ "Eddie Murphy-1" ] ] ], [ [ [ "Eddie Murphy-9" ] ], [ [ "Eddie Murphy-7" ] ], [ "operation" ] ], [ [ [ "Eddie Murphy-8" ] ], [ [ "Eddie Murphy-7" ] ], [ "operation" ] ] ] }, { "qid": "ac37cfbe97efb67fde55", "term": "Silverfish", "description": "species of insect", "question": "Could a silverfish reach the top of the Empire State Building?", "answer": false, "facts": [ "Silverfish cannot fly.", "Animals that cannot fly can only access objects at or near ground level without mechanical assistance.", "The top of the Empire State Building is \t1,454 ft high." ], "decomposition": [ "How high is the Empire State Building?", "What class of animals do silverfish belong to?", "Can #2 typically get to heights of #1 without assistance?" 
], "evidence": [ [ [ [ "Empire State Building-1" ] ], [ [ "Silverfish-1" ] ], [ [ "Silverfish-1" ] ] ], [ [ [ "Empire State Building-1" ] ], [ [ "Silverfish-1" ] ], [ "operation" ] ], [ [ [ "Empire State Building-1" ] ], [ [ "Silverfish-1" ] ], [ "operation" ] ] ] }, { "qid": "9834ceb34172041cd73f", "term": "Law & Order", "description": "original television series (1990-2010)", "question": "Are there winged statuettes in the home of the creator of Law & Order?", "answer": true, "facts": [ "Law & Order was created by Dick Wolf", "Dick Wolf won an Emmy in 2007 ", "Dick Wolf won an Emmy in 1997", "The Emmy statuette is of a winged woman holding an atom" ], "decomposition": [ "What award has a trophy that is a winged statuette?", "Who is the creator of Law & Order?", "Has #2 ever won #1?" ], "evidence": [ [ [ [ "Emmy Award-9" ] ], [ [ "Dick Wolf-1" ] ], [ [ "Dick Wolf-1" ] ] ], [ [ [ "Emmy Award-2" ] ], [ [ "Law & Order-1" ] ], [ [ "Dick Wolf-1" ] ] ], [ [ [ "Emmy Award-2" ] ], [ [ "Law & Order (franchise)-1" ] ], [ [ "Dick Wolf-1" ], "operation" ] ] ] }, { "qid": "ce921a18dd60a1c8caac", "term": "Islamophobia", "description": "Fear, hatred of, or prejudice against the Islamic religion or Muslims generally,", "question": "Was Donald Trump the target of Islamophobia?", "answer": false, "facts": [ "Islamophobia targets Muslims", "Donald Trump is a Presbyterian, a denomination of Christianity" ], "decomposition": [ "Islamophobia is the fear of which set of people?", "Does Donald Trump identify as #1?" ], "evidence": [ [ [ [ "Islamophobia-1" ] ], [ [ "Donald Trump-11" ] ] ], [ [ [ "Islamophobia-1" ] ], [ [ "Donald Trump-11" ] ] ], [ [ [ "Islamophobia-1" ] ], [ [ "Donald Trump-9" ] ] ] ] }, { "qid": "364935e95e4880fcc8b0", "term": "Golden Gate Bridge", "description": "suspension bridge on the San Francisco Bay", "question": "Would baker's dozen of side by side Mac Trucks jam up Golden Gate Bridge?", "answer": true, "facts": [ "The width of the Golden Gate Bridge is 90 feet. ", "A baker's dozen includes 13 items.", "The width of a Mac truck is around 8 feet." ], "decomposition": [ "How wide is a Mac truck?", "How many items are in a baker's dozen?", "What is #1 multiplied by #2?", "How wide is the Golden Gate Bridge?", "Is #3 greater than or equal to #4?" ], "evidence": [ [ [ [ "Mack Trucks-1" ], "no_evidence" ], [ [ "Dozen-7" ] ], [ "no_evidence", "operation" ], [ [ "Golden Gate Bridge-34" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Mack model EH trucks-6" ] ], [ [ "Dozen-8" ] ], [ "operation" ], [ [ "Golden Gate Bridge-36" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mack Granite-2" ] ], [ [ "Dozen-7" ] ], [ "operation" ], [ [ "Golden Gate Bridge-25" ] ], [ "operation" ] ] ] }, { "qid": "93b19cdb3f9d7731997e", "term": "Brussels sprout", "description": "vegetable", "question": "Could someone mistake the smell of your brussels sprouts for a fart?", "answer": true, "facts": [ "Brussels Sprouts are cruciferous vegetables.", "Cruciferous vegetables have a sulfur containing chemical called glucosinolate in them", "When you cook brussels sprouts, their smell intensifies. " ], "decomposition": [ "What kind of vegetable are brussels sprouts?", "What chemical is found inside #1?", "What happens to #2 when you cook them?", "Does #3 smell like farts?" 
], "evidence": [ [ [ [ "Brussels sprout-1" ] ], [ [ "Brussels sprout-13" ] ], [ [ "Brussels sprout-13", "Brussels sprout-16" ] ], [ [ "Glucosinolate-1" ], "no_evidence" ] ], [ [ [ "Brussels sprout-2" ] ], [ [ "Brussels sprout-13" ] ], [ [ "Brussels sprout-16" ] ], [ [ "Flatulence-17" ] ] ], [ [ [ "Brussels sprout-1" ] ], [ [ "Brussels sprout-13", "Cabbage-39" ] ], [ [ "Cabbage-39" ] ], [ [ "Hydrogen sulfide-2" ], "operation" ] ] ] }, { "qid": "3871d7a05a729494ecd9", "term": "Guitarist", "description": "person who plays the guitar", "question": "Do guitarist's have fingers that can handle pain better than average?", "answer": true, "facts": [ "Guitarists typically have calloused fingertips. ", "Callouses are formed of layers of dead skin and usually lack sensation." ], "decomposition": [ "What typically forms on a Guitarists' finger?", "Does #1 usually cause a lack of sensation?" ], "evidence": [ [ [ [ "Callus-3" ] ], [ [ "Callus-12" ], "no_evidence", "operation" ] ], [ [ [ "Callus-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Callus-3" ] ], [ [ "Callus-13", "Callus-6" ] ] ] ] }, { "qid": "f315b85273989097eb91", "term": "1965", "description": "Year", "question": "Were there under 150,000 American troops in Vietnam in 1965?", "answer": true, "facts": [ "In 1965 the president announced an intention to increase the amount of troops to 125,000", "There were only 75,000 prior to 1965" ], "decomposition": [ "How many American troops were in Vietnam in 1965?", "Is #1 less than 150,000?" ], "evidence": [ [ [ [ "Vietnam War-58" ] ], [ "operation" ] ], [ [ [ "Vietnam War-56", "Vietnam War-58" ], "no_evidence" ], [ "operation" ] ], [ [ [ "1965 in the Vietnam War-96" ] ], [ "operation" ] ] ] }, { "qid": "2c71a9a0b7d73c151442", "term": "Maize", "description": "Cereal grain", "question": "Did Native American tribes teach Spaniards how to cultivate maize?", "answer": true, "facts": [ "In 1492, Spanish settlers brought Maize back to Europe from America.", "Native Americans cultivated and bred the first maize from wild grasses." ], "decomposition": [ "Who cultivated the maize that Spaniards took to Europe from America in 1492?", "Were #1 Native Americans?" ], "evidence": [ [ [ [ "Maize-11" ] ], [ "no_evidence" ] ], [ [ [ "Maize-10" ] ], [ [ "Mapuche-1" ], "operation" ] ], [ [ [ "History of the Caribbean-6", "Maize-11", "Maize-13", "Maize-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "52a0dd337fb870fa3eb8", "term": "Oscar Wilde", "description": "19th-century Irish poet, playwright and aesthete", "question": "Has Oscar Wilde's most famous character ever been in an Eva Green project?", "answer": true, "facts": [ "Oscar Wilde is most famous for his book The Picture of Dorian Gray.", "Dorian Gray is a beautiful socialite that is the main character of The Picture of Dorian Gray.", "Acclaimed actress, Eva Green has starred in numerous films and TV shows including Penny Dreadful.", "Dorian Gray flirts with Vanessa Ives in the Penny Dreadful episode, Seance.", "Vanessa Ives is played by Eva Green." ], "decomposition": [ "What is Oscar Wilde's most famous book?", "Who is the main character of #1?", "What episode of Penny Dreadful was #2 in?", "Is one of the characters in #3 played by Eva Green?" 
], "evidence": [ [ [ [ "Oscar Wilde-1" ] ], [ [ "The Picture of Dorian Gray-4" ] ], [ [ "Penny Dreadful (TV series)-2" ] ], [ [ "Eva Green-4" ] ] ], [ [ [ "Oscar Wilde-1" ] ], [ [ "The Picture of Dorian Gray-4" ] ], [ [ "Penny Dreadful (TV series)-2" ], "no_evidence" ], [ [ "Penny Dreadful (TV series)-3" ] ] ], [ [ [ "Oscar Wilde-1" ] ], [ [ "The Picture of Dorian Gray-4" ] ], [ [ "Penny Dreadful (TV series)-2" ], "no_evidence" ], [ [ "Reeve Carney-1", "Vanessa Ives-1" ], "no_evidence" ] ] ] }, { "qid": "f233f8748574df6f4727", "term": "Disgust", "description": "Basic emotion", "question": "Do frogs feel disgust?", "answer": true, "facts": [ "Disgust is a basic emotion.", "Frogs make sounds that express their emotions." ], "decomposition": [ "What is disgust?", "Do frogs express #1?" ], "evidence": [ [ [ [ "Disgust-1" ] ], [ [ "Frog-42" ], "no_evidence" ] ], [ [ [ "Disgust-1" ] ], [ [ "Frog-2" ], "no_evidence", "operation" ] ], [ [ [ "Disgust-47" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "00dc05718aedf2370213", "term": "Monogamy", "description": "Relationship form where each individual has only one partner during their lifetime or at any one time", "question": "Did either Kublai Khan or his grandfather practice monogamy?", "answer": false, "facts": [ "Kublai Khan was married multiple times and was said by some to have thousands of concubines.", "Kublai Khans grandfather was Genghis Khan.", "Genghis Khan had six wives and was said to have over 500 concubines." ], "decomposition": [ "How many times was Kublai Khan married?", "Who was Kublai Khan's grandfather?", "How many times wives did #2 have?", "Is #1 equal to 1 and is #3 equal 1?" ], "evidence": [ [ [ [ "Kublai Khan-73" ] ], [ [ "Kublai Khan-5" ] ], [ [ "Genghis Khan-22" ] ], [ "operation" ] ], [ [ [ "Kublai Khan-74" ] ], [ [ "Kublai Khan-2" ] ], [ [ "Genghis Khan-18" ] ], [ "operation" ] ], [ [ [ "Kublai Khan-73" ] ], [ [ "Kublai Khan-5" ] ], [ [ "Genghis Khan-15" ] ], [ "operation" ] ] ] }, { "qid": "d05b8ed82dbe1583b16c", "term": "Parent", "description": "father or mother", "question": "Does a person need to be a parent to become a grandparent?", "answer": true, "facts": [ "Parents care for their children.", "When the children grow up and have kids of their own, the parents become grandparents to those kids.", "A person who is not a parent has no kids, therefore nobody to produce grandchildren for them." ], "decomposition": [ "What must a person have in order to be known as a grandparent?", "What would the parents of #1 be to the person?", "Must one be a parent to have #2?" ], "evidence": [ [ [ [ "Grandparent-1" ] ], [ [ "Grandparent-1" ] ], [ [ "Grandparent-1" ] ] ], [ [ [ "Parent-7" ], "no_evidence" ], [ [ "Parent-1" ] ], [ "operation" ] ], [ [ [ "Grandparent-1" ] ], [ [ "Child-2" ] ], [ [ "Parent-1" ], "operation" ] ] ] }, { "qid": "4c088a5366459f2256c6", "term": "Mental disorder", "description": "Distressing thought or behavior pattern", "question": "Did Van Gogh suffer from a mental disorder?", "answer": true, "facts": [ "Mental disorders can be characterized by psychotic episodes and delusions", "Van Gogh suffered from psychotic episodes and delusions" ], "decomposition": [ "What are mental disorders characterized as?", "What issues did Van Gogh suffer from?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Mental disorder-40" ] ], [ [ "Vincent van Gogh-3" ] ], [ "operation" ] ], [ [ [ "Mental disorder-1" ] ], [ [ "Vincent van Gogh-3" ] ], [ "operation" ] ], [ [ [ "Causes of mental disorders-58" ], "operation" ], [ [ "Van Gogh syndrome-4" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "d424e393a4daff536f57", "term": "C-SPAN", "description": "American pay television network", "question": "Is the span in C-SPAN named after Alan Greenspan?", "answer": false, "facts": [ "Alan Greenspan was chairman of the Federal Reserve from 1987 to 2006.", "CSPAN is a cable news network that was created in 1979.", "Alan Greenspan was relatively unknown to the world at large in 1979." ], "decomposition": [ "When was C-SPAN created?", "When was Alan Greenspan well-known?", "Is #1 contained within #2?" ], "evidence": [ [ [ [ "C-SPAN-1" ] ], [ [ "Alan Greenspan-1" ] ], [ [ "Alan Greenspan-1", "C-SPAN-1" ] ] ], [ [ [ "Cable television-2" ] ], [ [ "Alan Greenspan-1" ] ], [ "operation" ] ], [ [ [ "C-SPAN-1" ] ], [ [ "Alan Greenspan-1", "Alan Greenspan-7" ] ], [ "operation" ] ] ] }, { "qid": "f815aaa22d608f1dcb84", "term": "Eminem", "description": "American rapper and actor", "question": "Would Eminem perform well at the International Mathematical Olympiad?", "answer": false, "facts": [ "Eminem dropped out of Lincoln High School at age 17 and disliked math and social studies.", "Mathematics competitions or mathematical olympiads are competitive events where participants sit a mathematics test.", "The content on the International Mathematical Olympiad ranges from extremely difficult algebra and pre-calculus problems to problems on branches of mathematics not conventionally covered at school and often not at university level either." ], "decomposition": [ "What levels of mathematics are covered in the International Mathematical Olympiad?", "What levels of mathematics is Eminem competent in?", "Does #2 meet the minimum level in #1?" ], "evidence": [ [ [ [ "International Mathematical Olympiad-2" ] ], [ [ "Eminem-8" ] ], [ [ "Eminem-8", "International Mathematical Olympiad-2" ] ] ], [ [ [ "International Mathematical Olympiad-2" ] ], [ [ "Eminem-8" ] ], [ [ "Eminem-8" ] ] ], [ [ [ "International Mathematical Olympiad-1", "International Mathematical Olympiad-2" ] ], [ [ "Eminem-8" ] ], [ "operation" ] ] ] }, { "qid": "9d2f5beb0ffe85faf16d", "term": "Boolean algebra", "description": "Algebra involving variables containing only \"true\" and \"false\" (or 1 and 0) as values", "question": "Can a computer be programmed entirely in Boolean algebra?", "answer": true, "facts": [ "Boolean algebra is the branch of algebra in which the values of the variables are the truth values true and false, usually denoted 1 and 0 respectively. ", "Mathematics in 1 and 0 is also called binary or machine language.", "Computers are programmed in machine language." ], "decomposition": [ "What are values included in Boolean algebra?", "At what level are program codes read directly by computers?", "Are the values included in #2 the same as #1?" 
], "evidence": [ [ [ [ "Boolean algebra-1" ] ], [ [ "Computer programming-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary code-1" ] ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary code-1" ] ], [ "operation" ] ] ] }, { "qid": "43a26c2f067095e1992b", "term": "Guitar Hero", "description": "video game series", "question": "Is Guitar Hero Beatles inappropriate for a US third grader?", "answer": false, "facts": [ "The average age of a US third grader is 8.", "Guitar Hero is recommended for ages 7 and up.", "The Beatles were a British rock band with a plethora of radio friendly hits." ], "decomposition": [ "How old is the average US third grader?", "What is the recommended age to play Guitar Hero?", "Is #1 higher than #2?" ], "evidence": [ [ [ [ "Third grade-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Third grade-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Third grade-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "30ebf73bc3294792f8de", "term": "Society", "description": "Social group involved in persistent social interaction", "question": "In American society, will a bachelor's degree often include a leap year?", "answer": true, "facts": [ "Leap years occur every four years.", "In American society, a bachelor's degree takes about four years." ], "decomposition": [ "Leap years occur after how many years' interval?", "How many years does an average bachelor's degree take in the US?", "Is #2 divided by #1 greater than or equal to one?" ], "evidence": [ [ [ [ "Leap year-16" ] ], [ [ "Bachelor's degree-1" ] ], [ "operation" ] ], [ [ [ "Leap year-6" ] ], [ [ "Bachelor's degree-37" ] ], [ "operation" ] ], [ [ [ "Leap year-2" ] ], [ [ "Bachelor's degree-1", "Bachelor's degree-37" ] ], [ "operation" ] ] ] }, { "qid": "6cd39b671b2a1d6fb161", "term": "Ariana Grande", "description": "American singer, songwriter, and actress", "question": "At a presentation about post traumatic stress disorder, would Ariana Grande be a topic of relevance?", "answer": true, "facts": [ "Ariana Grande was performing in Manchester in 2017 when explosives were set off in an act of terrorism.", "Ariana Grande has spoken openly about her trauma experience and her PTSD regarding the Manchester Bombing." ], "decomposition": [ "What happened during Ariana Grande's performance in Manchester in 2017?", "What types of events cause post traumatic stress disorder?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Ariana Grande-22" ] ], [ [ "Posttraumatic stress disorder-1" ] ], [ "operation" ] ], [ [ [ "Manchester Arena bombing-4" ] ], [ [ "Posttraumatic stress disorder-1" ] ], [ "operation" ] ], [ [ [ "Ariana Grande-22" ] ], [ [ "Traumatic stress-7" ] ], [ [ "Traumatic stress-7" ] ] ] ] }, { "qid": "8f12cd3797d27f250b00", "term": "Banana", "description": "edible fruit", "question": "Were plants crucial for The King of Rock'n Roll's snack with bananas?", "answer": true, "facts": [ "Elvis Presley is known as The King of Rock'n Roll.", "Elvis Presley loved to eat peanut butter and bananas.", "Bananas come from banana plants.", "Peanut butter comes from peanuts, which come from peanut plants." ], "decomposition": [ "Who is commonly referred to as The King of Rock 'n Roll?", "Which snacks was #1 known to take with bananas?", "Are #2 plants products or made from them?" 
], "evidence": [ [ [ [ "King of Rock and Roll (disambiguation)-1" ] ], [ [ "Elvis Presley-86" ] ], [ [ "Peanut butter, banana and bacon sandwich-1" ] ] ], [ [ [ "Elvis Presley-1" ] ], [ [ "Elvis Presley-86" ] ], [ [ "Peanut butter-1" ], "operation" ] ], [ [ [ "Elvis Presley-1" ] ], [ [ "Elvis Presley-86" ] ], [ [ "Peanut butter-1", "Peanut-1" ] ] ] ] }, { "qid": "80ba3ad84b318f16f34c", "term": "The Atlantic", "description": "Magazine and multi-platform publisher based in Washington, D.C.", "question": "Could you read The Atlantic magazine during the Games of the XXII Olympiad?", "answer": true, "facts": [ "The Atlantic magazine, founded in 1857, still publishes as of May 2020.", "The XXII Olympiad was the official name for the 1980 Summer Olympics." ], "decomposition": [ "When was The Atlantic Magazine founded?", "When was the XXII Olypiad?", "Is #2 after #1?" ], "evidence": [ [ [ [ "The Atlantic-1" ] ], [ [ "1980 Summer Olympics-1" ] ], [ "operation" ] ], [ [ [ "The Atlantic-1" ] ], [ [ "1980 Summer Olympics-1" ] ], [ "operation" ] ], [ [ [ "The Atlantic-1" ] ], [ [ "1980 Summer Olympics-1" ] ], [ "operation" ] ] ] }, { "qid": "881286dcdded13a96e3b", "term": "Bob Marley", "description": "Jamaican singer-songwriter", "question": "Can you find Bob Marley's face in most smoke shops?", "answer": true, "facts": [ "Bob Marley's face is on the packaging of a popular brand of rolling papers.", "Bob Marley is a popular graphic to print on t-shirts for sale to smokers." ], "decomposition": [ "Where can one find Bob Marley's face printed on?", "Are any items from #1 commonly found in smoke shops?" ], "evidence": [ [ [ [ "Bob Marley-1" ], "no_evidence" ], [ [ "Head shop-1", "Head shop-2" ], "no_evidence", "operation" ] ], [ [ [ "Marley Natural-2" ] ], [ "operation" ] ], [ [ [ "Bob Marley-48" ] ], [ "operation" ] ] ] }, { "qid": "5c03103b5a70117cbdf0", "term": "Elizabeth I of England", "description": "Queen regnant of England and Ireland from 17 November 1558 until 24 March 1603", "question": "Could Elizabeth I of England have seen the play Dido, Queen of Carthage ?", "answer": true, "facts": [ "Elizabeth I of England lived from 1533 - 1603.", "Dido, Queen of Carthage is a short play written by the English playwright Christopher Marlowe.", " It was probably written between 1587 and 1593." ], "decomposition": [ "When was the play Dido, Queen of Carthage written?", "Was Elizabeth I of England alive during the period covered by #1?" ], "evidence": [ [ [ [ "Dido, Queen of Carthage (play)-1" ] ], [ [ "Elizabeth I of England-1" ] ] ], [ [ [ "Dido, Queen of Carthage (play)-1" ] ], [ [ "Elizabeth I of England-1" ] ] ], [ [ [ "Dido, Queen of Carthage (play)-1" ] ], [ [ "Elizabeth I of England-1" ] ] ] ] }, { "qid": "e48fb3de1e039f4e8960", "term": "Bipolar disorder", "description": "mental disorder that causes periods of depression and abnormally elevated mood", "question": "Is Britney Spears' breakdown attributed to bipolar disorder?", "answer": true, "facts": [ "In 2008 Britney Spears was detained in a psychiatric hospital for bipolar disorder.", "In 2007 Britney Spears shaved her head during a breakdown. " ], "decomposition": [ "When did Britney Spears have a breakdown?", "What major event happened to Britney Spears the year after #1?", "What was the reason behind #2 happening to Britney Spears?" 
], "evidence": [ [ [ [ "Britney Spears-25" ] ], [ [ "Britney Spears-27" ] ], [ "no_evidence" ] ], [ [ [ "Britney Spears-25" ] ], [ [ "Britney Spears-24" ] ], [ "no_evidence" ] ], [ [ [ "Britney Spears-25" ] ], [ [ "Britney Spears-27" ] ], [ [ "Britney Spears-61" ], "no_evidence" ] ] ] }, { "qid": "056452ee6c3af5567f82", "term": "Chlorine", "description": "Chemical element with atomic number 17", "question": "Is week old chlorine water safe to drink?", "answer": true, "facts": [ "Chlorine is a chemical that is the second lightest halogen element.", "Chlorine is toxic and can attack the respiratory system of humans.", "Chlorine is highly soluble in water and will dissolve in around 4 and a half days.", "The Water Quality and Health Council states that chlorination of drinking water protects consumers from diseases caused by waterborne microorganisms." ], "decomposition": [ "How long does it take for chlorine to dissolve in water?", "Is water with dissolved chlorine safe to drink?", "Is #2 positive and #1 less than a week?" ], "evidence": [ [ [ [ "Water chlorination-5" ] ], [ [ "History of water supply and sanitation-75" ] ], [ "operation" ] ], [ [ [ "Chlorine-66" ], "no_evidence" ], [ [ "Hypochlorous acid-3" ], "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Chlorine-13" ], "no_evidence" ], [ [ "History of water supply and sanitation-75" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "385edf4addbd69573540", "term": "Foot (unit)", "description": "customary unit of length", "question": "Is the foot part of the metric system?", "answer": false, "facts": [ "The metric system measures distance based on the meter unit.", "The foot is part of the American standard system of measurement." ], "decomposition": [ "Which units of measure are used in the metric system?", "Is foot included in #1?" ], "evidence": [ [ [ [ "Metric system-1", "Metric system-27" ] ], [ [ "Foot (unit)-1" ], "operation" ] ], [ [ [ "Metric system-5" ] ], [ "operation" ] ], [ [ [ "Metric system-28" ] ], [ "operation" ] ] ] }, { "qid": "d00ae50a07a5dd8b726c", "term": "Rede Globo", "description": "Brazilian commercial television network", "question": "Would it be typical for a Rede Globo anchor to say Konnichiwa to the viewers?", "answer": false, "facts": [ "Konnichiwa is a greeting in the Japanese language.", "The national language of Brazil is Portuguese." ], "decomposition": [ "In which country is Rede Globo based?", "What is the official language in #1?", "What language is Konnichiwa?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Rede Globo-1" ] ], [ [ "Portuguese language-1" ] ], [ [ "Konnichi wa-1" ] ], [ "operation" ] ], [ [ [ "Rede Globo-1" ] ], [ [ "Portuguese language-1" ] ], [ [ "Konnichi wa-1" ] ], [ "operation" ] ], [ [ [ "Rede Globo-1" ] ], [ [ "Rede Globo-1" ] ], [ [ "Konnichi wa-1" ] ], [ "operation" ] ] ] }, { "qid": "001f5aedc57159e1fd99", "term": "Hades", "description": "Greek god of the underworld in Greek mythology", "question": "Does Hades appear in a Disney Channel musical movie?", "answer": true, "facts": [ "The Descendants Trilogy is a series of musical movies that aired on Disney Channel between 2015 and 2019.", "Hades appears as a supporting character in the third Descendants movie." ], "decomposition": [ "Which major musical series has been aired on Disney Channel?", "Has Hades been featured in any of #1?" 
], "evidence": [ [ [ [ "Hercules (franchise)-1" ] ], [ [ "Megara (Disney character)-17" ] ] ], [ [ [ "Descendants (franchise)-1" ] ], [ [ "Descendants 3-3" ], "operation" ] ], [ [ [ "Hercules (franchise)-17", "Megara (Disney character)-17" ] ], [ [ "Megara (Disney character)-17" ] ] ] ] }, { "qid": "01786fe2b099fd7fb504", "term": "Minor League Baseball", "description": "hierarchy of professional baseball leagues affiliated with Major League Baseball", "question": "Were weather phenomena avoided when naming minor league baseball teams?", "answer": false, "facts": [ "Weather phenomena refers to types of weather caused conditions such as cyclones, storms, and tsunamis.", "Minor league baseball teams include the Brooklyn Cyclones and Lake Elsinore Storm." ], "decomposition": [ "What are some names of weather phenomena?", "What are the name of minor league baseball teams?", "Are any terms in #1 also present in #2?" ], "evidence": [ [ [ [ "Weather-5" ], "no_evidence" ], [ [ "Omaha Storm Chasers-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Glossary of meteorology-1" ], "no_evidence" ], [ [ "Minor League Baseball-40" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Thunder-1" ] ], [ [ "Trenton Thunder-1" ] ], [ "operation" ] ] ] }, { "qid": "0013d38e0568f48acdc0", "term": "Voyager 2", "description": "Space probe and the second-farthest man-made object from Earth", "question": "Could a Hwasong-15 missile hypothetically reach Voyager 2?", "answer": false, "facts": [ "Voyager 2 was a probe that traveled to the interstellar medium of space.", "The interstellar medium is over 12,161,300,000 miles away from earth.", "The Hwasong-15 missile is a North Korean missile with a range of 8,000 miles." ], "decomposition": [ "How far away from Earth has Voyager 2 traveled?", "What is the range of a Hwasong-15 missile?", "Is #2 greater or equal to #1?" ], "evidence": [ [ [ [ "Voyager 2-3" ] ], [ [ "Hwasong-15-3" ] ], [ "operation" ] ], [ [ [ "Voyager 2-3" ] ], [ [ "Hwasong-15-1" ] ], [ "operation" ] ], [ [ [ "Voyager 2-3" ] ], [ [ "Hwasong-15-3" ] ], [ "operation" ] ] ] }, { "qid": "30c3a32157acb4861555", "term": "Armadillo", "description": "family of mammals", "question": "Could someone theoretically use an armadillo as a shield?", "answer": true, "facts": [ "Armadillos have hard armor made of dermal bone.", "Humans have ended up in the hospital due to bullets ricocheting against an armadillo's shell." ], "decomposition": [ "What are the basic features of a shield?", "Does any part of the armadillo's body possess any of #1?" ], "evidence": [ [ [ [ "Shield-1" ] ], [ [ "Armadillo-2" ], "operation" ] ], [ [ [ "Shield-2" ] ], [ [ "Armadillo-2" ], "operation" ] ], [ [ [ "Shield-1" ] ], [ [ "Armadillo-2" ] ] ] ] }, { "qid": "2c6955a5381253c1067c", "term": "Linus Torvalds", "description": "Creator and lead developer of Linux kernel", "question": "Does Linus Torvalds make money off of DirectX?", "answer": false, "facts": [ "DirectX is a proprietary technology owned by Microsoft", "Linus Torvalds is the creator and lead developer for the open-source Linux kernel", "The Linux kernel is used in operating systems that are competitors of Microsoft Windows" ], "decomposition": [ "Which company owns the DirectX technology?", "Which operating system does #1 develop?", "Linus Torvalds develops which operating system?", "Is #2 the same as #3?" 
], "evidence": [ [ [ [ "DirectX-1" ] ], [ [ "Microsoft Windows-1" ] ], [ [ "Linus Torvalds-1" ] ], [ "operation" ] ], [ [ [ "DirectX-1" ] ], [ [ "DirectX-1" ] ], [ [ "Linus Torvalds-1" ] ], [ "operation" ] ], [ [ [ "DirectX-1" ] ], [ [ "Microsoft Windows-1" ] ], [ [ "Linus Torvalds-1" ] ], [ "operation" ] ] ] }, { "qid": "6a01b523e0140f21c426", "term": "Mediterranean Sea", "description": "Sea connected to the Atlantic Ocean between Europe, Africa and Asia", "question": "Did a Mediterranean Sea creature kill Steve Irwin?", "answer": true, "facts": [ "Steve Irwin was killed by a Stingray animal.", "Batoids are sea ray animals that live in the Mediterranean Sea.", "Batoids and stingrays are related by sharing a scientific class of Chondrichthyes." ], "decomposition": [ "Which animal killed Steve Irwin?", "Is #1 a sea creature" ], "evidence": [ [ [ [ "Steve Irwin-35" ] ], [ [ "Stingray-2" ] ] ], [ [ [ "Steve Irwin-35" ] ], [ [ "Broad stingray-4", "Great Barrier Reef-7" ] ] ], [ [ [ "Steve Irwin-35" ] ], [ [ "Stingray-1" ] ] ] ] }, { "qid": "4ba70839df733c61f9a5", "term": "Zika virus", "description": "Species of virus", "question": "Do you need to worry about Zika virus in Antarctica? ", "answer": false, "facts": [ "Mosquitoes cannot survive in the climate of Antarctica.", "Zika virus is primarily spread through mosquito bites. " ], "decomposition": [ "What animal spreads the Zika Virus?", "What is the climate of Antarctica?", "Can #1 survive in #2?" ], "evidence": [ [ [ [ "Zika virus-10" ] ], [ [ "Antarctica-42" ] ], [ [ "Antarctica-42", "Mosquito-68" ], "operation" ] ], [ [ [ "Zika fever-2" ] ], [ [ "Antarctica-42" ] ], [ [ "Mosquito-61" ], "operation" ] ], [ [ [ "Aedes-1", "Zika fever-2" ] ], [ [ "Antarctica-2" ] ], [ "operation" ] ] ] }, { "qid": "49228a8553a4448fa366", "term": "Christmas carol", "description": "Song or hymn or carol on the theme of Christmas", "question": "Did the writer of Christmas carol fast during Ramadan? ", "answer": false, "facts": [ "The writer of Christmas carol is Charles Dickens, who is a Christian. ", "Christians do not fast during Ramadan. " ], "decomposition": [ "Which group of people fast during Ramadan?", "Christmas carols are composed by and for which group of people?", "Are #2 and #1 the same?" ], "evidence": [ [ [ [ "Ramadan-1" ] ], [ [ "Christmas and holiday season-2", "Christmas carol-1" ] ], [ "operation" ] ], [ [ [ "Ramadan-1" ] ], [ [ "Christmas carol-1", "Christmas-1" ] ], [ "operation" ] ], [ [ [ "Ramadan-1" ] ], [ [ "Christmas carol-10" ] ], [ "operation" ] ] ] }, { "qid": "68684152725123d32f4b", "term": "Haiku", "description": "very short form of Japanese poetry", "question": "Are most books written as a Haiku?", "answer": false, "facts": [ "Haiku is a very short poem", "Haiku is written with 3 short phrases." ], "decomposition": [ "What is the format of a haiku?", "Are chapter books written like #1?" 
], "evidence": [ [ [ [ "Haiku-2" ] ], [ "operation" ] ], [ [ [ "Haiku-2" ] ], [ [ "Chapter book-1" ] ] ], [ [ [ "Haiku-2" ] ], [ "no_evidence" ] ] ] }, { "qid": "86f441f4f66b4b5eb375", "term": "1976 Summer Olympics", "description": "Games of the XXI Olympiad, held in Montréal in 1976", "question": "Does the country that received the most gold medals during the 1976 Olympics still exist?", "answer": false, "facts": [ "The Soviet Union received the most gold medals during the 1976 Summer Olympics", "The Soviet Union existed from 1922 to 1991" ], "decomposition": [ "In the 1976 Summer Olympics, which country received the most gold medals?", "Does #1 still exist as a country?" ], "evidence": [ [ [ [ "1976 Summer Olympics-3" ] ], [ [ "Soviet Union-1" ], "operation" ] ], [ [ [ "1976 Summer Olympics-3" ] ], [ [ "Soviet Union-1" ] ] ], [ [ [ "1976 Summer Olympics medal table-3" ] ], [ [ "Islam in the Soviet Union-1" ] ] ] ] }, { "qid": "976710eb9fff4ed94fd8", "term": "Chuck Norris", "description": "American martial artist, actor, film producer and screenwriter", "question": "Will Chuck Norris be a nonagenarian by time next leap year after 2020 happens?", "answer": false, "facts": [ "A nonagenarian is a person between 90 and 99 years of age.", "Chuck Norris is 80 years old in 2020.", "The next leap year after 2020 is 2024.", "Chuck Norris will be 84 in 2024." ], "decomposition": [ "When was Chuck Norris born?", "When is the next leap year after 2020?", "What is the difference between #1 and #2?", "How many years of age makes one a nonagenarian?", "Is #3 greater than or equal to #4?" ], "evidence": [ [ [ [ "Chuck Norris-1" ] ], [ [ "Determination of the day of the week-14" ] ], [ "operation" ], [ [ "Manuel Pinto da Fonseca-9" ] ], [ "operation" ] ], [ [ [ "Chuck Norris-1" ] ], [ [ "Leap year-6" ], "no_evidence" ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Chuck Norris-4" ] ], [ [ "2024-1" ] ], [ "operation" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b88ba16883b786c8469b", "term": "Silk", "description": "fine, lustrous, natural fiber produced by the larvae of various silk moths, especially the species Bombyx mori", "question": "Does Bombyx mori have a monopoly over silk production?", "answer": false, "facts": [ "A monopoly refers to the exclusive supply of a good.", "The Bombyx mori is a moth famous for its silk production.", "Spiders, beetles, caterpillars, and fleas produce silk.", "Wild silk produced by caterpillars has been used in China, Europe, and South Asia since antiquity." ], "decomposition": [ "In a monopoly, how many different entities supply goods?", "What insects produce silk?", "How many things are listed in #2?", "Is #3 equal to #1?" ], "evidence": [ [ [ [ "Monopoly-1" ] ], [ [ "Bombyx mori-1", "Silk-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Monopoly-1" ] ], [ [ "Silk-2" ] ], [ [ "Silk-2" ] ], [ "operation" ] ], [ [ [ "Monopoly-2" ] ], [ [ "Silk-2" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "28cd9041ad61b93e3b91", "term": "Cannabis (drug)", "description": "psychoactive drug from the Cannabis plant", "question": "Has cannabis been a big influence in rap music genre?", "answer": true, "facts": [ "Rapper Dr. 
Dre named his 1992 album, The Chronic, a reference to marijuana.", "Cannabis is a flowering plant also known as marijuana.", "Rapper Canibus took his name from cannabis.", "Rapper Snoop Dogg's song OG has a line, \"Rolling up my Mary Jane,\" a reference to marijuana.", "Rap group Bone Thugs N Harmony's Weed Song is a reference to marijuana." ], "decomposition": [ "What is Rapper Dr. Dre's Album The Chronic a reference to?", "What did Rapper Canibus get his name from?", "Snoop Dogg's line \"Rolling up my Mary Jane\" from the song OG has reference to?", "Are all #1, #2, #3 the same as cannabis?" ], "evidence": [ [ [ [ "The Chronic-1" ] ], [ [ "Canibus-4", "Cannabis sativa-1" ] ], [ [ "Mary + Jane-1" ] ], [ "operation" ] ], [ [ [ "The Chronic-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "The Chronic-1" ] ], [ [ "Cannabis (drug)-1" ] ], [ [ "Snoop Dogg-88" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "fd6c86e0bccd5157cfd9", "term": "Jews", "description": "Ancient nation and ethnoreligious group from the Levant", "question": "Do Jews believe in any New Testament angels?", "answer": true, "facts": [ "The New Testament is a book central to Christianity.", "The New Testament features a number of angels including Michael, and Gabriel.", "The Talmud is the central text of Rabbinic Judaism.", "The Talmud names four angels who would later be known as archangels, surrounding God's throne: Michael, Gabriel, Uriel, and Raphael." ], "decomposition": [ "What book is the central text of Rabbinic Judaism?", "Does #1 mention any angels?", "Are the angels mentioned in #2 also mentioned in the New testament?" ], "evidence": [ [ [ [ "Rabbinic Judaism-3" ] ], [ [ "Angels in Judaism-1" ] ], [ [ "Angels in Judaism-1" ] ] ], [ [ [ "Rabbinic Judaism-3", "Talmud-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Hebrew Bible-1" ] ], [ [ "Book of Daniel-2", "Michael (archangel)-2" ] ], [ [ "Michael (archangel)-3" ], "operation" ] ] ] }, { "qid": "74936a1e1f16a8e97d68", "term": "Kelly Clarkson", "description": "American singer-songwriter, actress, and television personality", "question": "Did Christina Aguilera turn her chair around for Kelly Clarkson on The Voice?", "answer": false, "facts": [ "Christina Aguilera is a judge on the voice.", "Kelly Clarkson is a judge on the voice.", "Judges only turn their chairs around for competitors. ", "Kelly Clarkson has not competed on the voice." ], "decomposition": [ "Do judges on the voice turn their chair for only contestants?", "Has Kelly Clarkson ever been a contestant on the voice?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "The Voice (franchise)-2" ] ], [ [ "Kelly Clarkson-33" ] ], [ "operation" ] ], [ [ [ "The Voice (franchise)-6" ] ], [ [ "Kelly Clarkson-1" ] ], [ "operation" ] ], [ [ [ "The Voice (franchise)-6" ] ], [ [ "Kelly Clarkson-34" ] ], [ "operation" ] ] ] }, { "qid": "684d4c03be354635b80f", "term": "Christians", "description": "people who adhere to Christianity", "question": "Do Christians anticipate an existence in Sheol after death?", "answer": false, "facts": [ "Sheol appears in the Christian Bible, in the Old Testament.", "Christians do not recognize Sheol as part of their afterlife." ], "decomposition": [ "Which Testament of the Bible makes reference to Sheol?", "Is #1 the New Testament?", "Is Sheol included in Christians' concept of afterlife as expressed in the New Testament?", "Is #2 or #3 positive?" 
], "evidence": [ [ [ [ "Sheol-2" ] ], [ "operation" ], [ [ "Heaven-15", "Hell-34" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sheol-1" ] ], [ [ "New Testament-1" ], "operation" ], [ [ "New Testament-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sheol-2" ] ], [ [ "Sheol-1" ] ], [ [ "Afterlife-44" ] ], [ "operation" ] ] ] }, { "qid": "2b776be964150651f4b3", "term": "Courage", "description": "quality of mind or spirit that enables a person to face difficulty, danger, or pain", "question": "Would an anxious person benefit from receiving courage from the Wizard of Oz?", "answer": false, "facts": [ "An anxious person may benefit from medication or therapy.", "The Wizard of Oz cannot give courage to anyone." ], "decomposition": [ "What would an anxious person benefit from receiving?", "Can the Wizard of Oz provide #1?" ], "evidence": [ [ [ [ "Anxiety disorder-3", "Anxiety disorder-58" ] ], [ [ "The Wizard of Oz (1939 film)-8" ], "operation" ] ], [ [ [ "Anxiety-2", "Panic attack-47" ], "no_evidence" ], [ [ "The Wonderful Wizard of Oz-10" ], "no_evidence", "operation" ] ], [ [ [ "Anxiety-1", "Courage-1" ] ], [ [ "The Wonderful Wizard of Oz-13" ], "operation" ] ] ] }, { "qid": "b35f58b61b53c487d1ab", "term": "Polyamory", "description": "Practice of or desire for intimate relationships with more than one partner", "question": "Is polyamory allowed in the Catholic Church?", "answer": false, "facts": [ "A central tenet of the Catholic Church is a one-to-one match between man and woman.", "The ten commandments claim that \"coveting your neighbors wife\" is a sin." ], "decomposition": [ "What is Polyamory?", "Is #1 allowed in catholic churches?" ], "evidence": [ [ [ [ "Polyamory-10" ] ], [ [ "Polygamy-34" ] ] ], [ [ [ "Polyamory-1" ] ], [ [ "Catholic Church-66" ], "operation" ] ], [ [ [ "Polyamory-1" ] ], [ [ "Religion and sexuality-16" ] ] ] ] }, { "qid": "6296b8c3a16680a826eb", "term": "Police officer", "description": "warranted employee of a police force", "question": "Does a Generation Y member satisfy NYPD police officer age requirement?", "answer": true, "facts": [ "The NYPD has a minimum age requirement of 21.", "Members of Generation Y were born between 1980 and 1994." ], "decomposition": [ "How old do you have to be to be an NYPD officer?", "How old are Generation Y members currently?", "Is #2 higher than #1?" ], "evidence": [ [ [ [ "New York City Police Department Auxiliary Police-31" ], "no_evidence" ], [ [ "Millennials-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Millennials-1" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Millennials-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "72d9416ee7093deb9eab", "term": "Alice's Adventures in Wonderland", "description": "book by Lewis Carroll", "question": "Could the main character of \"Alice's Adventures in Wonderland\" join a Masonic Lodge?", "answer": false, "facts": [ "The main character of \"Alice's Adventures in Wonderland\" is Alice, a young girl.", "Masonic Lodge membership is restricted to men over the age of either 18 or 21, depending on jurisdiction." ], "decomposition": [ "Who is the main character of \"Alice's Adventures in Wonderland\"?", "Does #1 meet the age and gender requirements for Masonic Lodge membership?" 
], "evidence": [ [ [ [ "Alice's Adventures in Wonderland-1" ] ], [ [ "Masonic lodge-11" ], "operation" ] ], [ [ [ "Alice's Adventures in Wonderland-1" ] ], [ [ "Freemasonry-3" ], "no_evidence", "operation" ] ], [ [ [ "Alice's Adventures in Wonderland-1" ] ], [ [ "Freemasonry-14", "Girl-1" ] ] ] ] }, { "qid": "e6391d901dcc8a269c79", "term": "Fairy", "description": "mythical being or legendary creature", "question": "Did King James I despise fairy beings?", "answer": true, "facts": [ "King James I wrote Daemonologie in which he stated that a fairy was a being that could act as a familiar.", "A familiar was an animal or spirit that conspired with The Devil.", "King James I presided over the execution of Agnes Sampson.", "Agnes Sampson was accused of conspiring with familiars and was burned at the stake." ], "decomposition": [ "What did King James I claim that fairies could act as in his book 'Daemonologie'", "Which beings did he execute Agnes Sampson for allegedly conspiring with?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Daemonologie-8" ], "no_evidence" ], [ [ "Agnes Sampson-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Daemonologie-15" ] ], [ [ "Agnes Sampson-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Daemonologie-6" ], "no_evidence" ], [ [ "Agnes Sampson-10", "Agnes Sampson-5", "Agnes Sampson-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "8acc02738825ee8a68b6", "term": "Evander Holyfield", "description": "American boxer", "question": "Did Evander Holyfield compete in an Olympics hosted in the western hemisphere?", "answer": true, "facts": [ "Evander Holyfield won a bronze medal during the 1984 Summer Olympics.", "The 1984 Olympics were held in Los Angeles, California.", "California is in the United States, which is located entirely within the western hemisphere." ], "decomposition": [ "Which Olympic games have been held in the Western Hemisphere?", "Did Evander Holyfield compete in any events listed in #1?" ], "evidence": [ [ [ [ "1984 Summer Olympics-1" ], "no_evidence" ], [ [ "Evander Holyfield-2" ], "operation" ] ], [ [ [ "1984 Summer Olympics-1", "Western Hemisphere-3" ], "no_evidence" ], [ [ "Evander Holyfield-2" ] ] ], [ [ [ "1984 Summer Olympics-1" ] ], [ [ "Evander Holyfield-2" ], "operation" ] ] ] }, { "qid": "0a87a031e804912db8df", "term": "Harry Houdini", "description": "American magician, escapologist, and stunt performer", "question": "Did Harry Houdini's wife make psychics look foolish?", "answer": true, "facts": [ "Psychics are people that claim to have special powers to talk to the dead.", "Harry Houdini gave his wife a code word to ask psychics to repeat after his death.", "The wife of Harry Houdini, Wilhelmina Beatrice \"Bess\" Rahner, asked several psychics for the code word and none knew it." ], "decomposition": [ "What did Harry Houdini give to his wife to test psychics after his death?", "Were psychics unable to provide #1?" 
], "evidence": [ [ [ [ "Harry Houdini-60" ] ], [ [ "Harry Houdini-60" ], "operation" ] ], [ [ [ "Harry Houdini-60" ] ], [ [ "Harry Houdini-60" ], "operation" ] ], [ [ [ "Harry Houdini-60" ] ], [ [ "Bess Houdini-6" ], "operation" ] ] ] }, { "qid": "f95956c13e22f073310a", "term": "Handedness", "description": "Better performance or individual preference for use of a hand", "question": "Does handedness determine how you use American Sign Language?", "answer": true, "facts": [ "Your dominant hand typically performs the moving part of a sign in ASL.", "Your dominant hand determines the hand you use to finger spell in ASL." ], "decomposition": [ "Does the dominant hand perform different functions than the other in ASL?" ], "evidence": [ [ [ [ "American Sign Language-40" ], "no_evidence" ] ], [ [ [ "American Sign Language-1", "American Sign Language-29" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ] ] ] }, { "qid": "9883d6c816d4855035ea", "term": "Onion", "description": "vegetable", "question": "Do onions have a form that resembles the inside of a tree?", "answer": true, "facts": [ "When bisected, an onion has rings that extend from the core to the outside.", "Trees are formed of a series of rings that extend from the inside to the outside" ], "decomposition": [ "What is the structure observed in an onion when it is cut open?", "What is the structure of a tree's cross section?", "Is #1 similar to #2?" ], "evidence": [ [ [ [ "Fried onion-8" ] ], [ [ "International Tree-Ring Data Bank-1" ] ], [ [ "International Tree-Ring Data Bank-1" ], "operation" ] ], [ [ [ "Onion-13" ] ], [ [ "Tree-28" ] ], [ "operation" ] ], [ [ [ "Onion-13" ] ], [ [ "Dendrochronology-7" ] ], [ "operation" ] ] ] }, { "qid": "660bfdc17b47f42facac", "term": "Moose", "description": "A genus of mammals belonging to the deer, muntjac, roe deer, reindeer, and moose family of ruminants", "question": "Are moose used for work near the kingdom of Arendelle?", "answer": true, "facts": [ "The opening scene of Disney's Frozen shows a group of ice breakers.", "They have moose that carry the heavy ice blocks.", "One of them, Kristoff, becomes separated with his moose Sven.", "When Queen Elsa flees Arendelle and Princess Anna gives chase, she quickly encounters Kristoff." ], "decomposition": [ "What show is the kingdom of Arendelle from?", "In the opening scene of #1, what are a group of men doing?", "Are moose used to carry #2?" ], "evidence": [ [ [ [ "Arendelle: World of Frozen-1", "Elsa (Frozen)-26" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Frozen (2013 film)-45" ] ], [ [ "Frozen (2013 film)-44" ], "no_evidence" ], [ [ "Moose-73" ], "operation" ] ], [ [ [ "Frozen (2013 film)-6" ] ], [ "no_evidence" ], [ [ "Moose-9" ], "no_evidence", "operation" ] ] ] }, { "qid": "ba67c57c41ae212177c8", "term": "Eleventh grade", "description": "educational year", "question": "Is eleventh grade required to get a driver's licence?", "answer": false, "facts": [ "Eleventh grade is an educational year in high school.", "Many high schools offer driver's education classes.", "Drivers education classes can be taken outside by other organizationsof high school.", "One must pass a driving test to obtain a drivers license." ], "decomposition": [ "What criteria must be met to obtain a driver's license in the US?", "Is passing the eleventh grade required to meet #1?" 
], "evidence": [ [ [ [ "Driver's licenses in the United States-10" ] ], [ "operation" ] ], [ [ [ "Driver's license-3" ] ], [ "operation" ] ], [ [ [ "Driver's license-3" ] ], [ "operation" ] ] ] }, { "qid": "a18c59e77cc176f748b2", "term": "Blue", "description": "A primary colour between purple and green", "question": "Do some home remedies result in your skin color turning blue?", "answer": true, "facts": [ "Colloidal silver is a popular alternative treatment/home remedy that is used by some people.", "Ingestion of colloidal silver in high amounts can tint the skin blue." ], "decomposition": [ "What can cause skin color to change?", "Of #1, what changes can be caused by ingestion of something?", "Of #2, what causes skin color to become blue?", "Is #3 used in home remedies?" ], "evidence": [ [ [ [ "Argyria-1", "Argyria-6", "Carrot juice-3", "Drug-induced pigmentation-2" ] ], [ [ "Carrot juice-3", "Drug-induced pigmentation-2", "Medical uses of silver-21" ] ], [ [ "Argyria-1" ] ], [ [ "Argyria-5" ], "operation" ] ], [ [ [ "Human skin color-41", "Human skin color-56" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Cyanosis-1" ] ], [ [ "Methemoglobinemia-2" ] ], [ [ "Methemoglobinemia-6" ] ], [ [ "Benzocaine-5" ], "no_evidence" ] ] ] }, { "qid": "4fe90b9eab197be78729", "term": "Jack Kerouac", "description": "American writer", "question": "Was ethanol beneficial to Jack Kerouac's health?", "answer": false, "facts": [ "In 1969, at age 47, Kerouac died from an abdominal hemorrhage caused by a lifetime of heavy drinking of alcohol.", "Ethanol is the main ingredient in alcoholic beverages." ], "decomposition": [ "What did Jack Kerouac die from?", "Is there ethanol in #1? " ], "evidence": [ [ [ [ "Jack Kerouac-41" ] ], [ [ "Ethanol-1" ], "operation" ] ], [ [ [ "Jack Kerouac-41" ] ], [ [ "Jack Kerouac-41" ], "no_evidence" ] ], [ [ [ "Jack Kerouac-41" ] ], [ [ "Ethanol-1" ] ] ] ] }, { "qid": "89a19bd8f2bea335bca1", "term": "Doctor Strange", "description": "Superhero appearing in Marvel Comics publications and related media", "question": "Did Doctor Strange creators also make Batman?", "answer": false, "facts": [ "Doctor Strange is a superhero created by Steve Ditko and Stan Lee.", "Batman is a DC comics superhero.", "Stan Lee worked for Marvel comics, the competitor of DC comics.", "Steve Ditko worked for DC late in his career and worked on Blue Beetle, the Question, the Creeper, Shade the Changing Man, and Hawk and Dove." ], "decomposition": [ "Who were the creators of the fictional character 'Doctor Strange'?", "Who were the creators of the fictional character 'Batman'?", "Are #1 the same as #2?" ], "evidence": [ [ [ [ "Doctor Strange-1" ] ], [ [ "Batman-1" ] ], [ "operation" ] ], [ [ [ "Doctor Strange-1" ] ], [ [ "Batman-1" ] ], [ "operation" ] ], [ [ [ "Doctor Strange-1" ] ], [ [ "Batman-1" ] ], [ "operation" ] ] ] }, { "qid": "347eff30928ba07fa567", "term": "Charlemagne", "description": "King of the Franks, King of Italy, and Holy Roman Emperor", "question": "Was Charlemagne's father instrumental in outcome of the Battle of Tours?", "answer": false, "facts": [ "Charlemagne's father was Pepin the Short.", "Pepin the Short's father was Charles Martel.", "Charles Martel led an army against the Umayyads at the Battle of Tours.", "Pepin the Short spent his early years being raised by monks." ], "decomposition": [ "Who was Charlemagne's father?", "Was #1 involved in the Battle of Tours?" 
], "evidence": [ [ [ [ "Charlemagne-15" ] ], [ [ "Battle of Tours-1", "Pepin the Short-1" ], "no_evidence", "operation" ] ], [ [ [ "Pepin the Short-5" ] ], [ [ "Battle of Tours-1" ] ] ], [ [ [ "Pepin the Short-5" ] ], [ [ "Battle of Tours-60" ] ] ] ] }, { "qid": "6c1f7619f21201712194", "term": "Ham", "description": "Pork from a leg cut that has been preserved by wet or dry curing, with or without smoking", "question": "Would Janet Jackson avoid a dish with ham?", "answer": true, "facts": [ "Janet Jackson follows an Islamic practice. ", "Islamic culture avoids eating pork.", "Ham is made from pork." ], "decomposition": [ "What is Janet Jackson's religion?", "Which foods are prohibited by #1?", "What type of food is ham?", "Is #3 included in #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Ham-1" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Ham-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Janet Jackson-5" ] ], [ "no_evidence" ], [ [ "Ham-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "9f1ea9c12c748ea7e456", "term": "Giant panda", "description": "species of mammal", "question": "Can giant pandas sell out a Metallica show?", "answer": false, "facts": [ "Metallica concerts are held in large arenas attended by tens of thousands of fans", "The highest estimate for the giant panda population is around 3,000 animals" ], "decomposition": [ "How many people can the large arenas where Metallica plays hold?", "How many giant pandas are there?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Giant panda-49" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Metallica (album)-14" ], "no_evidence" ], [ [ "Giant panda-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Metallica-13" ] ], [ [ "Giant panda-49" ] ], [ "operation" ] ] ] }, { "qid": "f32228b474fc1ff18d59", "term": "Armadillo", "description": "family of mammals", "question": "Would multiple average rulers be necessary to measure the length of a giant armadillo?", "answer": true, "facts": [ "The average ruler is 12 inches or 30 centimeters in length.", "The typical length of the giant armadillo is 75–100 cm (30–39 in), with the tail adding another 50 cm (20 in)." ], "decomposition": [ "What length are the best selling rulers on Amazon?", "How long is a typical giant armadillo?", "What is #2 divided by #1?", "Is #3 greater than one?" ], "evidence": [ [ [ [ "Ruler-2" ], "no_evidence" ], [ [ "Giant armadillo-6" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Giant armadillo-5" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Ruler-2" ], "no_evidence" ], [ [ "Giant armadillo-5" ], "no_evidence" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "7fc117f83a13b80e0e09", "term": "Hippopotamus", "description": "A large, mostly herbivorous, semiaquatic mammal native to sub-Saharan Africa", "question": "Can you only see hippopotamus in Africa?", "answer": false, "facts": [ "The United States has several zoos featuring hippopotamus.", "In the UK, you can see hippopotamus at the Marwell Zoo." ], "decomposition": [ "Where are animals kept for recreation/sightseeing?", "Can #1 that has hippopotamus be found only inside Africa?" 
], "evidence": [ [ [ [ "Zoo-1" ] ], [ [ "Hippopotamus-44", "Toledo, Ohio-1" ] ] ], [ [ [ "Hippopotamus-5" ] ], [ [ "Hippopotamus-13" ] ] ], [ [ [ "Zoo-1" ] ], [ [ "Hippopotamus-43" ], "operation" ] ] ] }, { "qid": "ac641c5074e03e422221", "term": "Kane (wrestler)", "description": "American professional wrestler, actor, businessman, and politician", "question": "Was Kane (wrestler) banned from WCW headquarters city?", "answer": false, "facts": [ "Kane (wrestler is a professional wrestler most known for his WWE tenure.", "Kane wrestled one match in WCW as Bruiser Mastino.", "WWE main rival WCW was headquartered in Atlanta, Georgia.", "Kane competed in an eight-man tag match at Wrestlemania XXVII in the Georgia Dome.", "The Georgia Dome was a stadium in Atlanta Georgia." ], "decomposition": [ "Where were the headquarters of the WCW?", "Did Kane never perform in #1?" ], "evidence": [ [ [ [ "World Championship Wrestling-4" ] ], [ [ "Royal Rumble (2002)-1", "Royal Rumble (2002)-15" ], "operation" ] ], [ [ [ "World Championship Wrestling-4" ] ], [ [ "Kane (wrestler)-1" ], "no_evidence", "operation" ] ], [ [ [ "World Championship Wrestling-4" ] ], [ [ "Royal Rumble (2002)-1", "Royal Rumble (2002)-15" ] ] ] ] }, { "qid": "cee315334a0a8419283c", "term": "Rumi", "description": "13th-century Persian poet", "question": "Was Rumi's work serialized in a magazine?", "answer": false, "facts": [ "Rumi was a poet who wrote poetry", "Magazines serialize long-form prose like novels" ], "decomposition": [ "When was the first magazine ever published?", "When was the poet Rumi active?", "Was #1 before #2?" ], "evidence": [ [ [ [ "Magazine-8" ] ], [ [ "Rumi-1" ] ], [ "operation" ] ], [ [ [ "The Gentleman's Magazine-1" ] ], [ [ "Rumi-1" ] ], [ "operation" ] ], [ [ [ "Magazine-8" ] ], [ [ "Rumi-1" ] ], [ "operation" ] ] ] }, { "qid": "2cc59f4d25398d251fd6", "term": "Olympia, Washington", "description": "State capital and city in Washington, United States", "question": "Is Olympia, Washington part of \"Ish river country\"?", "answer": true, "facts": [ "Poet Robert Sund called the Puget Sound region \"Ish River country\".", "Olympia is in the Puget Sound region." ], "decomposition": [ "Where is Ish river country? ", "What cities are located in #1?", "Is Olympia included in the list in #2?" ], "evidence": [ [ [ [ "Puget Sound region-1", "Puget Sound region-2" ] ], [ [ "Washington (state)-79" ] ], [ "operation" ] ], [ [ [ "Puget Sound region-1", "Puget Sound region-2" ] ], [ [ "Puget Sound region-1" ], "no_evidence" ], [ [ "Washington (state)-1" ], "operation" ] ], [ [ [ "Puget Sound region-2" ] ], [ [ "Puget Sound-4" ] ], [ "operation" ] ] ] }, { "qid": "8e05ce95646698ca8555", "term": "Strawberry", "description": "edible fruit", "question": "Would someone with back pain enjoy picking strawberries?", "answer": false, "facts": [ "Back pain may be worsened by repeated bending at the waist.", "Strawberries grow very close to the ground." ], "decomposition": [ "What are some common body postures that can aggravate back pain?", "At which position relative to the ground do strawberries grow?", "What posture would one have to assume to reach #2?", "Is #3 excluded from #1?" 
], "evidence": [ [ [ [ "Back pain-43" ], "no_evidence" ], [ [ "Strawberry-31" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Back pain-20" ] ], [ [ "Strawberry-31" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Back pain-43" ] ], [ [ "Strawberry-31" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e7730031d304759520ba", "term": "Prussia", "description": "state in Central Europe between 1525–1947", "question": "Was the Euro used in Prussia?", "answer": false, "facts": [ "Prussia was formally abolished in 1947.", "The Euro was introduced in 1992." ], "decomposition": [ "When was Prussia formally abolished?", "When was the Euro introduced?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Prussia-1" ] ], [ [ "Euro-18" ] ], [ "operation" ] ], [ [ [ "Monarchies in Europe-27" ] ], [ [ "Euro-23" ] ], [ "operation" ] ], [ [ [ "Prussia-2" ] ], [ [ "Euro-5" ] ], [ "operation" ] ] ] }, { "qid": "6541fe954f5c5a530fba", "term": "Eiffel Tower", "description": "Tower located on the Champ de Mars in Paris, France", "question": "Did Eiffel Tower contribute to a war victory?", "answer": true, "facts": [ "A radio transmitter located in the Eiffel Tower.", "This transmitter jammed German radio communications.", "This hindrance in German radio communications contributing to the Allied victory at the First Battle of the Marne." ], "decomposition": [ "What notable events in which Eiffel Tower was of primary importance took place during a war?", "Did any of #1 give a side an advantage during the said war?" ], "evidence": [ [ [ [ "Eiffel Tower-33" ] ], [ [ "Eiffel Tower-33" ] ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Eiffel Tower-33" ] ], [ [ "Eiffel Tower-33" ] ] ] ] }, { "qid": "83395bf81226b3f487a7", "term": "Indian Ocean", "description": "The ocean between Africa, Asia, Australia and Antarctica (or the Southern Ocean)", "question": "Has the Indian Ocean garbage patch not completed two full rotations of debris since its discovery?", "answer": true, "facts": [ "The Indian Ocean garbage patch was discovered in 2010", "The Indian Ocean garbage patch takes 6 years to complete a circulation " ], "decomposition": [ "When was the Indian Ocean garbage patch discovered?", "How long does it take for the Indian Ocean garbage patch to complete a rotation?", "How many years has it been since #1?", "Is #3 less than two times #2?" ], "evidence": [ [ [ [ "Indian Ocean garbage patch-1" ] ], [ "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Indian Ocean garbage patch-2" ] ], [ [ "Indian Ocean Gyre-3" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Indian Ocean garbage patch-2" ] ], [ [ "Indian Ocean Gyre-3" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "9975870880bf73b8644e", "term": "Armageddon", "description": "according to the Book of Revelation, the site of a battle during the end times", "question": "Do some religions look forward to armageddon?", "answer": true, "facts": [ "Evangelicals cite that we are living in the beginning of Armageddon and that the rapture will happen soon as a good thing.", "Jehova's Witnesses believe that destroying the present world system and Armageddon is imminent, and that the establishment of God's kingdom over the earth is the only solution for all problems faced by humanity" ], "decomposition": [ "Where does the concept of Armageddon has its roots?", "#1 is associated with which religion?", "Do adherents of #2 believe in and await the Armageddon?" 
], "evidence": [ [ [ [ "Armageddon-5" ] ], [ [ "Armageddon-4" ] ], [ [ "Armageddon-4" ], "operation" ] ], [ [ [ "Armageddon-1" ] ], [ [ "New Testament-1" ] ], [ [ "Armageddon-18", "Jehovah's Witnesses-30" ], "operation" ] ], [ [ [ "Armageddon-1" ] ], [ [ "Book of Revelation-1" ] ], [ [ "Rapture-40" ], "operation" ] ] ] }, { "qid": "e44e24c2c3b8599b85ad", "term": "War in Vietnam (1945–46)", "description": "Prelude to the Indochina Wars", "question": "Could a llama birth twice during War in Vietnam (1945-46)?", "answer": false, "facts": [ "The War in Vietnam (1945-46) lasted around 6 months.", "The gestation period for a llama is 11 months." ], "decomposition": [ "How long did the Vietnam war last?", "How long is llama gestational period?", "What is 2 times #2?", "Is #1 longer than #3?" ], "evidence": [ [ [ [ "Vietnam War-1" ] ], [ [ "Llama-23" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "War in Vietnam (1945–1946)-3" ] ], [ [ "Llama-23" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Vietnam War-1" ] ], [ [ "Llama-23" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "44c59a3ac10f2921a009", "term": "Rick and Morty", "description": "Animated sitcom", "question": "Is Rick and Morty considered an anime?", "answer": false, "facts": [ "Anime is a genre of animation that is hand drawn and is of Japanese origin.", "Rick and Morty is an American animated show." ], "decomposition": [ "What country does anime come from?", "Rick and Morty is an animated show from which country?", "Do #1 and #2 have the same answer?" ], "evidence": [ [ [ [ "Anime-1" ] ], [ [ "Rick and Morty-1" ] ], [ "operation" ] ], [ [ [ "Anime-1" ] ], [ [ "Rick and Morty-1" ] ], [ "operation" ] ], [ [ [ "Anime-10" ] ], [ [ "Rick and Morty-18" ] ], [ "operation" ] ] ] }, { "qid": "3528723781a97a0e5a7e", "term": "The Hague", "description": "City and municipality in South Holland, Netherlands", "question": "Does The Hague border multiple bodies of water?", "answer": false, "facts": [ "The Hague is in the Netherlands. ", "The Hague is in the Western part of the Netherlands. ", "The Netherlands borders the North Sea to its west. " ], "decomposition": [ "What country is the Hague located in?", "What bodies of water does #1 border on?", "What part of #1 is the Hague located in?", "How many bodies of water in #2 does #3 border?", "Is #4 greater than 1?" ], "evidence": [ [ [ [ "The Hague-1" ] ], [ [ "Netherlands-1" ] ], [ [ "The Hague-1" ] ], [ [ "The Hague-20" ], "operation" ], [ "operation" ] ], [ [ [ "The Hague-19" ] ], [ [ "Netherlands-1" ] ], [ [ "The Hague-20" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "The Hague-1" ] ], [ [ "Netherlands-1" ] ], [ [ "The Hague-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "b002c8cef0a1b3f84f7a", "term": "Chipmunk", "description": "Tribe of mammals (rodent (marmot))", "question": "Could a chipmunk fit 100 chocolate chips in his mouth?", "answer": false, "facts": [ "A chipmunk can fit up to two tbsp of food in his mouth.", "There are about 20-25 chocolate chips in a tbsp." ], "decomposition": [ "What is the carrying capacity of a chipmunks mouth in tbsp.?", "How many chocolate chips are in a tbsp?", "What is 100 divided by #2?", "Is #1 greater than #3?" 
], "evidence": [ [ [ [ "Cheek pouch-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Cheek pouch-6" ], "no_evidence" ], [ [ "Chocolate chip-1" ], "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Cheek pouch-6" ], "no_evidence" ], [ [ "Chocolate chip-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1dce60cffcb066e212b8", "term": "Anchovy", "description": "Family of fishes", "question": "Would a pescatarian be unable to eat anchovy pizza?", "answer": false, "facts": [ "Pescatarians do not eat red meat or chicken but do eat fish.", "Pescatarians have no restrictions with eating cheese." ], "decomposition": [ "What do pediatricians eat for source of meat?", "Is anchovy not included in #1?" ], "evidence": [ [ [ [ "Pescetarianism-1" ] ], [ [ "Anchovy-1" ] ] ], [ [ [ "Pescetarianism-1" ] ], [ [ "Anchovy-1", "Seafood-1" ] ] ], [ [ [ "Pescetarianism-1" ] ], [ [ "Anchovy-3" ] ] ] ] }, { "qid": "defd6e3da16a186503c0", "term": "Sophist", "description": "Specific kind of teacher in both Ancient Greece and in the Roman Empire", "question": "Would Sophist's have hypothetically made good lawyers?", "answer": true, "facts": [ "Sophist's were teachers in ancient Greece that used rhetoric.", "Lawyers must persuade juries that their side of the case is correct.", "Rhetoric is the ancient art of persuasion that was meant to sway audiences in specific situations." ], "decomposition": [ "What were Sophist's role in Ancient Greece?", "What did #1 use in their position?", "What do lawyers do in their position?", "Would #3 find #2 to be helpful?" ], "evidence": [ [ [ [ "Sophist-1" ] ], [ [ "Second Sophistic-3" ], "no_evidence" ], [ [ "Lawyer-7" ] ], [ "operation" ] ], [ [ [ "Sophist-1" ] ], [ "no_evidence" ], [ [ "Lawyer-1" ] ], [ "operation" ] ], [ [ [ "Sophist-1", "Sophist-9" ] ], [ [ "Hellenistic philosophy-3" ] ], [ [ "Lawyer-1" ] ], [ [ "Practice of law-1" ], "no_evidence" ] ] ] }, { "qid": "721d168ff5cc0b18c31b", "term": "Tonsillitis", "description": "Inflammation of the tonsils", "question": "Can fish get Tonsillitis?", "answer": false, "facts": [ "Tonsils are a pair of soft tissue masses located at the rear of the throat", "Tonsillitis is the inflammation of tonsils.", "Fish do not have tonsils.", "Tonsils are only found in mammals. " ], "decomposition": [ "What does Tonsillitis affect?", "What kinds of animals are #1 found in?", "Are fish #2?" ], "evidence": [ [ [ [ "Tonsillitis-1" ] ], [ [ "Tonsil-2" ] ], [ "operation" ] ], [ [ [ "Tonsillitis-1" ] ], [ [ "Tonsil-3" ] ], [ "operation" ] ], [ [ [ "Tonsillitis-1" ] ], [ [ "Tonsil-3" ] ], [ [ "Fish-1" ], "operation" ] ] ] }, { "qid": "c6ca93e6d29d85456bf3", "term": "Panthéon", "description": "mausoleum in Paris", "question": "Will Queen Elizabeth be buried in the Pantheon?", "answer": false, "facts": [ "Queen Elizabeth is the reigning monarch of the United Kingdom", "The Pantheon is a resting place for notable French citizens" ], "decomposition": [ "The Panthéon is reserved as a mausoleum for citizens of which country?", "Is Queen Elizabeth from #1?" 
], "evidence": [ [ [ [ "Panthéon-2" ] ], [ [ "Elizabeth II-2" ] ] ], [ [ [ "Panthéon-1" ] ], [ [ "Elizabeth II-1" ], "operation" ] ], [ [ [ "Panthéon-2" ] ], [ [ "Elizabeth II-2" ], "operation" ] ] ] }, { "qid": "29ee7da0020eb03888fb", "term": "Boolean algebra", "description": "Algebra involving variables containing only \"true\" and \"false\" (or 1 and 0) as values", "question": "Does coding rely on Boolean algebra characters?", "answer": true, "facts": [ "Boolean algebra uses the characters of 1 and 0 to represent true and false.", "Binary code is an essential part of computer coding.", "Binary code consists of the characters 0 and 1 which represents strings of value." ], "decomposition": [ "What characters does Boolean algebra use?", "What characters does binary code use?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Boolean algebra-1" ] ], [ [ "Binary code-1" ] ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary code-1" ] ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary code-1" ] ], [ "operation" ] ] ] }, { "qid": "236c7a57f3788a60e47f", "term": "Gandalf", "description": "Fictional character created by J. R. R. Tolkien", "question": "Was Gandalf present at the death of Eomer?", "answer": false, "facts": [ "Eomer died in a skirmish with orcs outside Rohan at the beginning of Two Towers.", "Gandalf had been killed by the Balrog at the end of Fellowship of the Ring.", "Gandalf returns with improved powers later on in Two Towers." ], "decomposition": [ "In which LOTR installment was Gandalf first killed?", "At what point in the LOTR franchise did Eomer die?", "When did Gandalf first reappear after #1?", "Did #2 take place outside of the period between #1 and #3?" ], "evidence": [ [ [ [ "The Lord of the Rings: The Fellowship of the Ring-8" ] ], [ [ "The Lord of the Rings: The Return of the King-10" ], "no_evidence" ], [ [ "The Lord of the Rings: The Two Towers-6" ] ], [ "no_evidence", "operation" ] ], [ [ [ "The Lord of the Rings: The Fellowship of the Ring-8" ] ], [ [ "The Lord of the Rings: The Return of the King-10" ] ], [ [ "The Lord of the Rings: The Two Towers-2" ] ], [ "operation" ] ], [ [ [ "Gandalf-27" ], "no_evidence" ], [ [ "Éomer-6" ], "no_evidence" ], [ [ "Gandalf-28" ] ], [ [ "Gandalf-31" ], "no_evidence", "operation" ] ] ] }, { "qid": "17fc5cdda68b55351597", "term": "Amy Winehouse", "description": "English singer and songwriter", "question": "Would Amy Winehouse's death have been prevented with Narcan?", "answer": false, "facts": [ "Narcan is a medication that save the life of someone overdosing on opiates.", "Amy Winehouse died from alcohol poisoning.", "Narcan cannot work on alcohol overdoses." ], "decomposition": [ "What was the cause of Amy Winehouse's death?", "What are the indications/symptoms that can be treated with Narcan?", "Is #1 included in #2?" 
], "evidence": [ [ [ [ "Amy Winehouse-4" ] ], [ [ "Naloxone-1" ] ], [ "operation" ] ], [ [ [ "Amy Winehouse-92" ] ], [ [ "Naloxone-4", "Naloxone-7" ] ], [ "operation" ] ], [ [ [ "Amy Winehouse-92" ] ], [ [ "Naloxone-1" ] ], [ "operation" ] ] ] }, { "qid": "722dc38bd849d8b6ec0f", "term": "Reproduction", "description": "Biological process by which new organisms are generated from one or more parent organisms", "question": "Are those incapable of reproduction incapable of parenthood?", "answer": false, "facts": [ "Surrogates are women who will carry a baby to term for a family seeking to adopt.", "Many children are put into the adoption and foster system every year and are always available to adopt, independent of the parents reproductive status." ], "decomposition": [ "What do surrogate mothers do?", "What purpose do adoption and foster systems serve?", "Do #1 and #2 fail to help couples incapable of reproduction become parents?" ], "evidence": [ [ [ [ "Mother-11" ] ], [ [ "Adoption-8" ] ], [ "operation" ] ], [ [ [ "Surrogacy-1", "Surrogacy-2" ] ], [ [ "Adoption-1", "Foster care-1" ] ], [ "operation" ] ], [ [ [ "Surrogacy-1" ] ], [ [ "Adoption-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "4fd64bb6ce5b78ab20b6", "term": "Mixed martial arts", "description": "full contact combat sport", "question": "Is Mixed martial arts totally original from Roman Colosseum games?", "answer": false, "facts": [ "Mixed Martial arts in the UFC takes place in an enclosed structure called The Octagon.", "The Roman Colosseum games were fought in enclosed arenas where combatants would fight until the last man was standing.", "Mixed martial arts contests are stopped when one of the combatants is incapacitated.", "The Roman Colosseum was performed in front of crowds that numbered in the tens of thousands.", "Over 56,000 people attended UFC 193." ], "decomposition": [ "What are the major features of UFC's Mixed martial arts?", "What were the major features of Roman Colosseum games?", "Is #1 a complete match with #2?" ], "evidence": [ [ [ [ "Mixed martial arts-1", "Ultimate Fighting Championship-95", "Ultimate Fighting Championship-97", "Ultimate Fighting Championship-99" ] ], [ [ "Gladiator-1", "Gladiator-37", "Gladiator-40" ] ], [ "operation" ] ], [ [ [ "Ultimate Fighting Championship-1" ] ], [ [ "Gladiator-1" ] ], [ "operation" ] ], [ [ [ "Mixed martial arts-83" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f7a32d14efd589015fe1", "term": "Herpes simplex virus", "description": "Species of virus", "question": "Can Herpes simplex virus spread on Venus?", "answer": false, "facts": [ "Herpes simplex virus is a disease that has the structure of a tiny protein cage.", "Venus is the hottest planet and its temperature can reach 900°F.", "Proteins lose their structure and break down at temperatures above 105.8°F." ], "decomposition": [ "What kind of organism is the Herpes simplex virus?", "What is the maximum temperature that #1 can withstand?", "What is the average temperature on Venus?", "Is #3 less than or equal to #2?" 
], "evidence": [ [ [ [ "Herpes simplex virus-1" ] ], [ [ "Virus-18" ], "no_evidence" ], [ [ "Venus-2" ] ], [ "operation" ] ], [ [ [ "Herpes simplex virus-1" ] ], [ [ "Sterilization (microbiology)-14" ], "no_evidence" ], [ [ "Venus-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Herpes simplex virus-1" ] ], [ "no_evidence" ], [ [ "Venus-2" ] ], [ "operation" ] ] ] }, { "qid": "29842ce280ccd460dd7e", "term": "Bern", "description": "Place in Switzerland", "question": "Is Bern located east of Paris?", "answer": true, "facts": [ "Paris is located in France.", "Bern is located in Switzerland.", "Switzerland borders France to the east." ], "decomposition": [ "What country is Paris located in?", "What country is Bern located in?", "Is #2 located east of #1?" ], "evidence": [ [ [ [ "Administration of Paris-4" ] ], [ [ "Districts of Switzerland-10" ] ], [ [ "France-1" ] ] ], [ [ [ "Paris-1" ] ], [ [ "Bern-1" ] ], [ "operation" ] ], [ [ [ "Paris-1" ] ], [ [ "Bern-1" ] ], [ [ "Switzerland-1" ] ] ] ] }, { "qid": "234e4eccb374a4191c86", "term": "Eighth Amendment to the United States Constitution", "description": "prohibits cruel and unusual punishment and excessive bail", "question": "Would keelhauling be a fair punishment under the Eighth Amendment?", "answer": false, "facts": [ "Keelhauling was a severe punishment whereby the condemned man was dragged beneath the ship’s keel on a rope.", "Keelhauling is considered a form of torture.", "Torture is considered cruel.", "The Eighth Amendment forbids the use of \"cruel and unusual punishment\"." ], "decomposition": [ "What kind of punishment is keelhauling considered a form of?", "Does the Eighth Amendment allow #1?" ], "evidence": [ [ [ [ "Keelhauling-1" ] ], [ [ "Eighth Amendment to the United States Constitution-1" ], "operation" ] ], [ [ [ "Keelhauling-2" ] ], [ [ "United States constitutional sentencing law-4" ] ] ], [ [ [ "Keelhauling-6" ] ], [ [ "Eighth Amendment to the United States Constitution-29" ] ] ] ] }, { "qid": "e2d24b9e3cb4133c68b0", "term": "Armadillo", "description": "family of mammals", "question": "Would Franz Ferdinand have survived with armadillo armor?", "answer": false, "facts": [ "Franz Ferdinand was the Archduke of Austria that was assassinated in 1914.", "Franz Ferdinand was assasinated with a FN Model 1910 pistol.", "Armadillos have a hard outer shell made of bony plates called osteoderms.", "The armadillos bony plates can withstand some force, but not a bullet." ], "decomposition": [ "How was Franz Ferdinand killed?", "Can armadillo armor withstand #1?" ], "evidence": [ [ [ [ "Archduke Franz Ferdinand of Austria-25" ] ], [ [ "Armadillo-2" ], "no_evidence", "operation" ] ], [ [ [ "Archduke Franz Ferdinand of Austria-23" ] ], [ [ "Armadillo-2" ] ] ], [ [ [ "Archduke Franz Ferdinand of Austria-25" ] ], [ [ "Armadillo-2" ], "no_evidence" ] ] ] }, { "qid": "03caedef50bc6729e9e5", "term": "Railroad engineer", "description": "person who operates a train on a railroad or railway", "question": "Can you find a railroad engineer on TNT?", "answer": true, "facts": [ "TNT is a cable television network", "Snowpiercer is a show airing on the TNT network", "Snowpiercer involves people living on an active train in a postapocalyptic future", "A railroad engineer is necessary to keep a train running" ], "decomposition": [ "What movies are about trains or railroads?", "Are any of #1 currently airing on TNT?" 
], "evidence": [ [ [ [ "Murder on the Orient Express (2017 film)-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Snowpiercer-1", "Snowpiercer-46" ], "no_evidence" ], [ [ "Snowpiercer-46" ], "operation" ] ], [ [ [ "Planes, Trains and Automobiles-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "0bde4bdfecce47fbf7de", "term": "Amazon (company)", "description": "American electronic commerce and cloud computing company", "question": "Could Amazon afford The Mona Lisa?", "answer": true, "facts": [ "Amazon is worth over $1 trillion and had a revenue of $232.887 billion in 2018.", "The Mona Lisa had an insurance valuation equivalent to $650 million as of 2018." ], "decomposition": [ "How much is Amazon worth?", "How much is the Mona Lisa worth?", "Is #1 more than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Mona Lisa-55" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Amazon (company)-64" ] ], [ [ "Mona Lisa-55" ] ], [ "operation" ] ], [ [ [ "Amazon (company)-65" ] ], [ [ "Mona Lisa-55" ] ], [ "operation" ] ] ] }, { "qid": "0b201a9520a4745fcd59", "term": "Martin Luther", "description": "Saxon priest, monk and theologian, seminal figure in Protestant Reformation", "question": "Was Martin Luther same sect as Martin Luther King Jr.?", "answer": false, "facts": [ "Martin Luther was a Catholic friar that began the movement of Protestantism after he aired several grievances against the church.", "Martin Luther King Jr. was a Baptist minister.", "Baptists form a major branch of Protestantism.", "Baptists trace their Protestantism to the English Separatist movement of the 1600s.", "Martin Luther lived from 1483-1546." ], "decomposition": [ "Which religious denomination did Martin Luther belong to for the significant part of his life?", "Which religious denomination did Martin Luther King Jr. identify with for the significant part of his life?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Martin Luther-88" ] ], [ [ "Martin Luther King Jr.-22" ] ], [ [ "Martin Luther-88" ], "operation" ] ], [ [ [ "Martin Luther-1", "Order of Saint Augustine-1" ] ], [ [ "Calvary Baptist Church (Chester, Pennsylvania)-1", "Martin Luther King Jr.-18" ] ], [ "operation" ] ], [ [ [ "Martin Luther-1" ] ], [ [ "Martin Luther King Jr.-108" ] ], [ "operation" ] ] ] }, { "qid": "6e939983f45cfcba8caa", "term": "Jean-Paul Sartre", "description": "French existentialist philosopher, playwright, novelist, screenwriter, political activist, biographer, and literary critic", "question": "Did Sartre write a play about Hell?", "answer": true, "facts": [ "In 1944, Sartre released No Exit.", "No Exit is a play about three people mysteriously locked in a room together.", "Late in the play, it is revealed the room is a version of Hell." ], "decomposition": [ "What is Jean-Paul Sartre's most famous play?", "What is the plot of #1?", "Is Hell a critical element of #2?" ], "evidence": [ [ [ [ "No Exit-1" ] ], [ [ "No Exit-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "No Exit-1" ] ], [ [ "No Exit-3" ] ], [ "operation" ] ], [ [ [ "Jean-Paul Sartre-62" ] ], [ [ "No Exit-3" ] ], [ [ "No Exit-3" ] ] ] ] }, { "qid": "5bd310c98053eff5445e", "term": "Rowing (sport)", "description": "Sport where individuals or teams row boats by oar", "question": "Can rowing competitions take place indoors?", "answer": false, "facts": [ "Rowing is a sport involving propelling boats.", "Boats need a large body of water in order to move.", "There are no indoor facilities big enough to host a pool with enough size for a boating competition." 
], "decomposition": [ "What is the main equipment required for rowing?", "What surface does #1 need in order to move?", "Is there an indoor facility with a big enough amount of #2 to host a competition?" ], "evidence": [ [ [ [ "Rowing-1" ] ], [ [ "Rowing-1" ] ], [ [ "Indoor rowing at the 2017 World Games-1" ] ] ], [ [ [ "Rowing-1" ] ], [ [ "Rowing-1" ] ], [ [ "Olympic-size swimming pool-1" ], "no_evidence", "operation" ] ], [ [ [ "Rowing (sport)-20" ] ], [ [ "Rowing (sport)-20" ] ], [ "no_evidence" ] ] ] }, { "qid": "d477996cc5bfc3451a92", "term": "Middle Ages", "description": "Period of European history from the 5th to the 15th century", "question": "Were there fifty English kings throughout the Middle Ages?", "answer": false, "facts": [ "The Middle Ages was a period of history from 476-1453 AD.", "From 476 to 1453 AD there were around 36 Kings of England including disputed claimants to the throne." ], "decomposition": [ "Which span of time is referred to as the Middle Ages?", "How many kings ruled England through #1?", "Is #2 equal to fifty?" ], "evidence": [ [ [ [ "Middle Ages-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Outline of the Middle Ages-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Middle Ages-1" ] ], [ [ "History of Anglo-Saxon England-35" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3fc11f59d64ea3b65136", "term": "Thanksgiving (United States)", "description": "holiday celebrated in the United States on the fourth Thursday in November", "question": "Is Thanksgiving sometimes considered a day of mourning?", "answer": true, "facts": [ "The Native American People in the United States were brutalized during the colonization period.", "Native Americans in the US often choose to mourn the genocide of their people on Thanksgiving." ], "decomposition": [ "When do Native Americans often choose to mourn the genocide of their people?", "Is Thanksgiving included in #1?" ], "evidence": [ [ [ [ "National Day of Mourning (United States protest)-1" ] ], [ [ "National Day of Mourning (United States protest)-1" ], "operation" ] ], [ [ [ "National Day of Mourning (United States protest)-1", "National Day of Mourning (United States protest)-17" ] ], [ "operation" ] ], [ [ [ "National Day of Mourning (United States protest)-17" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "fabf020bf07e0445c50c", "term": "Sea shanty", "description": "work song sung to accompany labor on board large merchant sailing vessels", "question": "Does Jack Sparrow know any sea shantys?", "answer": true, "facts": [ "Jack Sparrow is the main character of the popular 'Pirates of the Caribbean' movie franchise.", "Jack Sparrow is the captain of a pirate ship.", "Jack Sparrow sings many songs while on the sea." ], "decomposition": [ "Which movie is Jack Sparrow a main character in?", "Which activity is associated with singing of sea shantys?", "As portrayed in #1, is Jack Sparrow in a position to engage in #2?" 
], "evidence": [ [ [ [ "Jack Sparrow-1" ] ], [ [ "Sea shanty-1" ] ], [ "operation" ] ], [ [ [ "Jack Sparrow-1" ] ], [ [ "Sea shanty-119" ] ], [ [ "Sea shanty-119" ] ] ], [ [ [ "Jack Sparrow-1" ] ], [ [ "Sea shanty-39", "Sea shanty-4" ] ], [ [ "Jack Sparrow-1" ] ] ] ] }, { "qid": "4760fe3d0c80beb30edd", "term": "Honey", "description": "Sweet food made by bees mostly using nectar from flowers", "question": "If someone is a vegan, would they eat honey?", "answer": false, "facts": [ "Veganism is a type of diet that excludes all animal products, including those that are byproducts. ", "Honey is considered an animal byproduct. " ], "decomposition": [ "Do vegans eat animal byproducts?", "Is honey considered an animal byproduct?", "Are the answers to #1 and #2 the same?" ], "evidence": [ [ [ [ "Veganism-1" ] ], [ [ "Honey-1" ] ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Honey-1" ] ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Honey-1" ] ], [ "operation" ] ] ] }, { "qid": "8cf9e4fddb2265d2ed89", "term": "Mongoose", "description": "family of mammals", "question": "Does a mongoose have natural camouflage for desert?", "answer": true, "facts": [ "The most common fur colors of mongooses are brown and gray.", "The Desert Camouflage color is made of Café Au Lait brown and Pastel Gray." ], "decomposition": [ "What colors are mongoose?", "What colors are desert camouflage?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Mongoose-5" ] ], [ [ "Desert Camouflage Uniform-1" ] ], [ [ "Desert Camouflage Uniform-1", "Mongoose-5" ] ] ], [ [ [ "Egyptian mongoose-2" ], "no_evidence" ], [ [ "Desert Camouflage Uniform-1" ] ], [ "operation" ] ], [ [ [ "Indian brown mongoose-2" ], "no_evidence" ], [ [ "Desert Camouflage Uniform-1" ] ], [ "operation" ] ] ] }, { "qid": "40b7c34188d5b36bc486", "term": "Lamborghini", "description": "Italian car manufacturer", "question": "Can Lamborghini's fastest model win a race against a Porsche 911?", "answer": true, "facts": [ "Lamborghini's fastest model is the Lamborghini Aventador SVJ Roadster.", "The Lamborghini Aventador SVJ Roadster has a top speed of 217 MPH.", "The Porsche 911 has a top speed of 191 MPH." ], "decomposition": [ "Which model of Lamborghini is the fastest?", "What is the top speed of #1?", "What is the top speed of a Porsche 911?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Lamborghini Veneno-1" ] ], [ [ "Lamborghini Veneno-7" ] ], [ [ "Porsche 911-133" ] ], [ "operation" ] ], [ [ [ "Lamborghini Aventador-14" ], "no_evidence" ], [ [ "Lamborghini Aventador-14" ] ], [ [ "Porsche 911-129" ] ], [ "operation" ] ], [ [ [ "Fastest Car-1" ] ], [ [ "Lamborghini Aventador-14" ] ], [ [ "Porsche 911-94" ] ], [ "operation" ] ] ] }, { "qid": "c6678ced9e10fc1c03f1", "term": "Second Amendment to the United States Constitution", "description": "Part of the Bill of Rights, regarding the right to bear arms", "question": "Was the Second Amendment to the United States Constitution written without consideration for black Americans?", "answer": true, "facts": [ "The writers of the Constitutional Amendments did not view black people as legitimate human beings.", "The writers of the Constitutional Amendments believed that slavery benefited black slaves.", "The Constitutional Amendments were written for people that the writers considered human." ], "decomposition": [ "Who were the writers of the Constitutional Amendments?", "Who was the the Constitutional Amendments written for?", "Did #1 fail to view black Americans as #2?" 
], "evidence": [ [ [ [ "Constitution of the United States-63" ], "no_evidence" ], [ [ "Constitution of the United States-51" ], "no_evidence" ], [ [ "African Americans-20" ], "operation" ] ], [ [ [ "United States Bill of Rights-2" ], "no_evidence" ], [ [ "Constitution of the United States-132" ], "no_evidence" ], [ [ "Reconstruction Amendments-2" ], "no_evidence", "operation" ] ], [ [ [ "Ratification-18" ] ], [ [ "Constitution of the United States-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "526259cd504bffe7f888", "term": "Gladiator", "description": "combatant who entertained audiences in the Roman Republic and Roman Empire", "question": "Could a Gladiator's weapon crush a diamond?", "answer": false, "facts": [ "Gladiators used a sword known as a Gladius.", "The Gladius was a short sword made from various elements of steel.", "Diamond is one the hardest known substances on earth.", "Only diamond can be used to cut another diamond." ], "decomposition": [ "What material were Gladiator weapons made from?", "Can #1 crush a diamond?" ], "evidence": [ [ [ [ "Gladius-16" ] ], [ [ "Diamond-1", "Diamond-15" ], "no_evidence", "operation" ] ], [ [ [ "Gladiator-36" ] ], [ [ "Diamond-15" ], "no_evidence" ] ], [ [ [ "Gladius-9" ] ], [ [ "Diamond-15" ], "operation" ] ] ] }, { "qid": "76a9327740bf5e4fd9ed", "term": "Republic of Korea Navy", "description": "Naval warfare branch of South Korea's military", "question": "Would Republic of Korea Navy dominate Eritrea navy?", "answer": true, "facts": [ "The Republic of Korea Navy has 150 ships, 70 aircraft, 70,000 personnel including 29,000 marines .", "The Eritrean Navy has 4 ships and an army of 45,000." ], "decomposition": [ "How many ships are in the Republic of Korea's navy?", "How many ships are in the Eritrean Navy?", "How many people are in the Republic of Korea's navy?", "How many people are in the Eritrean navy?", "Is #1 greater than #2 and is #3 greater than #4?" ], "evidence": [ [ [ [ "Republic of Korea Navy-1" ] ], [ [ "Eritrean Navy-4" ] ], [ [ "Republic of Korea Navy-1" ] ], [ [ "Eritrean Defence Forces-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Republic of Korea Navy-1" ] ], [ [ "Eritrean Navy-2" ], "no_evidence" ], [ [ "Republic of Korea Navy-10" ] ], [ [ "Eritrean Defence Forces-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Republic of Korea Navy-81" ] ], [ [ "Eritrean Navy-1" ], "no_evidence" ], [ [ "Republic of Korea Navy-1" ] ], [ [ "Eritrean Defence Forces-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "65557713351aed7519b8", "term": "Toyota Prius", "description": "Hybrid electric automobile", "question": "Could someone have arrived at Wrestlemania X in a Toyota Prius?", "answer": false, "facts": [ "Wrestlemania X took place in 1994", "The Toyota Prius was first manufactured in 1997" ], "decomposition": [ "When did Wrestlemania X hold?", "When was the Toyota Prius first manufactured?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "WrestleMania X-1" ] ], [ [ "Toyota Prius-1" ] ], [ "operation" ] ], [ [ [ "WrestleMania X-1" ] ], [ [ "Toyota Prius-1" ] ], [ "operation" ] ], [ [ [ "WrestleMania X-1" ] ], [ [ "Toyota Prius-1" ] ], [ "operation" ] ] ] }, { "qid": "7bb5eb593f8cee3b4f5e", "term": "Guam", "description": "Island territory of the United States of America", "question": "Could the first European visitor to Guam been friends with Queen Victoria?", "answer": false, "facts": [ "Portuguese explorer Ferdinand Magellan, while in the service of Spain, was the first European to visit the island.", "Magellan died in 1521.", "Queen Victoria was born in 1819." ], "decomposition": [ "Who was the first European visitor to Guam?", "When did #1 die?", "When was Queen Victoria born?", "Did #3 come before #2?" ], "evidence": [ [ [ [ "The Boy Who Was-11" ], "no_evidence" ], [ "no_evidence" ], [ [ "Queen Victoria-6" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Guam-3" ] ], [ [ "Ferdinand Magellan-20" ] ], [ [ "Queen Victoria-4" ] ], [ "operation" ] ], [ [ [ "Guam-3" ] ], [ [ "Ferdinand Magellan-1" ] ], [ [ "Queen Victoria-1" ] ], [ "operation" ] ] ] }, { "qid": "0aa6769cb88143b3dba1", "term": "Adam Sandler", "description": "American actor, comedian, screenwriter, and producer", "question": "Would the average American family find Adam Sandler's home to be too small?", "answer": false, "facts": [ "The average American family has about 3 people in it.", "Adam Sandler's home has 14 bedrooms and 7 bathrooms." ], "decomposition": [ "How many people are in the average American family?", "How big is Adam Sandler's home?", "Would a home the size of #2 be too small for #1 people?" ], "evidence": [ [ [ [ "Nuclear family-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Adam Sandler-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Nuclear family-1" ], "no_evidence" ], [ [ "Adam Sandler-26" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7ced603c3d79a2f2739d", "term": "Santa Claus", "description": "Folkloric figure, said to deliver gifts to children on Christmas Eve", "question": "Does Santa Claus work during summer?", "answer": false, "facts": [ "Christmas is in winter.", "Santa works on Christmas." ], "decomposition": [ "What holiday does Santa Claus work on?", "Does #1 occur in the summer?" ], "evidence": [ [ [ [ "Santa Claus-1" ] ], [ [ "Christmas-1", "Summer-2" ] ] ], [ [ [ "Santa Claus-1" ] ], [ [ "Christmas-1" ] ] ], [ [ [ "Santa Claus-1" ] ], [ [ "Christmas-1" ], "no_evidence" ] ] ] }, { "qid": "c8e267b583f722ff485e", "term": "Constitution of the United States", "description": "Supreme law of the United States of America", "question": "Is the Hobbit more profitable for proofreader than Constitution of the United States?", "answer": true, "facts": [ "Proofreaders typically get paid per the number of words in a document.", "The Constitution of the United States contains around 7,500 words.", "The Hobbit contains 95,356 words." ], "decomposition": [ "How many words are in the US Constitution?", "What classification is the Hobbit?", "How many words do books in #2 have?", "Is #3 greater than #1?" 
], "evidence": [ [ [ [ "Constitution-4" ], "no_evidence" ], [ [ "Hobbit-2" ] ], [ [ "Artamène-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Constitution of the United States-1", "Constitution of the United States-2" ] ], [ [ "The Hobbit-1" ], "no_evidence" ], [ [ "The Hobbit-20" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "State constitution (United States)-2" ] ], [ [ "Hobbit-7" ] ], [ [ "The Hobbit-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "14ef04964c1b786dc45e", "term": "Holy Spirit", "description": "Religious concept with varied meanings", "question": "Is Krishna similar to Holy Spirit?", "answer": true, "facts": [ "The Holy Spirit is a Christian concept of a spirit that is an aspect or agent of God that does good in the world.", "Krishna, from Hinduism, is a manifestation of the God Vishnu.", "Krishna brings compassion, tenderness, and love into the world." ], "decomposition": [ "What are the characteristics of the Christian Holy Spirit?", "What are the characteristics of Krishna?", "Are many characteristics in #2 also found in #1?" ], "evidence": [ [ [ [ "Holy Spirit in Christianity-4" ] ], [ [ "Krishna-1" ] ], [ "no_evidence" ] ], [ [ [ "Holy Spirit-1" ] ], [ [ "Krishna-1" ] ], [ [ "Krishna-1" ] ] ], [ [ [ "God in Abrahamic religions-9", "Holy Spirit-1" ] ], [ [ "Krishna-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d8eaf52f02c5cfb98bce", "term": "Sacrum", "description": "Triangular-shaped bone at the bottom of the spine", "question": "Do human sacrums have more fused vertebrae than an Alaskan Malamute?", "answer": true, "facts": [ "The human sacrum consists of five fused vertebrae.", "An Alaskan Malamute is a large domestic dog breed.", "Dogs have three fused vertebrae attached to their sacrums." ], "decomposition": [ "How many vertebrae are found in the human sacrum?", "What species of animal is an Alaskan Malamute?", "How many vertebrae are found in a #2's sacrum?", "Is #1 greater than #3?" ], "evidence": [ [ [ [ "Sacrum-1" ] ], [ [ "Alaskan Malamute-1" ] ], [ [ "Dog anatomy-54", "Nuchal ligament-10" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Sacrum-1" ] ], [ [ "Alaskan Malamute-1" ] ], [ [ "Sacrum-4" ] ], [ "operation" ] ], [ [ [ "Sacrum-1" ] ], [ [ "Alaskan Malamute-1" ] ], [ [ "Sacrum-4" ] ], [ "operation" ] ] ] }, { "qid": "bcc0ba30a471776d64a9", "term": "Snoop Dogg", "description": "American rapper", "question": "Was Snoop Dogg's debut studio album released on the weekend?", "answer": false, "facts": [ "Snoop Dogg's debut studio album was Doggystyle.", "Doggystyle was released on November 23, 1993.", "November 23, 1993 was a Tuesday.", "In the USA, the weekend consists of Saturday and Sunday." ], "decomposition": [ "What was Snoop Dogg's first studio album?", "When was #1 released?", "What day of the week did #2 occur on?", "What days are considered the weekend?", "Is #3 one of the answers in #4?" 
], "evidence": [ [ [ [ "Snoop Dogg-2" ] ], [ [ "Doggystyle-1" ] ], [ "no_evidence" ], [ [ "Workweek and weekend-1" ] ], [ "operation" ] ], [ [ [ "Doggystyle-1" ] ], [ [ "Doggystyle-1" ] ], [ "no_evidence" ], [ [ "Workweek and weekend-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Snoop Dogg-2" ] ], [ [ "Doggystyle-1" ] ], [ "no_evidence" ], [ [ "Workweek and weekend-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ee757afd1a9e0e96cbc7", "term": "Sable", "description": "Species of marten", "question": "Are Sable's a good choice of Mustelidae to weigh down a scale?", "answer": false, "facts": [ "Mustelidae is the scientific designation for animals that share similarities including polecats, sables, and ferrets.", "Polecats weigh between 2.2 and 3.3 pounds.", "Sable's weigh around 2.4 pounds.", "Ferrets can weigh up to 44 pounds.", "Sable's have sharp teeth and a painful bite and are outlawed in many states." ], "decomposition": [ "How much does a sable weigh?", "What are the weights of other common members of Mustelidae?", "Is #1 greater than all #2?" ], "evidence": [ [ [ [ "Sable-4" ] ], [ [ "Mustelidae-2" ] ], [ "operation" ] ], [ [ [ "Sable-4" ] ], [ [ "Mustelidae-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sable-4" ], "operation" ], [ [ "Mustelidae-4" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "162e5f5ec0f9d4a91cf8", "term": "Richard III of England", "description": "15th-century King of England", "question": "Was Richard III ruler of Adelaide?", "answer": false, "facts": [ "Richard III was King of England and Lord of Ireland from 1483-1485.", "Adelaide is a city in South Australia." ], "decomposition": [ "When was Richard III ruler of England?", "What country is Adelaide in?", "When was #2 ruled by England?", "Does #1 and #3 overlap?" ], "evidence": [ [ [ [ "Richard III of England-1" ] ], [ [ "Adelaide-1" ] ], [ [ "Australia-2" ] ], [ "operation" ] ], [ [ [ "Richard III of England-1" ] ], [ [ "Adelaide-1" ] ], [ [ "Australia-11" ] ], [ "operation" ] ], [ [ [ "Richard III of England-1" ] ], [ [ "Adelaide-1" ] ], [ [ "Australia-14" ] ], [ "operation" ] ] ] }, { "qid": "87c07cc6b730abde6d76", "term": "Sweet potato", "description": "species of plant", "question": "Do Sweet Potatoes prevent other plants from growing in their place?", "answer": true, "facts": [ "When sweet potato plants decompose, they release a chemical that prevents germination in their soil.", "Farmers will work to ensure that all parts of a sweet potato plant are out of the field before trying to grow in it again." ], "decomposition": [ "What chemical is released when sweet potatoes decompose?", "Where is #1 released into?", "Does #1 prevent other plants from growing in #2?" ], "evidence": [ [ [ [ "Sweet potato-19" ], "no_evidence" ], [ [ "Sweet potato-19" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Sweet potato-19" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Sweet potato storage-10" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "06f7878425a995c2a633", "term": "Spaghetti", "description": "Type of pasta", "question": "Should spaghetti be slick when cooked?", "answer": false, "facts": [ "Spaghetti is typically served with a sauce on it.", "When noodles have too smooth a texture, no sauce will stick to them." ], "decomposition": [ "What is typically served on top of spaghetti?", "Would #1 be able to stick if the spaghetti were slick?" 
], "evidence": [ [ [ [ "Spaghetti-2" ] ], [ "operation" ] ], [ [ [ "Spaghetti and meatballs-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Tomato sauce-23" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6693c842968f27dd04ce", "term": "Adrenaline", "description": "hormone, neurotransmitter and medication. Epinephrine is normally produced by both the adrenal glands and certain neurons", "question": "While viewing \"Scary Movie\" is the viewer likely to experience an increase in adrenaline?", "answer": false, "facts": [ "Scary Movie is a film that is a comedy take on horror, intended to make viewers laugh but not afraid.", "Adrenaline is produced when a human is frightened or excited." ], "decomposition": [ "What type of emotion would cause an increase in adrenaline?", "What genre of movie is Scary Movie?", "What emotion do #2 aim to create in the viewer?", "Are #3 and #1 the same?" ], "evidence": [ [ [ [ "Adrenaline-1" ] ], [ [ "Scary Movie-1" ] ], [ [ "Parody film-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Adrenaline-13" ] ], [ [ "Scary Movie-1" ] ], [ [ "Horror film-1" ] ], [ "operation" ] ], [ [ [ "Adrenaline-13" ] ], [ [ "Parody film-1", "Scary Movie-1" ] ], [ [ "Comedy film-1" ] ], [ "operation" ] ] ] }, { "qid": "6316b9ccfc0c645aae30", "term": "Flour", "description": "powder which is made by grinding cereal grains", "question": "Is All Purpose Flour safe for someone who has celiac disease?", "answer": false, "facts": [ "All purpose flour has about 9% gluten in it.", "When someone with Celiac disease eats gluten, their body has an immune response that attacks their small intestine." ], "decomposition": [ "What do people with celiac disease have to avoid?", "Is #1 absent from all purpose flour?" ], "evidence": [ [ [ [ "Coeliac disease-2" ] ], [ [ "Flour-26" ], "operation" ] ], [ [ [ "Healthy diet-23" ] ], [ [ "Flour-26", "Healthy diet-23" ] ] ], [ [ [ "Coeliac disease-13" ] ], [ [ "Coeliac disease-13" ] ] ] ] }, { "qid": "6523b3b72884557b38e8", "term": "Very Large Telescope", "description": "telescope in the Atacama Desert, Chile", "question": "Is the Very Large Telescope the most productive telescope in the world?", "answer": false, "facts": [ "Telescope productivity is measured based on how many scientific papers a telescope generates.", "The Hubble Space Telescope is the most productive telescope in the world. " ], "decomposition": [ "What are counted when measuring telescope productivity?", "How many occurrences of #1 have there been for the Very Large Telescope?", "How many occurrences of #1 have there been for the Hubble Telescope?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Very Large Telescope-3" ] ], [ [ "Very Large Telescope-16" ] ], [ [ "Hubble Space Telescope-84" ] ], [ "operation" ] ], [ [ [ "Very Large Telescope-3" ] ], [ [ "Very Large Telescope-16" ] ], [ [ "Hubble Space Telescope-84" ] ], [ [ "Very Large Telescope-3" ], "operation" ] ], [ [ "no_evidence" ], [ [ "Very Large Telescope-16" ], "no_evidence" ], [ [ "Hubble Space Telescope-69" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f15b0aaa611d1d554186", "term": "Sandal", "description": "Type of footwear with an open upper", "question": "Is it safe to wear sandals in snow?", "answer": false, "facts": [ "Sandals have open toes and don't completely cover the feet.", "Snow is very cold and direct exposure to skin can cause hypothermia.", "The feet need to be completely covered to walk through snow safely." 
], "decomposition": [ "What parts of your foot are exposed in sandals?", "What is the temperature of snow?", "Is it safe to have #1 directly exposed to something that is #2?" ], "evidence": [ [ [ [ "Sandal-1" ] ], [ [ "Winter storm-1" ], "no_evidence" ], [ [ "Hypothermia-1" ], "operation" ] ], [ [ [ "Sandal-1" ] ], [ [ "Snow-1" ], "no_evidence" ], [ [ "Frostbite-5" ], "no_evidence" ] ], [ [ [ "Sandal-1" ] ], [ [ "Snow-16" ], "no_evidence" ], [ [ "Frostbite-1" ], "operation" ] ] ] }, { "qid": "f378f856bdaff39cdfa3", "term": "Cuisine of Hawaii", "description": "Cuisine of Hawaii", "question": "Is the cuisine of Hawaii suitable for a vegan?", "answer": false, "facts": [ " Per capita, Hawaiians are the second largest consumers of Spam in the world, right behind Guam.", "Kalua pig is another famous cuisine of Hawaii.", "Fish and seafood are also very common in Hawaii." ], "decomposition": [ "What are the popular foods in Hawaiian cuisine?", "Which foods will a vegan exclude from their diet?", "Are all of #2 excluded from #1?" ], "evidence": [ [ [ [ "Cuisine of Hawaii-2" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ], [ [ [ "Cuisine of Hawaii-26" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ], [ [ [ "Haitian cuisine-15" ], "no_evidence" ], [ [ "Vegetarian and vegan dog diet-2" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "1051df8b6730dcd1b34f", "term": "Game (hunting)", "description": "animal hunted for sport or for food", "question": "Would a customer be happy if their grocery store meat tasted like game?", "answer": false, "facts": [ "\"Gamey\" is a word used to describe meat with a grassier, more wild taste.", "Gaminess in supermarket meat is very unusual.", "Many people find game to be unpleasant in taste." ], "decomposition": [ "Which kind of meat is referred to as game?", "Are grocery store customers accustomed to #1?" ], "evidence": [ [ [ [ "Game (hunting)-6" ] ], [ [ "Meat-1" ] ] ], [ [ [ "Game (hunting)-1" ] ], [ [ "Game (hunting)-5" ], "no_evidence" ] ], [ [ [ "Game (hunting)-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "4ea450758bcead502050", "term": "Royal Air Force", "description": "Aerial warfare service branch of the British Armed Forces", "question": "Is the Royal Air Force ensign on the moon?", "answer": false, "facts": [ "The Royal Air Force ensign is the flag of the Royal Air Force", "The Royal Air Force is a branch of the British Armed Forces", "Britain has never landed on the moon" ], "decomposition": [ "What does the Royal Air Force ensign represent?", "What country is #1 a part of?", "Has #2 ever sent people to the moon?" ], "evidence": [ [ [ [ "Royal Air Force Ensign-3" ], "no_evidence" ], [ [ "Royal Air Force-4" ], "no_evidence" ], [ [ "Moon landing-12" ], "no_evidence" ] ], [ [ [ "Royal Air Force Ensign-1" ] ], [ [ "Royal Air Force-1" ] ], [ "operation" ] ], [ [ [ "Royal Air Force Ensign-1" ] ], [ [ "Royal Air Force-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "b65adb5caa4f7a207879", "term": "Menstruation", "description": "Regular discharge of blood and tissue from the inner lining of the uterus through the vagina", "question": "Are tampons a good 24 hour solution for mentruation?", "answer": false, "facts": [ "Tampons are intended for use up to 8 hours at a time. ", "When left in for longer than 8 hours, tampons pose a dangerous risk for a life threatening condition. " ], "decomposition": [ "How many hours can a tampon be safely used for at a time?", "Is #1 greater than or equal to 24?" 
], "evidence": [ [ [ [ "Tampon-11" ] ], [ "operation" ] ], [ [ [ "Tampon-11" ] ], [ "operation" ] ], [ [ [ "Tampon-11" ] ], [ "operation" ] ] ] }, { "qid": "1977a974cf1b946ea1e5", "term": "Capsaicin", "description": "chemical compound", "question": "If someone loves buffalo wings do they enjoy capsaicin?", "answer": true, "facts": [ "Buffalo wings are fried chicken wings covered in a spicy sauce.", "Spicy foods are provided their spice from capsaicin from peppers." ], "decomposition": [ "What sauce is used on buffalo wings?", "What is the flavor of #1", "Is capsaicin used to create #2?" ], "evidence": [ [ [ [ "Buffalo wing-10" ] ], [ [ "Cayenne pepper-1" ] ], [ [ "Capsicum annuum-6" ], "operation" ] ], [ [ [ "Buffalo wing-10" ] ], [ [ "Hot sauce-1" ] ], [ [ "Capsaicin-1" ], "operation" ] ], [ [ [ "Buffalo wing-1" ] ], [ [ "Buffalo wing-10" ] ], [ [ "Capsaicin-1" ] ] ] ] }, { "qid": "ed28d8dc67d5ee85b85c", "term": "Hepatitis", "description": "inflammation of the liver tissue", "question": "Can you cure hepatitis with a tonsillectomy?", "answer": false, "facts": [ "A tonsillectomy removes the tonsils, glands found in the back of the throat", "Hepatitis is a disease that targets the liver" ], "decomposition": [ "What organ does hepatitis affect? ", "What organs are removed during a tonsillectomy?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Hepatitis-1" ] ], [ [ "Tonsillectomy-1" ] ], [ "operation" ] ], [ [ [ "Hepatitis-1" ] ], [ [ "Tonsillectomy-1" ] ], [ "operation" ] ], [ [ [ "Hepatitis-1" ] ], [ [ "Tonsillectomy-1" ] ], [ "operation" ] ] ] }, { "qid": "38a3e1117891b029cd6b", "term": "Adrenaline", "description": "hormone, neurotransmitter and medication. Epinephrine is normally produced by both the adrenal glands and certain neurons", "question": "Can cancer cause excess adrenaline production?", "answer": true, "facts": [ "Adrenaline is produced by the adrenal glands.", "Cancer is a disease characterized by the formation of tumors.", "Tumors on the adrenal glands can cause them to over-express." ], "decomposition": [ "What is cancer cause to grow?", "Can #1 grow on Adrenal glands?", "Does #2 cause excess adrenaline production?" ], "evidence": [ [ [ [ "Cancer cell-5" ] ], [ [ "Adrenal tumor-9" ] ], [ [ "Adrenal tumor-1" ] ] ], [ [ [ "Cancer-1" ] ], [ [ "Adrenal gland-3" ] ], [ [ "Adrenal tumor-10" ], "operation" ] ], [ [ [ "Causes of cancer-1" ] ], [ [ "Adrenal tumor-5" ] ], [ [ "Adrenal tumor-8" ] ] ] ] }, { "qid": "cc542edc9006be8d52a5", "term": "Frigatebird", "description": "A family of seabirds found across tropical and subtropical oceans", "question": "Would a Frigatebird in Ontario be a strange sight?", "answer": true, "facts": [ "Ontario is a province of Canada.", "Canada is surrounded by temperate oceans." ], "decomposition": [ "Where are Frigatebirds usually found?", "Which oceans can be found around Ontario?", "Do all of #2 fail to fit the description of #1?" 
], "evidence": [ [ [ [ "Frigatebird-1" ] ], [ [ "Hudson Bay-1", "James Bay-4" ] ], [ [ "Frigatebird-1", "Hudson Bay-1", "James Bay-4" ] ] ], [ [ [ "Frigatebird-1" ] ], [ [ "Hudson Bay-12", "Ontario-2" ] ], [ "operation" ] ], [ [ [ "Frigatebird-17" ] ], [ [ "Geography of Ontario-8" ] ], [ "operation" ] ] ] }, { "qid": "e1f6fead3b0070fe2142", "term": "Intel", "description": "American semiconductor chip manufacturer", "question": "Would a silicon shortage be bad for Intel's sales?", "answer": true, "facts": [ "Silicon is a key material for the production of semiconductor chips.", "A silicon shortage would mean fewer semiconductor chips could be produced.", "A business that produces fewer products than normal will receive lower than normal revenue." ], "decomposition": [ "What kind of products does Intel make?", "What are the key materials used in the production of #1?", "Is silicon in #2?" ], "evidence": [ [ [ [ "Intel-1" ] ], [ [ "Integrated circuit-29" ] ], [ "operation" ] ], [ [ [ "Intel-1" ] ], [ [ "Integrated circuit-1" ] ], [ "operation" ] ], [ [ [ "Intel-1" ] ], [ [ "Integrated circuit-1" ] ], [ "operation" ] ] ] }, { "qid": "54f5fd17d9a2373f68b1", "term": "Ivan the Terrible", "description": "Grand Prince of Moscow and 1st Tsar of Russia", "question": "Has Ivan the Terrible flown to Europe?", "answer": false, "facts": [ "Ivan the Terrible was the 1st Tsar of Russia.", "Ivan the Terrible died in 1584.", "The first confirmed person to fly was Jean Francois Pilatre de Rozier in 1783." ], "decomposition": [ "When did Ivan the Terrible die?", "When was the airplane invented?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Ivan the Terrible-1" ] ], [ [ "Airplane-2" ] ], [ "operation" ] ], [ [ [ "Ivan the Terrible-1" ] ], [ [ "Airplane-2" ] ], [ "operation" ] ], [ [ [ "Ivan the Terrible-1" ] ], [ [ "Airplane-14" ] ], [ "operation" ] ] ] }, { "qid": "ed08e2d4bbe5bbf1b01d", "term": "Oprah Winfrey", "description": "American businesswoman, talk show host, actress, producer, and philanthropist", "question": "Could Oprah Winfrey buy dozens of her staff Bugatti luxury cars?", "answer": true, "facts": [ "Oprah Winfrey is a billionaire", "A new Bugatti costs a few million dollars" ], "decomposition": [ "How much is Oprah Winfrey worth?", "How much does a Bugatti cost?", "Is #2 times 2 dozen less than #1?" ], "evidence": [ [ [ [ "Oprah Winfrey-54" ] ], [ [ "Bugatti Chiron-8" ] ], [ "operation" ] ], [ [ [ "Oprah Winfrey-54" ] ], [ [ "Bugatti Chiron-8" ] ], [ "operation" ] ], [ [ [ "Oprah Winfrey-54" ] ], [ [ "Bugatti Chiron-8" ] ], [ "operation" ] ] ] }, { "qid": "c45743b824e24f745fcf", "term": "Wehrmacht", "description": "unified armed forces of Germany from 1935 to 1945", "question": "Did the Wehrmacht affect the outcome of the War to End All Wars?", "answer": false, "facts": [ "The Wehrmacht was the unified military of Germany from 1935 to 1945", "The War to End All Wars is a nickname for World War I", "World War I ended in 1918" ], "decomposition": [ "What war was the War to End All Wars?", "When did #1 end?", "When was the Wehrmacht formed?", "Is #3 before #2?" 
], "evidence": [ [ [ [ "The war to end war-1" ] ], [ [ "World War I-1" ] ], [ [ "Wehrmacht-1" ] ], [ "operation" ] ], [ [ [ "The war to end war-1" ] ], [ [ "The war to end war-1" ] ], [ [ "Wehrmacht-1" ] ], [ "operation" ] ], [ [ [ "World War I-1" ] ], [ [ "Armistice of 11 November 1918-1" ] ], [ [ "Wehrmacht-1" ] ], [ "operation" ] ] ] }, { "qid": "0a4fa11abccd8dd84dc8", "term": "Leopard cat", "description": "Small wild cat", "question": "Are Leopard cats in less dire straits than Bornean Orangutan?", "answer": true, "facts": [ "Leopard cats are classified as Least Concern on IUCN endangered list.", "Bornean Orangutan's are classified as Endangered on IUCN endangered list." ], "decomposition": [ "What are the recognized threats to the Bornean orangutan?", "What are the recognized threats to the leopard cat?", "Is #1 worse than #2?" ], "evidence": [ [ [ [ "Bornean orangutan-25" ] ], [ [ "Leopard cat-28" ] ], [ [ "Bornean orangutan-26", "Leopard cat-30" ] ] ], [ [ [ "Bornean orangutan-26" ] ], [ [ "Leopard-3" ] ], [ [ "Bornean orangutan-25" ], "no_evidence", "operation" ] ], [ [ [ "Bornean orangutan-2" ] ], [ [ "Leopard cat-1" ] ], [ "operation" ] ] ] }, { "qid": "377427ca79b717bdcb83", "term": "Snoop Dogg", "description": "American rapper", "question": "Did Snoop Dogg refuse to make music with rival gang members?", "answer": false, "facts": [ "American rapper Snoop Dogg is a member of the Crips gang.", "The Crips are enemies of their rival gang, The Bloods.", "Rapper, The Game is a member of The Bloods gang.", "Tha Blue Carpet Treatment was a Snoop Dogg mixtape featuring the song California Vacation.", "Snoop Dogg collaborates with Xzibit and The Game on the song California Vacation." ], "decomposition": [ "What is the name of the gang that Snoop Dogg is part of?", "Which gang is the rival of #1?", "In Snoop Dogg's song California Vacation, which rapper did he collaborate with?", "Is #3 not associated with #2?" ], "evidence": [ [ [ [ "Snoop Dogg-7" ] ], [ [ "Crips-14" ] ], [ [ "Doctor's Advocate-4" ], "no_evidence" ], [ [ "The Game (rapper)-5" ], "operation" ] ], [ [ [ "Snoop Dogg-7" ] ], [ [ "Crips-3" ] ], [ [ "Doctor's Advocate-10" ], "no_evidence" ], [ [ "The Game (rapper)-5" ], "operation" ] ], [ [ [ "Snoop Dogg-7" ] ], [ [ "Crips-3" ] ], [ [ "Doctor's Advocate-4" ] ], [ [ "The Game (rapper)-5" ] ] ] ] }, { "qid": "a4819e1c28b5e1eb4b09", "term": "Helium", "description": "Chemical element with atomic number 2", "question": "Does the density of helium cause voices to sound deeper?", "answer": false, "facts": [ "Helium is less dense than air.", "Sound travels more quickly through helium than it does through air. ", "When sound travels more quickly, the tone of it raises and sounds higher." ], "decomposition": [ "What is the density of helium compared to air?", "As a result of #1, what is the speed in which air travel throughs helium compared to air", "When #2 happens, does the tone go deeper?" 
], "evidence": [ [ [ [ "Lifting gas-1" ] ], [ [ "Lifting gas-6" ], "no_evidence" ], [ [ "Helium-4" ], "no_evidence" ] ], [ [ [ "Helium-1" ], "no_evidence" ], [ [ "Helium-77" ] ], [ "operation" ] ], [ [ [ "Helium-64" ] ], [ [ "Helium-27" ] ], [ [ "Helium-77" ], "operation" ] ] ] }, { "qid": "9567d9236e4e01580b1a", "term": "Romeo and Juliet", "description": "tragedy by William Shakespeare", "question": "Is Romeo and Juliet an unusual title to teach high schoolers?", "answer": false, "facts": [ "Romeo and Juliet has topped multiple 'Top Read Books In High School' lists.", "Romeo and Juliet is available in multiple editions targeted at school age children." ], "decomposition": [ "What academic sources teach Romeo and Juliet?", "Are high schools included in #1?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Romeo and Juliet-73" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Romeo and Juliet-79" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6e47423b2fdc8bc3130c", "term": "Winter", "description": "one of the Earth's four temperate seasons, occurring between autumn and spring", "question": "Are there multiple American government holidays during winter?", "answer": true, "facts": [ "Winter runs from about December 20 to about March 20.", "Government holidays include Christmas, New Year, King Day, and President's Day.", "Christmas is always December 25, New Year is always January 1, King Day is a Monday in the middle of January, and President's Day is a Monday in late February." ], "decomposition": [ "Through which period of the year does winter usually last in the US?", "How many government holidays fall within the span of #1?", "Is #2 considerably greater than one?" ], "evidence": [ [ [ [ "Winter-1" ], "no_evidence" ], [ [ "Federal holidays in the United States-14", "Thanksgiving (United States)-1", "Veterans Day-10", "Washington's Birthday-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Northern Hemisphere-2" ] ], [ [ "Christmas-28", "Federal holidays in the United States-6", "New Year's Day-12" ] ], [ "operation" ] ], [ [ [ "Winter-9" ] ], [ [ "Holiday-6" ] ], [ "operation" ] ] ] }, { "qid": "8e073418da1eab499775", "term": "Islamophobia", "description": "Fear, hatred of, or prejudice against the Islamic religion or Muslims generally,", "question": "Is Islamophobia against Cyprus majority religion misdirected?", "answer": true, "facts": [ "Islamophobia is prejudice and fear against Muslims.", "Cyprus is a country in the Middle East, which is a predominantly Muslim region.", "Cyprus is the only Christian majority country in the Middle East, with Christians forming between 76% and 78% of the country's total population, and most of them adhere to Eastern Orthodox Christianity." ], "decomposition": [ "What religion is targeted by Islamophobia?", "What is the most common religion in Cyprus?", "Is #1 different than #2?" 
], "evidence": [ [ [ [ "Islamophobia-1" ] ], [ [ "Cyprus-100" ] ], [ "operation" ] ], [ [ [ "Islamophobia-54" ], "no_evidence" ], [ [ "Religion in Cyprus-1" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Islamophobia-1" ] ], [ [ "Religion in Cyprus-1" ] ], [ "operation" ] ] ] }, { "qid": "911c0d74b7882fc20ec8", "term": "Joke", "description": "something spoken, written, or done with humorous intention", "question": "Have jokes killed more people than rats in history?", "answer": false, "facts": [ "Greek philosopher Chrysippus was said to have died from laughter after seeing a donkey eating figs.", "There are only a handful of deaths attributed to jokes throughout history including King Martin of Aragon.", "There are an average of 30 deaths by rat attacks every century.", "The plague which is sometimes associated with diseased rats killed millions of people." ], "decomposition": [ "How many people have been killed by laughing to jokes?", "Which diseases are spread by rats?", "How many people have been killed by #2 over time", "Is #1 greater than #3?" ], "evidence": [ [ [ [ "Death from laughter-1" ] ], [ [ "Rat-30" ] ], [ [ "Bubonic plague-21" ] ], [ [ "Bubonic plague-21", "Death from laughter-1" ] ] ], [ [ [ "Death from laughter-1" ], "no_evidence" ], [ [ "Rat-28" ] ], [ [ "Diseases and epidemics of the 19th century-25" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Death from laughter-1" ], "no_evidence" ], [ [ "Black Death-23" ] ], [ [ "Black Death-4" ] ], [ "operation" ] ] ] }, { "qid": "91ec6f3b34115feccada", "term": "Ham", "description": "Pork from a leg cut that has been preserved by wet or dry curing, with or without smoking", "question": "Will parma ham be ready for New Year's if the pig is slaughtered in December?", "answer": false, "facts": [ "Parma ham requires two months to cure", "New Year's is at most one month away from December" ], "decomposition": [ "What is the minimum period of time required for parma ham to cure?", "How long is New Year's Day from December?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Prosciutto-5" ] ], [ [ "New Year's Day-1" ] ], [ "operation" ] ], [ [ [ "Prosciutto-5" ] ], [ [ "New Year's Day-1" ] ], [ "operation" ] ], [ [ [ "Ham-10" ] ], [ [ "New Year's Day-8" ], "operation" ], [ "operation" ] ] ] }, { "qid": "9deedbba0ca784be1855", "term": "Amtrak", "description": "Intercity rail operator in the United States", "question": "Does Amtrak operate four wheel vehicles?", "answer": true, "facts": [ "Amtrak is a transportation service.", "Amtrak transports people with trains and buses.", "A bus is a four wheel vehicle. " ], "decomposition": [ "What kinds of vehicles does Amtrak use?", "Do any of #1 have four wheels?" ], "evidence": [ [ [ [ "Amtrak-1" ] ], [ [ "Wheelset (rail transport)-1" ], "operation" ] ], [ [ [ "International (Amtrak train)-14" ] ], [ [ "Wheelset (rail transport)-1" ] ] ], [ [ [ "Amtrak-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "65d216ee031d7c2a376f", "term": "Chlorine", "description": "Chemical element with atomic number 17", "question": "Does chlorine inhibit photosynthesis?", "answer": true, "facts": [ "Chlorine prevents algae from growing in pools", "Algae photosynthesize " ], "decomposition": [ "What does Chlorine prevent from growing in a pool?", "Does #1 do photosynthesis?" 
], "evidence": [ [ [ [ "Swimming pool-67" ] ], [ [ "Algae-1" ], "operation" ] ], [ [ [ "Chlorine-66" ] ], [ [ "Photosynthesis-6" ] ] ], [ [ [ "Chlorine dioxide-25" ] ], [ [ "Bacteria-3" ], "operation" ] ] ] }, { "qid": "54a5df08fef4e5a4c82c", "term": "Bruce Lee", "description": "Hong Kong-American actor, martial artist", "question": "Was Bruce Lee absent from the 1964 University of Washington graduation ceremony?", "answer": true, "facts": [ "Bruce Lee enrolled at the University of Washington in 1961.", "Bruce Lee dropped out of college in early 1964.", "Bruce Lee moved to Oakland to live with James Yimm Lee in 1964." ], "decomposition": [ "When did the University of Washington graduation ceremony for the class of 1964 take place?", "What college did Bruce Lee attend?", "When did Bruce Lee drop out of #2?", "Did #1 occur after #3?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Bruce Lee-30" ] ], [ [ "Bruce Lee-15" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Bruce Lee-2" ] ], [ [ "Bruce Lee-15" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Bruce Lee-14" ] ], [ [ "Bruce Lee-15" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0a8bb20dbdb99d68127e", "term": "Clove", "description": "species of plant", "question": "Do people who smoke Djarum's like cloves?", "answer": true, "facts": [ "Djarum is a brand of cigarette popular around the world.", "Djarum cigarettes are made with a blend of cloves and tobacco." ], "decomposition": [ "What are Djarum cigarettes made of?", "Does #1 include cloves?" ], "evidence": [ [ [ [ "Djarum-1" ] ], [ "operation" ] ], [ [ [ "Djarum-1" ] ], [ "operation" ] ], [ [ [ "Djarum-1", "Kretek-1" ] ], [ "operation" ] ] ] }, { "qid": "5e692b6eb781e2d6f8c8", "term": "Astronomer", "description": "Scientist who studies celestial bodies", "question": "Is an astronomer interested in drosophila?", "answer": false, "facts": [ "Astronomers study celestial bodies like planets and stars", "Drosophila are a type of fly commonly studied by scientists in fields related to biology" ], "decomposition": [ "What do astronomers study?", "What kind of entity is Drosophila?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Astronomer-1" ] ], [ [ "Drosophila-4" ] ], [ [ "Drosophila-4" ], "operation" ] ], [ [ [ "Astronomer-1" ] ], [ [ "Drosophila-1" ] ], [ "operation" ] ], [ [ [ "Astronomer-1" ] ], [ [ "Drosophila-1" ] ], [ "operation" ] ] ] }, { "qid": "a0896de3fd13cd0f3e16", "term": "Atlantic cod", "description": "benthopelagic fish of the family Gadidae", "question": "Is Atlantic cod found in a vegemite sandwich?", "answer": false, "facts": [ "Vegemite is a spread popular in Australia.", "Vegemite is made from leftover brewers' yeast extract with various vegetable and spice additives. ", "The Atlantic cod is found mostly in North America and Greenland." ], "decomposition": [ "To what taxonomic kingdom does the Atlantic cod belong?", "What are the ingredients of Vegemite?", "Do any of #2 belong in #1?" ], "evidence": [ [ [ [ "Atlantic cod-1", "Fish-1" ] ], [ [ "Yeast extract-11" ] ], [ "operation" ] ], [ [ [ "Atlantic cod-7" ] ], [ [ "Vegemite-18" ] ], [ "operation" ] ], [ [ [ "Animal-14", "Atlantic cod-1" ] ], [ [ "Yeast extract-11" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "9e4811e253f8f28cb09f", "term": "WWE Raw", "description": "WWE television program", "question": "Did Bruiser Brody wrestle on WWE Raw?", "answer": false, "facts": [ "WWE Raw debuted on TV January 11, 1993.", "Bruiser Brody was a wrestler that was stabbed to death in 1988." 
], "decomposition": [ "When did WWE Raw make its debut appearance?", "When did wrestler Bruiser Brody's wrestling career come to an end?", "Is #1 before #2?" ], "evidence": [ [ [ [ "WWE Raw-1" ] ], [ [ "Bruiser Brody-1" ] ], [ "operation" ] ], [ [ [ "WWE Raw-1" ] ], [ [ "Bruiser Brody-3" ] ], [ "operation" ] ], [ [ [ "WWE Raw-8" ] ], [ [ "Bruiser Brody-7" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "99b23da6c0f8ad0fc0ea", "term": "Crustacean", "description": "subphylum of arthropods", "question": "Can the largest crustacean stretch out completely on a king-sized mattress?", "answer": false, "facts": [ "The largest crustacean is the Japanese spider crab ", "The largest Japanese spider crabs have a leg span of just over 12 feet ", "The longer edge of a king-sized mattress is six feet, eight inches" ], "decomposition": [ "What is the largest crustacean?", "How long is the largest #1?", "How long is a king-sized matress?", "Is #2 smaller than #3?" ], "evidence": [ [ [ [ "Japanese spider crab-1" ] ], [ [ "Japanese spider crab-2" ] ], [ [ "Bed size-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Crustacean-15" ] ], [ [ "Crustacean-15" ] ], [ [ "Mattress-8" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Japanese spider crab-1" ] ], [ [ "Crustacean-15" ] ], [ [ "Bed size-17" ] ], [ "operation" ] ] ] }, { "qid": "59542ff1d7782e4cbd89", "term": "Linux", "description": "Family of free and open-source software operating systems based on the Linux kernel", "question": "Do Windows or Android smartphones run newer versions of Linux?", "answer": false, "facts": [ "Android smartphones are based on Linux.", "Windows smartphones are based on the Windows operating system." ], "decomposition": [ "Which operating system do Windows smartphones run on?", "Which operating system do android smartphones run on?", "Is #1 or #2 Linux-based?" ], "evidence": [ [ [ [ "Windows 10 Mobile-1" ] ], [ [ "Android (operating system)-1" ] ], [ "operation" ] ], [ [ [ "Windows 10 Mobile-1" ] ], [ [ "Android 11-1" ] ], [ [ "Linux-3" ] ] ], [ [ [ "Windows Phone-2" ] ], [ [ "Linux-65" ] ], [ [ "Linux-66" ], "operation" ] ] ] }, { "qid": "24f0e80c50384b28484e", "term": "Courage", "description": "quality of mind or spirit that enables a person to face difficulty, danger, or pain", "question": "Does Neville Longbottom have more courage as a child than as an adult?", "answer": false, "facts": [ "Neville Longbottom is a character from the Harry Potter series.", "In the first few books of the Harry Potter series, Neville is a child.", "In the final few books of the Harry Potter series Neville is becoming an adult. ", "Neville's first appearances in the series show him to be very cowardly.", "Neville is considered a hero by the end of the series." ], "decomposition": [ "Did Neville Longbottom's first appearances in the series show him to be very cowardly?", "Was #1's a child in the first few books of the Harry potter series?", "Was Neville Longbottom considered a hero by the end of the series?", "Was #3's an adult in the final few books of the Harry potter series?", "Was he more courageous in #2 than #4?" 
], "evidence": [ [ [ [ "Dumbledore's Army-17" ] ], [ [ "Harry Potter and the Philosopher's Stone-8" ] ], [ [ "Dumbledore's Army-19" ] ], [ [ "Dumbledore's Army-19" ] ], [ "operation" ] ], [ [ [ "Dumbledore's Army-17" ] ], [ [ "Harry Potter (character)-1" ] ], [ [ "Harry Potter and the Deathly Hallows-15" ] ], [ [ "Harry Potter and the Deathly Hallows-5" ] ], [ [ "Harry Potter and the Deathly Hallows – Part 2-10" ], "operation" ] ], [ [ [ "Harry Potter and the Philosopher's Stone-6" ], "no_evidence" ], [ "operation" ], [ [ "Dumbledore's Army-17" ] ], [ [ "Harry Potter (character)-38" ], "no_evidence", "operation" ], [ "operation" ] ] ] }, { "qid": "3ca5966b88394e62271e", "term": "University of Pennsylvania", "description": "Private Ivy League research university in Philadelphia, Pennsylvania", "question": "Could Brooke Shields succeed at University of Pennsylvania?", "answer": true, "facts": [ "Brooke Shields graduated from Princeton University.", "Princeton is ranked as the number 1 national college by US news.", "University of Pennsylvania is ranked as number 6 national college by US news.", "Princeton only admits around 6 percent of applicants as of 2018.", "University of Pennsylvania accepts around 9% of applicants as of 2018." ], "decomposition": [ "What college did Brooke Shields go to?", "Out of all colleges in the US, how is #1 ranked?", "Is the ranking of University of Pennsylvania similar to #2?" ], "evidence": [ [ [ [ "Brooke Shields-6" ] ], [ [ "Princeton University-59" ] ], [ [ "University of Pennsylvania-48" ] ] ], [ [ [ "Brooke Shields-6" ] ], [ [ "Princeton University-59" ] ], [ [ "University of Pennsylvania-48" ], "operation" ] ], [ [ [ "Brooke Shields-6" ] ], [ [ "Princeton University-3" ], "operation" ], [ [ "University of Pennsylvania-47" ], "no_evidence" ] ] ] }, { "qid": "45605f9dbc0cf85f668f", "term": "Atlantic salmon", "description": "species of fish", "question": "Would Atlantic Salmon be within David Duchovny's dietary guidelines?", "answer": true, "facts": [ "David Duchovny is a pescatarian. ", "Pescatarians do not eat chicken, pork, or beef, but will eat fish." ], "decomposition": [ "What kind of diet does David Duchovny follow?", "What type of food is Atlantic Salmon?", "Do people who follow #1 diets eat #2?" ], "evidence": [ [ [ [ "David Duchovny-12" ] ], [ [ "Atlantic salmon-1" ] ], [ [ "Pescetarianism-1" ] ] ], [ [ [ "David Duchovny-12" ] ], [ [ "Atlantic salmon-1", "Seafood-1" ] ], [ [ "Pescetarianism-1" ] ] ], [ [ [ "David Duchovny-3" ], "no_evidence" ], [ [ "Atlantic salmon-1" ] ], [ "operation" ] ] ] }, { "qid": "765107a950759075813a", "term": "Winemaking", "description": "the production of wine, starting with the selection of the fruit, its fermentation into alcohol, and the bottling of the finished liquid", "question": "Do people remember Lucille Ball's winemaking as successful?", "answer": false, "facts": [ "Lucille Ball was the star of \"I Love Lucy\".", "On \"I Love Lucy\", Lucille's character fails miserably while stomping grapes for wine." ], "decomposition": [ "What show was Lucille Ball a star of?", "On #1, was Lucille's character successful in making wine?" 
], "evidence": [ [ [ [ "Lucille Ball-1" ] ], [ [ "Grape treading-3" ], "no_evidence", "operation" ] ], [ [ [ "I Love Lucy-1" ] ], [ [ "Grape treading-3" ], "no_evidence", "operation" ] ], [ [ [ "Lucille Ball-24" ] ], [ "no_evidence" ] ] ] }, { "qid": "b6e0094f030a326e510a", "term": "Wool", "description": "Textile fibre from the hair of sheep or other mammals", "question": "Can a Sphynx cat be used for wool?", "answer": false, "facts": [ "A Sphynx cat is a breed of cats that lacks hair.", "Wool is a soft smooth fabric derived from the hair of animals.", "Sphynx cats skin are covered in an oily sticky substance." ], "decomposition": [ "Which animals can wool be derived from?", "Is the Sphynx cat likely to be included in #1?" ], "evidence": [ [ [ [ "Wool-1" ] ], [ [ "Sphynx cat-5" ], "operation" ] ], [ [ [ "Wool-1" ] ], [ "operation" ] ], [ [ [ "Wool-7" ] ], [ [ "Sphynx cat-10" ], "operation" ] ] ] }, { "qid": "b01c95ce0b47590fd29a", "term": "Emulator", "description": "system that emulates a real system such that the behavior closely resembles the behavior of the real system", "question": "Are classic nintendo games for emulator legal?", "answer": false, "facts": [ "Distribution of copyrighted games by anyone other than the owner is considered theft.", "Nintendo has not released any games for emulators." ], "decomposition": [ "Who owns the copyright for classic Nintendo games?", "Has #1 issued any versions of classic Nintendo games for emulators?" ], "evidence": [ [ [ [ "Nintendo Switch-65" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Video game console emulator-5" ], "no_evidence" ], [ [ "NES Classic Edition-1" ], "no_evidence", "operation" ] ], [ [ [ "NES Classic Edition-17" ], "no_evidence" ], [ [ "Video game-42" ], "no_evidence", "operation" ] ] ] }, { "qid": "aeea08d186f49c455038", "term": "T-Mobile", "description": "global telecommunication company", "question": "Can you use the T-Mobile tuesdays app if you aren't a T-Mobile customer?", "answer": false, "facts": [ "T-Mobile tuesdays is a rewards app for T-Mobile subscribers.", "T-Mobile Tuesdays verifies users by making sure they have a T-Mobile phone number." ], "decomposition": [ "Who can use the T-Mobile tuesdays app?", "Does T-Mobile allow use of the app if you aren't #1?" ], "evidence": [ [ [ [ "Un-carrier-22" ] ], [ [ "Un-carrier-22" ] ] ], [ [ [ "T-Mobile-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Un-carrier-22" ] ], [ [ "Un-carrier-22" ] ] ] ] }, { "qid": "bbd4e385d8867b66aa60", "term": "Aretha Franklin", "description": "American singer, songwriter, and pianist", "question": "Could Aretha Franklin vote for a president when her second child was born?", "answer": false, "facts": [ "Aretha Franklin was born in 1942.", "Aretha Franklin had her second child in 1957.", "You must be at least 18 years old to vote in a presidential election in the United States." ], "decomposition": [ "How old was Aretha Franklin when her second child was born?", "What is the minimum age required to vote in a U.S. election?", "Is #2 lower than #1?" 
], "evidence": [ [ [ [ "Aretha Franklin-1", "Aretha Franklin-32" ] ], [ [ "Voting age-9" ] ], [ "operation" ] ], [ [ [ "Aretha Franklin-1", "Aretha Franklin-32" ] ], [ [ "United States presidential election-24" ] ], [ "operation" ] ], [ [ [ "Aretha Franklin-32" ] ], [ [ "Voting age-4" ] ], [ "operation" ] ] ] }, { "qid": "157e11814d768fe69782", "term": "Eighth Amendment to the United States Constitution", "description": "prohibits cruel and unusual punishment and excessive bail", "question": "Is Eighth Amendment to the United States Constitution popular in court?", "answer": false, "facts": [ "The Eighth Amendment prohibits cruel and unusual punishment.", "The Fifth Amendment prevents a person from incriminating themselves.", "The Fifth Amendment is often invoked in criminal cases.", "The Fourteenth Amendment regards equal protection under the law and has been in numerous landmark cases." ], "decomposition": [ "How many cases have involved the 8th amendment?", "How many cases have involved the other amendments?", "Is #1 the highest out of #2?" ], "evidence": [ [ [ [ "Eighth Amendment to the United States Constitution-30", "Eighth Amendment to the United States Constitution-41" ], "no_evidence" ], [ [ "Supreme Court of the United States-73" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Eighth Amendment to the United States Constitution-2", "Eighth Amendment to the United States Constitution-3" ], "no_evidence" ], [ [ "First Amendment to the United States Constitution-3", "First Amendment to the United States Constitution-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Eighth Amendment to the United States Constitution-1" ], "no_evidence" ], [ [ "American Civil Liberties Union-36" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cc41d3b2a3f1aab33adf", "term": "Social Democratic Party of Germany", "description": "Social-democratic political party in Germany", "question": "Did the Social Democratic Party of Germany help Frederick II become King of Prussia?", "answer": false, "facts": [ "The Social Democratic Party of Germany was founded in 1863.", "Frederick II was King of Prussia from 1740-1786." ], "decomposition": [ "In what year was the Social Democratic Party of Germany founded?", "In what year did Frederick II become King of Prussia?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Social Democratic Party of Germany-3" ] ], [ [ "Frederick the Great-1" ] ], [ "operation" ] ], [ [ [ "Social Democratic Party of Germany-3" ] ], [ [ "Frederick the Great-1" ] ], [ "operation" ] ], [ [ [ "Social Democratic Party of Germany-3" ] ], [ [ "Frederick the Great-1" ] ], [ "operation" ] ] ] }, { "qid": "e0400033f078e56faad2", "term": "Pride", "description": "inwardly directed emotion that carries two common meanings", "question": "Would a Catholic priest commend someone's pride?", "answer": false, "facts": [ "Adherents to Catholicism subscribe to the notion of the '7 deadly sins'.", "Pride is one of the 7 deadly sins." ], "decomposition": [ "According to Catholic beliefs, what are the seven deadly sins?", "Is pride excluded from #1?" 
], "evidence": [ [ [ [ "Seven deadly sins-1" ] ], [ "operation" ] ], [ [ [ "Seven deadly sins-1" ] ], [ [ "Seven deadly sins-1" ], "operation" ] ], [ [ [ "Seven deadly sins-1" ] ], [ "operation" ] ] ] }, { "qid": "63b0be164dfd44bf1890", "term": "PlayStation 4", "description": "Sony's eighth-generation home video game console", "question": "Did Bill Gates help to develop the PlayStation 4?", "answer": false, "facts": [ "The PlayStation 4 was developed by Sony Interactive Entertainment.", "Bill Gates works for Microsoft Corporation, which is a competitor of Sony." ], "decomposition": [ "Which organization does Bill Gate work for?", "Which organization developed PlayStation 4?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Bill Gates-1" ] ], [ [ "PlayStation 4-1" ] ], [ "operation" ] ], [ [ [ "Bill Gates-1" ] ], [ [ "PlayStation 4 system software-1" ] ], [ "operation" ] ], [ [ [ "Bill Gates-1" ] ], [ [ "PlayStation 4-1" ] ], [ "operation" ] ] ] }, { "qid": "83821526f11659f145b3", "term": "Ape", "description": "superfamily of mammals", "question": "Would a teacher still have their job if they called a black student an ape?", "answer": false, "facts": [ "'Ape' and 'monkey' are words that have been used in a derogatory manner against black people.", "Teachers are held to a level of professionalism and cannot act in an abusive way towards children." ], "decomposition": [ "What kind of term would \"Ape\" be if used to describe a black person?", "What standards are teachers held up to?", "If a teacher used #1, would they be upholding #2?" ], "evidence": [ [ [ [ "Race and ethnicity in the United States-8" ], "no_evidence" ], [ [ "Teacher-74" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Monkey chanting-1" ] ], [ [ "Standards-based education reform in the United States-9" ] ], [ "operation" ] ], [ [ [ "Racism-54" ] ], [ [ "Teacher-28" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b9f5942431b6ae3ef7bd", "term": "Compact disc", "description": "Optical disc for storage and playback of digital audio", "question": "Did compact discs make computer gaming more popular?", "answer": true, "facts": [ "Compact discs contained significantly more storage space than the previously popular floppy disc format.", "Gaming studios were therefore able to significantly improve the graphics, sounds, and features of their games to make them more immersive.", "The better games led to a massive increase in popularity for computer gaming." ], "decomposition": [ "Which external storage device was previously popular before compact discs?", "What features of compact disc made them much better than #1?", "What new possibilities did #2 create for computer games?", "Did #3 lead to increased interest in computer games?" ], "evidence": [ [ [ [ "Floppy disk-3" ] ], [ [ "CD-ROM-21" ] ], [ [ "PC game-22" ] ], [ [ "PC game-4" ], "operation" ] ], [ [ [ "Floppy disk-2", "Floppy disk-22" ] ], [ [ "Compact disc-3", "Video CD-2" ] ], [ [ "Video CD-2" ], "no_evidence" ], [ [ "Compact disc-59", "PC game-22" ] ] ], [ [ [ "Floppy disk-1" ] ], [ [ "Compact disc-2", "Compact disc-41" ] ], [ [ "Fifth generation of video game consoles-4" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "4dd3a8a73cc4786cd638", "term": "Stork", "description": "family of birds", "question": "Do storks need golden toads to survive?", "answer": false, "facts": [ "Storks feed on a number of reptiles, amphibians, and ammals, and insects.", "The golden toad is an amphibian.", "The golden toad is a rare animal that has not been seen since 1989." 
], "decomposition": [ "What is the most current population estimate of storks?", "What is the most current population estimate of golden toads?", "If storks exclusively ate golden toads, would #2 have been enough to sustain #1?" ], "evidence": [ [ [ [ "Stork-1" ], "no_evidence" ], [ [ "Golden toad-1" ] ], [ "operation" ] ], [ [ [ "Stork-10" ], "no_evidence" ], [ [ "Golden toad-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Stork-1" ], "no_evidence" ], [ [ "Golden toad-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "63caa686e799fd5825e8", "term": "H", "description": "letter in the Latin alphabet", "question": "Are any letters directly before and after H missing from Roman numerals?", "answer": true, "facts": [ "The Roman numerals are: I, V, X, L, C, D, and M.", "The letter H in the alphabet is preceded by G and followed by I." ], "decomposition": [ "What is the letter before \"H\"?", "What is the letter after \"H\"?", "What are the Roman numerals? ", "Is it the case that not both #1 and #2 are in #3?" ], "evidence": [ [ [ "operation" ], [ "operation" ], [ [ "Roman numerals-43" ] ], [ "operation" ] ], [ [ [ "G-1" ] ], [ [ "I-1" ] ], [ [ "1444-1" ] ], [ "operation" ] ], [ [ [ "G-1", "H-1" ] ], [ [ "I-1" ] ], [ [ "Roman numerals-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "250d2ecf5f5bc889a863", "term": "Hammer and sickle", "description": "Communist symbol", "question": "Did the Nazis use the Hammer and sickle flag?", "answer": false, "facts": [ "Hammer and sickle is a communist symbol used on flags", "The Nazi flag had a large symbol of a swastika. ", "The hammer and sickle was used as a anti Nazi symbol during World War II." ], "decomposition": [ "Which symbol is featured in the Nazi flag?", "Is #1 a hammer and sickle symbol?" ], "evidence": [ [ [ [ "Flag of Nazi Germany-1" ] ], [ [ "Hammer and sickle (disambiguation)-1" ] ] ], [ [ [ "Flag of Nazi Germany-4" ] ], [ "operation" ] ], [ [ [ "Flag of Nazi Germany-1" ] ], [ [ "Swastika-1" ] ] ] ] }, { "qid": "faf10ced482aedfe9c58", "term": "Johnny Cash", "description": "American singer-songwriter and actor", "question": "Are there enough Jonny Cash records in the world to give one to each French citizen?", "answer": true, "facts": [ "Johnny Cash has sold about 90 million albums", "The population of France is around 66 million " ], "decomposition": [ "How many Johnny Cash records have been sold?", "What is the population of France?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Johnny Cash-1" ] ], [ [ "France-1" ] ], [ "operation" ] ], [ [ [ "Johnny Cash-1" ] ], [ [ "France-1" ] ], [ "operation" ] ], [ [ [ "Johnny Cash-1" ] ], [ [ "France-1" ] ], [ "operation" ] ] ] }, { "qid": "66a3c0af3141c7c7d215", "term": "QWERTY", "description": "keyboard layout where the first line is \"QWERTYUIOP\"", "question": "Can monkeys use QWERTY keyboards?", "answer": true, "facts": [ "QWERTY keyboards are an alphabet key layout that were first used on typrwriters. ", "Monkeys can be trained to push buttons.", "Typewriter key's are buttons.", "Monkeys can press keys on keyboards." ], "decomposition": [ "What kind of keys are found on QWERTY keyboards?", "Can #1 be likened to buttons?", "Can monkeys be trained to push buttons?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "QWERTY-17" ] ], [ "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "QWERTY-12" ] ], [ [ "Keyboard layout-3" ] ], [ [ "Pet monkey-4" ] ], [ "operation" ] ], [ [ [ "QWERTY-16", "QWERTY-17" ] ], [ [ "Push-button-1" ], "no_evidence" ], [ [ "Tool use by animals-21" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "55c54e04a9446aaf6b45", "term": "Chocolate brownie", "description": "A square, baked, chocolate dessert", "question": "Should children be kept from \"special brownies\"?", "answer": true, "facts": [ "\"Special brownies\" typically refer to brownies that have been laced with THC.", "THC is an active component of cannabis, a drug meant for adults only." ], "decomposition": [ "What are \"special brownies\" made from that makes them special?", "Who is #1 made specifically for?", "Are children allowed to have things meant for #2?" ], "evidence": [ [ [ [ "Cannabis edible-1" ] ], [ [ "Cannabis edible-2" ] ], [ "no_evidence" ] ], [ [ [ "Cannabis edible-11" ] ], [ [ "Cannabis edible-2" ] ], [ [ "Cannabis edible-7" ], "operation" ] ], [ [ [ "Cannabis edible-6" ] ], [ [ "Medical cannabis-30" ] ], [ [ "Medical cannabis-30" ] ] ] ] }, { "qid": "f66e34419f26c8027ee6", "term": "Snoopy", "description": "cartoon dog", "question": "Would Taylor Swift refer to Snoopy as oppa?", "answer": true, "facts": [ "Oppa is a Korean word used by women to address a man who is 10 or more years older than her", "Snoopy is 47 years old", "Taylor Swift is 30 years old" ], "decomposition": [ "What is the minimum age difference that a Korean woman would use Oppa to address an older man?", "How old is Snoopy?", "How old is Taylor Swift?", "What is #2 minus #3?", "Is #4 greater than or equal to #1?" ], "evidence": [ [ [ [ "Korean pronouns-20" ], "no_evidence" ], [ [ "Snoopy-7" ] ], [ [ "Taylor Swift-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Third-person pronoun-106" ], "no_evidence" ], [ [ "Snoopy-1" ], "no_evidence" ], [ [ "Taylor Swift-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Korean honorifics-1" ], "no_evidence" ], [ [ "Snoopy-1" ] ], [ [ "Taylor Swift-4" ] ], [ "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9c14845b31238b66baf6", "term": "Horseradish", "description": "species of plant", "question": "Can horseradish be eaten in a religious context?", "answer": true, "facts": [ "A Seder is typically held during the Jewish holiday Passover.", "The Seder involves several items representing the story of the Exodus.", "Horseradish is commonly used for the traditional bitter herb item." ], "decomposition": [ "What are some commonly used traditional bitter herb items for the Seder held during the Passover?", "Is horseradish included in #1?" ], "evidence": [ [ [ [ "Maror-1" ] ], [ [ "Maror-13" ], "operation" ] ], [ [ [ "Maror-7" ] ], [ "operation" ] ], [ [ [ "Passover Seder-53" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f67ebf32de48c788dfbb", "term": "Hamster", "description": "subfamily of mammals", "question": "Do hamsters provide food for any animals?", "answer": true, "facts": [ "Hamsters are prey animals.", "Prey animals provide food for predators. " ], "decomposition": [ "What types of animal are hamsters?", "Do #1 provide food for any other animals?" 
], "evidence": [ [ [ [ "Hamster-1" ] ], [ [ "Ball python-9", "Snake-85" ], "no_evidence", "operation" ] ], [ [ [ "Hamster-1" ] ], [ [ "Cat-1", "Hamster-2" ], "operation" ] ], [ [ [ "Hamster-1" ] ], [ [ "Rodent-73" ] ] ] ] }, { "qid": "c28b918d3baf499d1191", "term": "Chick-fil-A", "description": "American fast food chain", "question": "Will Chick-fil-A hypothetically refuse to sponsor a Pride parade?", "answer": true, "facts": [ "Pride parades are annual festivals held to celebrate the gay community.", "Chick-fil-A is a fast food restaurant founded by S. Truett Cathy.", "S. Truett Cathy was a devout Southern Baptist. ", "Chick-fil-A's opposition to same-sex marriage has been the subject of public controversy." ], "decomposition": [ "Who founded Chick-fil-A?", "What religion was #1?", "What do pride parades typically celebrate?", "Do #2's oppose #3?" ], "evidence": [ [ [ [ "Chick-fil-A-3" ] ], [ [ "S. Truett Cathy-3" ] ], [ [ "Pride parade-1" ] ], [ [ "Public image of Mike Huckabee-17" ] ] ], [ [ [ "Chick-fil-A-3" ] ], [ [ "Chick-fil-A-2" ] ], [ [ "Pride parade-1" ] ], [ [ "Chick-fil-A-33" ], "no_evidence", "operation" ] ], [ [ [ "S. Truett Cathy-1" ] ], [ [ "S. Truett Cathy-3" ] ], [ [ "Pride parade-1" ] ], [ [ "Hate group-17" ], "operation" ] ] ] }, { "qid": "a8f96cb3309095eeadc6", "term": "All Nippon Airways", "description": "Japanese Airline", "question": "Are the headquarters of All Nippon Airways near a beach?", "answer": false, "facts": [ "The headquarters of All Nippon Airways are located in Shiodome City Center in the Shiodome area of the Minato ward of Tokyo.", "Tokyo is a metropolitan area.", "A beach is a landform alongside a body of water.", "Metropolitan areas typically do not have bodies of water in the surrounding area." ], "decomposition": [ "Where city are the headquarters of All Nippon Airways?", "What kind of development area is #1?", "What is a beach characterized as?", "Do #2 areas typically have #3?" ], "evidence": [ [ [ [ "All Nippon Airways-1" ] ], [ [ "Shiodome-2" ] ], [ [ "Beach-1" ] ], [ "operation" ] ], [ [ [ "All Nippon Airways-1" ] ], [ [ "Shiodome-7" ] ], [ [ "Beach-1" ] ], [ "operation" ] ], [ [ [ "All Nippon Airways-1" ] ], [ [ "Shiodome-1" ] ], [ [ "Beach-1" ] ], [ "operation" ] ] ] }, { "qid": "7035f77c008349a22833", "term": "Evander Holyfield", "description": "American boxer", "question": "Does Evander Holyfield eat pork products?", "answer": true, "facts": [ "Evander Holyfield is a born-again Christian", "Mainstream Christian denominations do not observe restrictions on the kinds of animals they can eat" ], "decomposition": [ "What religion was Evander Holyfield?", "Does #1 allow eating pork?" ], "evidence": [ [ [ [ "Evander Holyfield-62" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Christian dietary laws-1" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "e32511f311bfd294ebf1", "term": "John Key", "description": "38th Prime Minister of New Zealand", "question": "Could John Key issue an executive order in the USA?", "answer": false, "facts": [ "An executive order is a means of issuing federal directives in the United States, used by the president of the United States.", "To serve as president of the United States, one must be a natural-born citizen of the United States.", "John Key was born in Auckland, New Zealand." ], "decomposition": [ "Who can issue executive orders in the USA?", "What are the requirements to become #1?", "Does John Key satisfy all of #2?" 
], "evidence": [ [ [ [ "Executive order-1" ] ], [ [ "President of the United States-37" ], "no_evidence" ], [ [ "John Key-1" ], "no_evidence" ] ], [ [ [ "Federal government of the United States-18" ] ], [ [ "President of the United States-38" ] ], [ [ "John Key-1" ] ] ], [ [ [ "Executive order-1" ] ], [ [ "President of the United States-38" ] ], [ [ "John Key-5" ] ] ] ] }, { "qid": "32c562173099f5c2345f", "term": "Goblin shark", "description": "Deep-sea shark", "question": "Can a Goblin shark hypothetically ride a bike if it had limbs?", "answer": false, "facts": [ "A Goblin shark weighs around 460 pounds.", "The weight capacity of the average bike is 300 pounds." ], "decomposition": [ "What is the average weight of a goblin? ", "What is the average weight a bike can hold? ", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Goblin shark-8" ], "no_evidence" ], [ [ "Birdy (bicycle)-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Goblin shark-1", "Goblin shark-8" ] ], [ [ "Outline of bicycles-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Goblin shark-8" ] ], [ [ "Bicycle-26" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3f726a8aa808d26ab076", "term": "Anchor", "description": "Device used to connect a vessel to the bed of a body of water to prevent the craft from drifting", "question": "Does a Trek 9000 require an anchor in order to park?", "answer": false, "facts": [ "A Trek 9000 is a mountain bike", "An anchor is used on water borne vehicles like boats" ], "decomposition": [ "What kind of vehicle is the Trek 9000?", "Does #1 need an anchor to park?" ], "evidence": [ [ [ [ "Trek Bicycle Corporation-7" ] ], [ "operation" ] ], [ [ [ "International 9000-6" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Klein Bicycle Corporation-7" ] ], [ [ "Bicycle-30" ], "operation" ] ] ] }, { "qid": "0cb73f1ccb217757bddf", "term": "Steven Spielberg", "description": "American film director and screenwriter", "question": "Does Steven Spielberg's 1998 film take place in a period after War Horse setting?", "answer": true, "facts": [ "Steven Spielberg's 1998 film, Saving Private Ryan, takes place during World War II.", "World War II took place from 1939-1945.", "The 2011 movie War Horse takes place during World War I.", "World War I took place from 1914-1918." ], "decomposition": [ "What time period does War Horse take place in?", "What years did #1 take place in?", "What time period does Steven Spielberg's 1998 film take place in?", "When years did #3 take place in?", "Did #4 happen after #2?" ], "evidence": [ [ [ [ "War Horse (film)-2" ] ], [ [ "World War I-1" ] ], [ [ "Saving Private Ryan-1" ] ], [ [ "World War II-1" ] ], [ "operation" ] ], [ [ [ "War Horse (film)-2" ] ], [ [ "World War I-1" ] ], [ [ "Saving Private Ryan-1" ] ], [ [ "World War II-1" ] ], [ "operation" ] ], [ [ [ "War Horse (film)-2" ] ], [ [ "War Horse (film)-10", "War Horse (film)-5" ] ], [ [ "Saving Private Ryan-1" ] ], [ [ "Saving Private Ryan-6" ] ], [ "operation" ] ] ] }, { "qid": "cfe1c436a761f01041dd", "term": "Plum", "description": "subgenus of plants", "question": "Is November a bad time for a photographer to take pictures of a plum tree in bloom?", "answer": true, "facts": [ "A plum tree is a deciduous tree that bears fruit.", "Deciduous trees shed their leaves in the autumn.", "Autumn happens from September until the end of Deember." 
], "decomposition": [ "What kind of tree is a plum tree?", "What season will it be in temperate regions by November?", "What do #1 do during #2", "Considering #3, will a plum tree have an unattractive appearance at that time?" ], "evidence": [ [ [ [ "Plum-7" ] ], [ [ "Winter-10" ] ], [ [ "Plum-7" ] ], [ [ "Plum-7" ] ] ], [ [ [ "Plum-5" ], "no_evidence" ], [ [ "Season-3" ] ], [ [ "Plum-5" ], "no_evidence" ], [ [ "Plum-5", "Plum-7" ] ] ], [ [ [ "Prunus-1" ] ], [ [ "November-2" ] ], [ [ "Deciduous-1" ] ], [ "operation" ] ] ] }, { "qid": "3486b6b5c69878bb088d", "term": "Cauliflower", "description": "cauliflower plants (for the vegetable see Q23900272)", "question": "Do more Cauliflower grow in Arizona than California?", "answer": false, "facts": [ "Cauliflower grows best in cool temperatures with lots of sun.", "California is the largest producer of Cauliflower in the U.S.", "Arizona has a hot arid climate." ], "decomposition": [ "Which kind of climate favors the growth of Cauliflower?", "What kind of climate does Arizona have?", "What kind of weather does California have?", "Is #1 more similar to #2 than #3?" ], "evidence": [ [ [ [ "Cauliflower-6" ] ], [ [ "Arizona-39", "Arizona-40", "Arizona-42" ] ], [ [ "Climate of the United States-18" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cauliflower-6" ] ], [ [ "Arizona-39" ] ], [ [ "California-52" ] ], [ "operation" ] ], [ [ [ "Cauliflower-6" ] ], [ [ "Arizona-39" ] ], [ [ "Climate of the United States-18" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ab44cb3353f36e75ef24", "term": "Martyr", "description": "person who suffers persecution and death for advocating, refusing to renounce, and/or refusing to advocate a belief or cause, usually a religious one", "question": "Would Jason Voorhees hypothetically fail at being a martyr?", "answer": true, "facts": [ "A martyr is someone that is killed for their beliefs.", "Jason Voorhees is the horror maniac from the Friday the 13th movies.", "Jason Voorhees is immortal and cannot be killed.", "Characters in Friday the 13th thought that dismembering Jason Voorhees would kill him but Jason even survived dismemberment." ], "decomposition": [ "What experience must one pass through in order to be considered a matyr?", "Can Jason Voorhes be killed?", "Would #2 being negative make Jason Voorhees unable to undergo #1?" ], "evidence": [ [ [ [ "Martyr-1" ] ], [ [ "Jason Voorhees-40" ] ], [ "operation" ] ], [ [ [ "Martyr-1" ] ], [ [ "Jason Voorhees-40" ] ], [ "operation" ] ], [ [ [ "Martyr-1" ] ], [ [ "Jason Voorhees-40" ] ], [ "operation" ] ] ] }, { "qid": "ec13093ea857962c647f", "term": "Palm Beach, Florida", "description": "Town in Florida, United States", "question": "Could Palm Beach be held in the palm of your hand?", "answer": false, "facts": [ "Palm Beach has a total area of 8.12 square miles.", "The average palm is around 3 inches in length.", "There are 63360 inches in a mile." ], "decomposition": [ "What is the total area of Palm Beach?", "What is the maximum area that can be held on the palm of a human hand?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Palm Beach, Florida-17" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Palm Beach, Florida-53" ] ], [ [ "Human body-6" ] ], [ "operation" ] ], [ [ [ "Palm Beach, Florida-17" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9e477711fe01970389d6", "term": "Alfred Nobel", "description": "Swedish chemist, innovator, and armaments manufacturer (1833–1896)", "question": "Did Alfred Nobel write a banned book?", "answer": true, "facts": [ "Banned books are works which are not available for free access due to government restrictions.", "Alfred Nobel is the author of Nemesis, a prose tragedy in four acts about Beatrice Cenci", "The entire stock of Nemesis was destroyed immediately after Nobel's death except for three copies, being regarded as scandalous and blasphemous. " ], "decomposition": [ "What literary works did Alfred Nobel write?", "Have any of the works listed in #1 been banned?" ], "evidence": [ [ [ [ "Alfred Nobel-21" ] ], [ "operation" ] ], [ [ [ "Alfred Nobel-21" ] ], [ [ "Alfred Nobel-21" ] ] ], [ [ [ "Alfred Nobel-21" ] ], [ "operation" ] ] ] }, { "qid": "9224ee338a77834434c9", "term": "Kidney", "description": "internal organ in most animals, including vertebrates and some invertebrates", "question": "Can a quarter fit inside of a human kidney?", "answer": true, "facts": [ "Kidney stones are hard mineral deposits that can form in the kidneys.", "The largest kidney stone ever recorded was 13 cm wide.", "The diameter of a quarter is 2.4 cm." ], "decomposition": [ "How big is the largest kidney stone ever recorded?", "How wide is a quarter?", "Is #1 larger than #2?" ], "evidence": [ [ [ [ "Kidney stone disease-46" ], "no_evidence" ], [ [ "Quarter (United States coin)-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Quarter (United States coin)-3" ] ], [ "operation" ] ], [ [ [ "Kidney-1" ] ], [ [ "Quarter (United States coin)-1" ] ], [ "operation" ] ] ] }, { "qid": "f46ccce500df67cd8f56", "term": "2008 Summer Olympics", "description": "Games of the XXIX Olympiad, held in Beijing in 2008", "question": "Did Boris Yeltsin watch the 2008 Summer Olympics?", "answer": false, "facts": [ "The 2008 Summer Olympics were held Aug 08 - 24, 2008", "Boris Yeltsin died on Apr 23, 2007" ], "decomposition": [ "What were the date of the 2008 Summer Olympics?", "When did Boris Yeltsin die?", "is #2 before #1?" ], "evidence": [ [ [ [ "2008 Summer Olympics-1" ] ], [ [ "Boris Yeltsin-77" ] ], [ "operation" ] ], [ [ [ "2008 Summer Olympics-1" ] ], [ [ "Boris Yeltsin-77" ] ], [ "operation" ] ], [ [ [ "2008 Summer Olympics-1" ] ], [ [ "Boris Yeltsin-1" ] ], [ "operation" ] ] ] }, { "qid": "137d189866b9e69845c1", "term": "Frost", "description": "coating or deposit of ice that may form in humid air in cold conditions, usually overnight", "question": "Does frost mean that it will be a snowy day?", "answer": false, "facts": [ "Frost forms regularly in areas that experience freezing temperatures and morning dew.", "Frost isn't deposited from the sky like snow, it forms on the ground." ], "decomposition": [ "How is frost formed?", "Does #1 usually involve the falling of snow?" 
], "evidence": [ [ [ [ "Frost-5" ] ], [ [ "Frost-5" ] ] ], [ [ [ "Frost-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Frost-5" ] ], [ [ "Frost-5" ] ] ] ] }, { "qid": "4b10af83d232d301542d", "term": "Rowing (sport)", "description": "Sport where individuals or teams row boats by oar", "question": "Would students at Marist have to petition to get a rowing team?", "answer": false, "facts": [ "Marist is located along the Hudson River.", "Marist college has rowing teams for both men and women." ], "decomposition": [ "What sports teams exist at Marist?", "Does #1 exclude mention of any rowing team?" ], "evidence": [ [ [ [ "Marist-2" ] ], [ [ "Marist Red Foxes-25" ], "operation" ] ], [ [ [ "Marist College-89" ] ], [ [ "Rowing (sport)-1" ], "operation" ] ], [ [ [ "Marist College-89", "Marist Red Foxes-22" ] ], [ "operation" ] ] ] }, { "qid": "f6cc5e1c7456dd41a094", "term": "Zorro", "description": "Fictional character", "question": "Are there multiple Disney Zorro?", "answer": true, "facts": [ "Zorro is a man who is a famous fictional Spanish hero and crime fighter featured in a novel.", "Disney produced a 1957 TV show featuring Zorro the spanish crime fighter. ", "Zorro is spanish for \"fox\".", "\"El Zorro y El Sabueso\" is the spanish title for Disney's animated movie called \"The Fox and The Hound\"." ], "decomposition": [ "Which famous hero was featured in Disney's 1957 TV show?", "What does 'The Fox' in Disney's 'The Fox and the Hound' translate to in Spanish?", "Do #1 and #2 refer to Zorro?" ], "evidence": [ [ [ [ "Zorro-4" ] ], [ [ "The Fox and the Hound-4", "Zorro-1" ] ], [ "operation" ] ], [ [ [ "Zorro-4" ] ], [ [ "Zorro (disambiguation)-1" ] ], [ [ "Zorro-1" ], "operation" ] ], [ [ [ "Zorro-4" ] ], [ [ "The Fox and the Hound-1", "Zorro-1" ] ], [ "operation" ] ] ] }, { "qid": "f9d0edee44a2e73f9765", "term": "Lolcat", "description": "image combining a photograph of a cat with text intended to contribute humour", "question": "Could a hundred thousand lolcats fit on a first generation iPhone?", "answer": true, "facts": [ "Lolcat memes are often in jpeg form", "The average size of a jpeg is 10 to 30 kb", "One gigabyte is 1000000 kb", "The first iPhone had 4GB to 16GB of storage " ], "decomposition": [ "What file formats are lolcats usually saved in?", "What is the typical size of #1?", "What was the storage size range of first generation iPhones?", "What is 100000 multiplied by #2 expressed in gigabytes?", "Is #4 less than or equal to the minimum value of #3?" ], "evidence": [ [ [ [ "Cats and the Internet-1", "Image macro-2" ] ], [ "no_evidence" ], [ [ "IPhone (1st generation)-11", "IPhone (1st generation)-7" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Lolcat-1" ] ], [ "no_evidence" ], [ [ "IPhone (1st generation)-7" ] ], [ [ "Gigabyte-1" ], "operation" ], [ "operation" ] ], [ [ [ "Image macro-1", "Lolcat-1" ] ], [ "no_evidence" ], [ [ "IPhone (1st generation)-10" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c5aabbea64966b4f2d42", "term": "Chevrolet Corvette", "description": "Sports car by the Chevrolet division of General Motors (GM)", "question": "Does selling a 2020 Chevrolet Corvette almost pay for a year at Columbia University?", "answer": true, "facts": [ "The price of a 2020 Chevrolet Corvette is $58,900.", "Columbia University cost $59,430 during the 2018-2019 school year." ], "decomposition": [ "How much does a 2020 Chevrolet Corvette cost?", "How much does a year at Columbia University cost?", "Is #1 almost as much as #2?" 
], "evidence": [ [ [ [ "Chevrolet Corvette-1" ], "no_evidence" ], [ [ "Columbia University-28" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Columbia University-28" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Columbia University-28" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d3013f42b7ca0c0d8685", "term": "Suburb", "description": "Human settlement that is part of or near to a larger city", "question": "Does the book Revolutionary Road give a glimpse at life in a suburb?", "answer": true, "facts": [ "The setting of the book is in Connecticut.", "The area of Connecticut where the book takes place is not in a major city.", "The book takes place in a suburb called Revolutionary Hill Estates." ], "decomposition": [ "What location is the setting of the book Revolutionary Road?", "Is #1 a suburb?" ], "evidence": [ [ [ [ "Revolutionary Road-1" ] ], [ [ "Revolutionary Road-5" ] ] ], [ [ [ "Revolutionary Road-4" ] ], [ "operation" ] ], [ [ [ "Revolutionary Road-4" ] ], [ [ "Connecticut-61" ], "operation" ] ] ] }, { "qid": "b816f51e0fedcae2a789", "term": "Liberty Bell", "description": "bell that serves as a symbol of American independence and liberty", "question": "Is the Liberty Bell still in its original location?", "answer": false, "facts": [ "The Liberty Bell originally was located in Independence Hall in Philadelphia.", "It was moved to a nearby pavilion to accommodate viewers in 1976." ], "decomposition": [ "What was the original location of the Liberty Bell?", "What is the current location of the Liberty Bell?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "The Liberty Bell (band)-2" ], "operation" ], [ [ "Location, Location, Location-3" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Liberty Bell-1" ] ], [ [ "Liberty Bell-1" ] ], [ [ "Liberty Bell-1" ], "operation" ] ], [ [ [ "Liberty Bell-1" ] ], [ [ "Liberty Bell-1" ] ], [ "operation" ] ] ] }, { "qid": "a946324a5ad6d3528da9", "term": "Publishing", "description": "Process of production and dissemination of literature, music, or information", "question": "Does Buddy The Elf know anyone who works in publishing?", "answer": true, "facts": [ "Buddy The Elf is a character from the movie Elf.", "Buddy The Elf's father works in a Manhattan publishing firm." ], "decomposition": [ "Which people are known to the movie character Buddy The Elf?", "Does any of #1 work in publishing?" ], "evidence": [ [ [ [ "Elf (film)-5", "Elf (film)-9" ] ], [ [ "Elf (film)-6" ], "no_evidence" ] ], [ [ [ "Elf (film)-10" ] ], [ "operation" ] ], [ [ [ "Elf (film)-3" ] ], [ "operation" ] ] ] }, { "qid": "b257b34db67a10038f18", "term": "Human overpopulation", "description": "The condition where human numbers exceed the short or long-term carrying capacity of the environment", "question": "Does Rusev have to worry about human overpopulation in his homeland?", "answer": false, "facts": [ "Human overpopulation results from the birthrate exceeding the death rate in a country.", "Rusev is a professional wrestler who was born in Bulgaria.", "The population of Bulgaria decreased by .7% in 2018." ], "decomposition": [ "Who is Rusev?", "What is the homeland of #1?", "Is #2 overpopulated?" 
], "evidence": [ [ [ [ "Rusev (wrestler)-1" ] ], [ [ "Rusev (wrestler)-3" ] ], [ [ "Population decline-50" ], "operation" ] ], [ [ [ "Rusev (wrestler)-2" ] ], [ [ "Rusev (wrestler)-3" ] ], [ [ "Plovdiv-42" ] ] ], [ [ [ "Rusev (wrestler)-1" ] ], [ [ "Rusev (wrestler)-3" ] ], [ [ "Demographics of Bulgaria-4" ] ] ] ] }, { "qid": "6a13b8cefb285c2b81cf", "term": "Mail carrier", "description": "employee of the post office or postal service, who delivers mail to residences and businesses", "question": "Is unanimously elected president's birthday a break for mail carriers?", "answer": true, "facts": [ "The post office has several holidays including: New Year's Day, Washington's Birthday (President's Day), and Veterans Day.", "George Washington was the only US president elected unanimously." ], "decomposition": [ "Which US president was elected unanimously?", "When is #1's birthday?", "Is #2 a break or holiday for the post office?" ], "evidence": [ [ [ [ "1788–89 United States presidential election-6" ] ], [ [ "George Washington-1" ] ], [ [ "Washington's Birthday-1" ] ] ], [ [ [ "George Washington-107" ] ], [ [ "Washington's Birthday-1" ] ], [ [ "Postal holiday-3" ], "operation" ] ], [ [ [ "Living presidents of the United States-3" ], "no_evidence" ], [ [ "Jimmy Carter-5" ] ], [ [ "Public holidays in the United States-16" ] ] ] ] }, { "qid": "316ea9032a8d63df7c91", "term": "Sea otter", "description": "A species of marine mammal from the northern and eastern coasts of the North Pacific Ocean", "question": "Does a sea otter eat spiders?", "answer": false, "facts": [ "Sea otters prey mostly on marine invertebrates and other aquatic creatures.", "Spiders are not aquatic creatures and they reside on land." ], "decomposition": [ "What are sea otters known to feed on?", "Are spiders included in #1?" ], "evidence": [ [ [ [ "Sea otter-49" ] ], [ "operation" ] ], [ [ [ "Sea otter-2" ] ], [ "operation" ] ], [ [ [ "Sea otter-2" ] ], [ "operation" ] ] ] }, { "qid": "1e97ab50309873ca1789", "term": "Bengal cat", "description": "Breed of cat", "question": "Could a Bengal cat hypothetically best Javier Sotomayor's record?", "answer": true, "facts": [ "Javier Sotomayor is an athlete that holds the men's high jump world record of slightly over 8 feet.", "The average cat can jump from 7.5 to 9 feet.", "Bengal cats have powerful hind legs which make them jump higher than other breeds." ], "decomposition": [ "How high is Javier Sotomayor's world record high jump?", "Which breed of cat can jump the highest?", "If the average cat can jump up to 9 feet, then #2 can jump higher than what number?", "Is #3 greater than #1?" ], "evidence": [ [ [ [ "Javier Sotomayor-1" ] ], [ [ "Bengal cat-21" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "High jump-3" ] ], [ [ "Savannah cat-1", "Savannah cat-21" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Javier Sotomayor-1" ] ], [ [ "Caracal-2" ], "no_evidence" ], [ [ "Bengal cat-21" ], "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "59acb3e278f8a75eac86", "term": "Cinnamon", "description": "spice obtained from the inner bark of several trees from the genus Cinnamomum", "question": "Are Chipotle Cinnamon Pork Chops appropriate for a Seder?", "answer": false, "facts": [ "Chipotle Cinnamon Pork Chops are a popular recipe made by applying a cinnamon rub to pork chops.", "The Seder is a Jewish feast day that begins the season of Passover.", "Pork is forbidden by Jewish kosher laws." 
], "decomposition": [ "What kind of event is a Seder?", "Which religious group observes #1?", "Which foods are considered appropriate by the dietary restrictions imposed on #2?", "What are the main ingredients of Chipotle Cinnamon Pork Chops?", "Are #4 included in #3?" ], "evidence": [ [ [ [ "Passover Seder-1" ] ], [ [ "Passover Seder-1" ] ], [ [ "American Jewish cuisine-4" ] ], [ [ "Pork chops and applesauce-1" ] ], [ "operation" ] ], [ [ [ "Passover Seder-1" ] ], [ [ "Passover Seder-2" ] ], [ [ "Kashrut-3" ] ], [ [ "Pork chop-1" ] ], [ "operation" ] ], [ [ [ "Passover Seder-1" ] ], [ [ "Passover Seder-1" ] ], [ [ "American Jewish cuisine-4" ] ], [ [ "Pork chop-1" ] ], [ "operation" ] ] ] }, { "qid": "08db047c63681c02184f", "term": "Basil", "description": "species of plant", "question": "Would the chef at Carmine's restaurant panic if there was no basil?", "answer": true, "facts": [ "Carmines is an Italian family-style restaurant.", "Basil is an essential in Italian cooking." ], "decomposition": [ "What kind of cuisine does Carmine's serve?", "Is basil an essential ingredient in #1?" ], "evidence": [ [ [ [ "Carmine Romano-2" ], "no_evidence" ], [ [ "Italian cuisine-27" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "a4ae401042cea6e3ef15", "term": "Rice pudding", "description": "Dish made from rice mixed with water or milk", "question": "Is most store bought rice pudding made with brown rice?", "answer": false, "facts": [ "Brown rice is more expensive than white rice. ", "Most store bought rice pudding is white in color.", "Brown rice, when cooked, is light brown in color." ], "decomposition": [ "Which rice pudding is most commonly purchased in stores?", "What color is #1", "Which types of rice are commonly used to make rice pudding?", "Is the one among #3 having color of #2 brown rice?" ], "evidence": [ [ [ [ "Rice pudding-10" ] ], [ "no_evidence" ], [ [ "Rice pudding-10" ] ], [ "operation" ] ], [ [ [ "Rice pudding-17" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Rice pudding-4" ] ], [ "operation" ] ], [ [ [ "Rice pudding-10", "Rice pudding-17" ] ], [ [ "Rice pudding-4" ] ], [ [ "Rice pudding-4" ] ], [ "operation" ] ] ] }, { "qid": "fcff106193bfeb59372e", "term": "Easy Rider", "description": "1969 film by Dennis Hopper", "question": "Did Easy Rider make a profit at the theater when it was released?", "answer": true, "facts": [ "Easy Rider had a filming budget of about half a million dollars.", "Upon release in 1969, it earned about 60 million dollars." ], "decomposition": [ "What was the budget of Easy Rider?", "How much did Easy Rider earn upon its release?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Easy Rider-13" ] ], [ [ "Easy Rider-3" ] ], [ [ "Easy Rider-3" ], "operation" ] ], [ [ [ "Easy Rider-13" ] ], [ [ "Easy Rider-32" ] ], [ "operation" ] ], [ [ [ "Easy Rider-3" ] ], [ [ "Easy Rider-3" ] ], [ "operation" ] ] ] }, { "qid": "52c712384f56ec6347ad", "term": "Mail carrier", "description": "employee of the post office or postal service, who delivers mail to residences and businesses", "question": "Do mail carriers need multiple uniforms?", "answer": true, "facts": [ "Mail carriers work throughout the year independent of the weather.", "Mail carriers must often leave their vehicle in various weather conditions." ], "decomposition": [ "What seasons do mail carriers work through?", "In order to make it through all of #1, does one need different clothing pieces?" 
], "evidence": [ [ [ [ "United States Postal Service-145" ], "no_evidence" ], [ [ "Clothing-2" ] ] ], [ [ [ "United States Postal Service creed-1" ], "no_evidence" ], [ [ "Winter clothing-2" ], "operation" ] ], [ [ [ "Season-1" ], "no_evidence" ], [ [ "Mail carrier-8" ], "no_evidence", "operation" ] ] ] }, { "qid": "382571a1ab13a99b1f74", "term": "Shrimp", "description": "Decapod crustaceans", "question": "Is Steve Martin someone who would refuse a dish of shrimp pasta?", "answer": true, "facts": [ "Steve Martin is allergic to shellfish.", "Shrimp are one of the many types of shellfish." ], "decomposition": [ "What types of food is Steve Martin allergic to?", "What type of food is shrimp?", "Is #2 included in #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Shrimp-36" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Steve Martin-1" ], "no_evidence" ], [ [ "Shrimp-1", "Shrimp-34" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Shrimp-16" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e201b35fd4c6f00aa5ae", "term": "Peach", "description": "species of fruit tree (for the fruit use Q13411121)", "question": "Does Princess Peach's dress resemble a peach fruit?", "answer": false, "facts": [ "Peaches have fuzzy red, orange, and yellow skin.", "Princess Peach is a character in the Nintendo Mario Universe.", "Princess Peach's dress is pink and floor length." ], "decomposition": [ "What color is a peach?", "What color is Princess Peach normally seen in?", "What shape is a peach?", "What shape is princess peach?", "Is #1 the same as #2 or is #3 the same as #4?" ], "evidence": [ [ [ [ "Peach (fruit)-5" ], "no_evidence" ], [ [ "Princess Peach-3" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Peach (fruit)-3" ] ], [ [ "Princess Peach-3" ] ], [ [ "Peach (fruit)-3" ], "no_evidence" ], [ [ "Princess Peach-3" ] ], [ "operation" ] ], [ [ [ "Peach-23" ] ], [ [ "Princess Peach-3" ] ], [ [ "Peach-9" ], "no_evidence" ], [ [ "Princess Peach-3" ] ], [ "operation" ] ] ] }, { "qid": "5897ec22db850f7b416e", "term": "John Key", "description": "38th Prime Minister of New Zealand", "question": "As of 2020 have more women succeeded John Key than preceded him?", "answer": false, "facts": [ "John Key, the 38th Prime Minister of New Zealand, has had one woman succeed him as Prime Minister.", "John key was preceded by two women as Prime Minister of New Zealand." ], "decomposition": [ "Which notable position did John Key occupy?", "How many women served as #1 before him?", "How many women have served as #1 after him?", "Is #3 greater than #2?" ], "evidence": [ [ [ [ "John Key-1" ], "no_evidence" ], [ [ "Helen Clark-1" ] ], [ [ "Jacinda Ardern-1" ] ], [ "operation" ] ], [ [ [ "John Key-1" ] ], [ [ "Prime Minister of New Zealand-23" ] ], [ [ "Prime Minister of New Zealand-23" ] ], [ "operation" ] ], [ [ [ "John Key-1" ] ], [ [ "Helen Clark-1", "Jenny Shipley-1" ] ], [ [ "Jacinda Ardern-1" ] ], [ "operation" ] ] ] }, { "qid": "a3759d8aff52a52c7155", "term": "Common Era", "description": "alternative (and religiously neutral) naming of the traditional calendar era, Anno Domini", "question": "Would Methuselah hypothetically hold a record in the Common Era?", "answer": true, "facts": [ "Methuselah was a biblical figure said to have lived until 969.", "The Common Era is the years after the BC era and is alternatively referred to as A.D.", "Jeanne Louise Calment 1875–1997 was the oldest human whose age was well-documented, with a lifespan of 122 years and 164 days." 
], "decomposition": [ "Which period is referred to as the Common Era?", "Who is the oldest human whose age was well documented during #1?", "How old was #2 when she died?", "How old was Methuselah when he died?", "Is #4 greater than #3?" ], "evidence": [ [ [ [ "Common Era-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ [ "Methuselah-1" ] ], [ "operation" ] ], [ [ [ "Common Era-1" ] ], [ [ "Oldest people-2" ] ], [ [ "Oldest people-2" ] ], [ [ "Methuselah-1" ] ], [ "operation" ] ], [ [ [ "Common Era-1" ] ], [ [ "Oldest people-2" ] ], [ [ "Oldest people-2" ] ], [ [ "Methuselah-1" ] ], [ "operation" ] ] ] }, { "qid": "083292799a2e3d07abbf", "term": "Stork", "description": "family of birds", "question": "Would you be likely to see storks at a baby shower?", "answer": true, "facts": [ "Storks are a symbol of the arrival of a new baby.", "Baby showers are parties held to celebrate a woman who will soon give birth." ], "decomposition": [ "What do storks represent?", "Do baby showers celebrate #1?" ], "evidence": [ [ [ [ "White stork-46" ] ], [ [ "Baby shower-1" ] ] ], [ [ [ "White stork-46" ] ], [ [ "Baby shower-1" ] ] ], [ [ [ "White stork-46" ] ], [ "operation" ] ] ] }, { "qid": "4013c96b17809c27699c", "term": "National Diet", "description": "legislature of Japan", "question": "Can Viper Room concert hypothetically be held at National Diet building?", "answer": true, "facts": [ "The Viper Room has a capacity of 250 people.", "The National Diet building has two wings with over 700 seats." ], "decomposition": [ "What is the capacity of the The Viper Room?", "What is the capacity of the National Diet Building?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "The Viper Room-1" ], "no_evidence" ], [ [ "National Diet Building-28" ] ], [ "operation" ] ], [ [ [ "The Viper Room-1" ], "no_evidence" ], [ [ "National Diet Building-11" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "National Diet Building-28" ] ], [ [ "National Diet Building-28" ], "operation" ] ] ] }, { "qid": "6274bd508f7fbca9332f", "term": "Fever", "description": "common medical sign characterized by elevated body temperature", "question": "Can you get a fever from consuming meat?", "answer": true, "facts": [ "A fever is a medical symptom that elevates the core body temperature. ", "Eating under cooked meat can cause food poisoning.", "One of the symptoms of food poisoning is elevated core body temperature. " ], "decomposition": [ "What is a fever?", "What can consuming uncooked meat cause?", "Is #1 a symptom of #2?" ], "evidence": [ [ [ [ "Fever-1" ] ], [ [ "Raw meat-4" ] ], [ [ "Fever-1" ] ] ], [ [ [ "Fever-1" ] ], [ [ "Trichinosis-1", "Trichinosis-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Fever-1" ] ], [ [ "Raw meat-4" ] ], [ [ "Listeria-14" ], "operation" ] ] ] }, { "qid": "af64e22f2e17583b79cf", "term": "Byzantine calendar", "description": "The calendar used by the Eastern Orthodox Church from c. 691 to 1728", "question": "Did Ivan the Terrible use the Byzantine calendar?", "answer": true, "facts": [ "Ivan the Terrible was the Tsar of Russia from 1530 to 1585. ", "The Byzantine calendar was the official calendar of the Russian government from 988 to 1700.", "The Tsar was the leader of the Russian government. " ], "decomposition": [ "What was Ivan the Terrible's role from 1530 to 1585?", "What country was Ivan the Terrible #1 of?", "Was the Byzantine calendar the official calendar of #2 from 1530 to 1585?" 
], "evidence": [ [ [ [ "Ivan the Terrible-1" ] ], [ [ "Ivan the Terrible-1" ] ], [ [ "Byzantine calendar-1" ], "operation" ] ], [ [ [ "Ivan the Terrible-1" ] ], [ [ "Ivan the Terrible-1" ] ], [ [ "Byzantine calendar-1" ] ] ], [ [ [ "Ivan the Terrible-1" ], "no_evidence" ], [ [ "Ivan the Terrible-2" ] ], [ [ "Byzantine calendar-1" ], "operation" ] ] ] }, { "qid": "04863d35bf4e68084907", "term": "United States Air Force", "description": "Air and space warfare branch of the United States Armed Forces", "question": "Are psychiatric patients welcome to join the United States Air Force?", "answer": false, "facts": [ "Having a history of mental illness disqualifies most people from joining the Armed Forces.", "Psychiatric patients are being seen for management of mental illness." ], "decomposition": [ "What do psychiatric patients suffer from?", "Would having #1 disqualify someone from joining the United States Air Force?" ], "evidence": [ [ [ [ "Mental disorder-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Psychiatry-1", "Psychiatry-3" ] ], [ [ "United States Air Force Fitness Assessment-1" ], "no_evidence", "operation" ] ], [ [ [ "Mental disorder-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "dca314ab2d3166f19182", "term": "Rabbi", "description": "teacher of Torah in Judaism", "question": "Would a Rabbi celebrate Christmas?", "answer": false, "facts": [ "A Rabbi is a spiritual leader or religious teacher in Judaism.", "Christmas is a holiday observed by Christians." ], "decomposition": [ "What religion do Rabbis belong to?", "Which religion celebrates Christmas?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Rabbi-17" ] ], [ [ "Christmas-7" ] ], [ "operation" ] ], [ [ [ "Rabbi-1" ] ], [ [ "Christmas-1" ] ], [ "operation" ] ], [ [ [ "Rabbi-1" ] ], [ [ "Christmas-1" ] ], [ "operation" ] ] ] }, { "qid": "80b883ce348170c85aed", "term": "Clementine", "description": "nothospecies of plant, Clementine", "question": "Is clementine pith highly sought after?", "answer": false, "facts": [ "Pith is the white part of the clementine fruit between the orange colored peel and the edible fruit.", "Most people discard the pith after peeling." ], "decomposition": [ "What is a pith?", "Do people usually like to keep #1 after peeling?" ], "evidence": [ [ [ [ "Pith-1" ] ], [ [ "Pith-1" ] ] ], [ [ [ "Pith-1" ] ], [ [ "Clementine-1" ], "no_evidence", "operation" ] ], [ [ [ "Pith-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "068379affae04debca2b", "term": "Nepalese Civil War", "description": "civil war in Nepal between 1996 and 2006", "question": "Did the Nepalese Civil War take place near India?", "answer": true, "facts": [ "The Nepalese Civil War happened in Nepal.", "Nepal is a country that shares a border with India." ], "decomposition": [ "Where did the Nepalese Civil War take place?", "Is #1 near India?" ], "evidence": [ [ [ [ "Nepalese Civil War-1" ] ], [ [ "Nepal-1" ], "operation" ] ], [ [ [ "Nepalese Civil War-1" ] ], [ [ "Nepal-1" ] ] ], [ [ [ "Nepalese Civil War-1" ] ], [ [ "Nepal-1" ], "operation" ] ] ] }, { "qid": "73cc8dadbae6bc0eb433", "term": "Bing (search engine)", "description": "Web search engine from Microsoft", "question": "Can I hold Bing in a basket?", "answer": false, "facts": [ "Bing is a search engine, which is a digital object.", "A basket is a physical object.", "Physical objects cannot hold digital objects." ], "decomposition": [ "What is Bing?", "What kind of product is #1?", "What kind of object is a basket?", "Can #3 hold #2?" 
], "evidence": [ [ [ [ "Bing (search engine)-1" ] ], [ [ "Web search engine-1" ] ], [ [ "Basket-1" ] ], [ "operation" ] ], [ [ [ "Bing (search engine)-1" ] ], [ [ "Web search engine-1" ] ], [ [ "Basket-1" ] ], [ "operation" ] ], [ [ [ "Bing (search engine)-1" ] ], [ [ "Bing (search engine)-1" ] ], [ [ "Basket-1" ] ], [ "operation" ] ] ] }, { "qid": "31a82f72b96dd956c6cf", "term": "Paratrooper", "description": "Military parachutists functioning as part of an airborne force", "question": "Can paratroopers be used in a vacuum?", "answer": false, "facts": [ "Paratroopers use parachutes to glide", "Parachutes function by creating drag in an atmosphere", "There is no atmosphere in a vacuum" ], "decomposition": [ "What equipment do paratroopers use?", "What does #1 need to create in order to function?", "In what does #1 create #2?", "Is #3 present in a vacuum?" ], "evidence": [ [ [ [ "Paratrooper-1" ] ], [ [ "Parachute-1" ] ], [ [ "Drag (physics)-1" ] ], [ [ "Vacuum-1" ], "operation" ] ], [ [ [ "Parachuting-1", "Paratrooper-1" ] ], [ [ "Drag (physics)-1" ] ], [ [ "Atmosphere of Earth-1" ], "no_evidence" ], [ [ "Vacuum-1" ], "operation" ] ], [ [ [ "Paratrooper-1" ] ], [ [ "Parachute-1" ] ], [ [ "Parasitic drag-3" ] ], [ [ "Vacuum-16" ] ] ] ] }, { "qid": "2c71f90e9c5656eb8edc", "term": "Black Sea", "description": "Marginal sea of the Atlantic Ocean between Europe and Asia", "question": "Could the moon fit inside the Black Sea?", "answer": false, "facts": [ "The volume of the Black Sea is 547,000 cubic kilometers.", "The volume of the moon is 21.9 billion cubic kilometers." ], "decomposition": [ "What is the volume of the Black Sea?", "What is the volume of the moon?", "Is #1 higher than #2?" ], "evidence": [ [ [ [ "Black Sea-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Black Sea-2" ] ], [ [ "Moon-48" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Black Sea-28" ], "no_evidence" ], [ [ "Earth-85" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "4ab77024b00c43ab7445", "term": "Thesis", "description": "document submitted in support of candidature for an academic degree", "question": "Could R. Kelly write a college thesis?", "answer": false, "facts": [ "A college thesis is a long and complicated written document.", "R. Kelly claims to be illiterate, which means he cannot read and write. " ], "decomposition": [ "What does writing a college thesis require a person be able to do?", "What does R. Kelly claim to be?", "Can someone who is #2 do #1?" ], "evidence": [ [ [ [ "Reading-1" ] ], [ [ "R. Kelly-9" ] ], [ [ "Dyslexia-20" ] ] ], [ [ [ "Thesis-1" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Thesis-1" ], "no_evidence" ], [ [ "R. Kelly-9" ] ], [ [ "Dyslexia-1" ], "operation" ] ] ] }, { "qid": "80cf7360512c5b284e4c", "term": "Shogi", "description": "Game native to Japan", "question": "Can a chess board be converted to a Shogi board?", "answer": false, "facts": [ "Chess is a game that is made up of 64 squares.", "The Japanese game of Shogi requires a total of 81 squares." ], "decomposition": [ "How many squares are on a chessboard?", "How many squares are on a Shogi board?", "Is #1 equal to #2?" 
], "evidence": [ [ [ [ "Chess-1" ] ], [ [ "Shogi-4" ] ], [ "operation" ] ], [ [ [ "Chess-1" ] ], [ [ "Shogi-4" ] ], [ "operation" ] ], [ [ [ "Chessboard-3" ] ], [ [ "Shogi-4" ] ], [ "operation" ] ] ] }, { "qid": "11cc9b01c009823d5f82", "term": "Short-eared dog", "description": "species of canid", "question": "Has Cesar Millan ever tamed a short-eared dog?", "answer": false, "facts": [ "Cesar Millan is a Mexican-American dog trainer with over 25 years of canine experience.", "The short-eared dog lives in various parts of the rainforest environment, preferring areas with little human disturbance.", "The short-eared dog is a solitary animal and prefers moving in trees away from human and other animal interactions.", "The short-eared dog is a wild animal that is not suitable as a pet." ], "decomposition": [ "Which kind of dogs does Cesar Millan's train?", "What are the social characteristics of the short-eared dog?", "Does #2 match the characteristics of #1?" ], "evidence": [ [ [ [ "Cesar Millan-11" ] ], [ [ "Short-eared dog-9" ] ], [ "operation" ] ], [ [ [ "Cesar Millan-2" ], "no_evidence" ], [ [ "Short-eared dog-1", "Short-eared dog-9" ] ], [ "operation" ] ], [ [ [ "Cesar Millan-2" ] ], [ [ "Short-eared dog-9" ] ], [ "operation" ] ] ] }, { "qid": "605a31607ccb00fc8537", "term": "Byzantine calendar", "description": "The calendar used by the Eastern Orthodox Church from c. 691 to 1728", "question": "Did the Eastern Orthodox Church and the Byzantine Empire ever use the same calendar?", "answer": true, "facts": [ "Eastern Orthodox Church used the Byzantine calendar from c. 691 to 1728", "The Byzantine Empire used the Byzantine calendar from c. 988 to 1453" ], "decomposition": [ "What calendar did the Eastern Orthodox Church use from c. 691 to 1728?", "What calendar did the Byzantine Empire use from c. 988 to 1453?", "Is #1 and #2 the same?" ], "evidence": [ [ [ [ "Eastern Orthodox Church-77" ] ], [ [ "Byzantine calendar-8" ] ], [ "operation" ] ], [ [ [ "Julian calendar-82" ], "no_evidence" ], [ [ "Julian calendar-69" ], "operation" ], [ "operation" ] ], [ [ [ "Eastern Orthodox Church-207" ], "no_evidence" ], [ [ "Byzantine calendar-1" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "3c4beb8b4c0728a34fef", "term": "Ancient Greece", "description": "Civilization belonging to an early period of Greek history", "question": "Were number of states in Ancient Greece underwhelming compared to US states in 1900?", "answer": false, "facts": [ "In the year 1900 there were 42 US states.", "Ancient Greece had several hundred relatively independent city-states called poleis." ], "decomposition": [ "How many states were in the United States in 1900?", "How many city-states were there in Ancient Greece?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Oklahoma-2", "Utah-2" ] ], [ [ "City-state-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Oklahoma-2", "Utah Territory-1" ] ], [ [ "Ancient Greece-47" ] ], [ "operation" ] ], [ [ [ "Oklahoma Territory-52", "Utah-2" ] ], [ [ "Ancient Greece-22" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "66d56f7e42649a3329ad", "term": "Agriculture", "description": "Cultivation of plants and animals to provide useful products", "question": "Is the rise of agriculture attributed to rivers?", "answer": true, "facts": [ "Some of the earliest civilizations on record are in river valleys.", "Early civilizations used water to irrigate crops, leading to the growth of agriculture." 
], "decomposition": [ "What are the basic factors of agricultural production?", "Do rivers provide any of #1?" ], "evidence": [ [ [ [ "Irrigation-45" ] ], [ [ "Irrigation-45" ] ] ], [ [ [ "Agriculture-1" ] ], [ [ "Agriculture-7" ], "operation" ] ], [ [ [ "Agriculture-7" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1feaaac968a53eac2a67", "term": "Goldfish", "description": "freshwater fish, common in aquariums", "question": "Are goldfish more difficult to care for than isopods?", "answer": true, "facts": [ "Isopod care is compared to that of a houseplant.", "Goldfish are notorious for making their tanks dirty quite often." ], "decomposition": [ "What is isopod care comparable to?", "What challenges do Goldfish pose to keepers?", "Is #1 easier to deal with than #2?" ], "evidence": [ [ [ [ "Isopoda-14", "Isopoda-15" ] ], [ [ "Goldfish-28" ] ], [ "operation" ] ], [ [ [ "Isopoda-1", "Isopoda-13" ] ], [ [ "Goldfish-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Isopoda-1" ] ], [ [ "Goldfish-17", "Goldfish-21", "Goldfish-22" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3b38e00d8d1da26f7f02", "term": "Mario", "description": "fictional character from Nintendo's ''Mario'' franchise and the company's mascot", "question": "Does Mario use mushrooms to run faster?", "answer": false, "facts": [ "Mushrooms in the Mario universe are used to provide extra lives and to make Mario grow, providing him an extra hit point. ", "Mario is made to run faster when he wears bunny ears or uses a starman." ], "decomposition": [ "In the Mario Universe what abilities do mushrooms give to Mario when collected?", "Is any of #1 increased speed?" ], "evidence": [ [ [ [ "Super Mario-33", "Super Mario-35", "Super Mario-36" ] ], [ [ "Super Mario-33" ] ] ], [ [ [ "Goomba-1" ] ], [ "operation" ] ], [ [ [ "Super Mario-33" ] ], [ "operation" ] ] ] }, { "qid": "be42f2f3008b22766d2b", "term": "Celery", "description": "species of plant", "question": "Can eating your weight in celery prevent diabetes?", "answer": false, "facts": [ "Diabetes is a disease in which the body cannot process sugar.", "Celery is known as a healthy snack and has 1 gram of sugar per serving.", "The recommended daily intake of sugar to prevent diabetes is less than 36 grams per day for an adult male.", "The average weight of an adult male is 197 pounds." ], "decomposition": [ "If a person has diabetes, what is there body unable to process?", "To prevent diabetes, what is the average amount of #1 an adult man should eat daily in grams?", "How much does the average male weigh in pounds?", "How many grams of sugar does a pound of celery have?", "Is #3 times #4 less than #2?" 
], "evidence": [ [ [ [ "Diabetes-27", "Diabetes-28" ] ], [ [ "Diabetic diet-4" ], "no_evidence" ], [ [ "Human-46" ] ], [ "no_evidence" ], [ [ "Celery-28" ], "operation" ] ], [ [ [ "Diabetes-1" ], "no_evidence" ], [ [ "Prevention of type 2 diabetes-2" ], "no_evidence" ], [ [ "Man-6" ], "no_evidence" ], [ [ "Celery-41" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Diabetes-2", "Outline of diabetes-2" ] ], [ [ "Diabetes-36" ] ], [ [ "Human-46" ] ], [ [ "Celery-28" ] ], [ "operation" ] ] ] }, { "qid": "58be99559ef0ccdbb36c", "term": "Meatball", "description": "dish made from ground meat rolled into a small ball-like form", "question": "Do restaurants associate meatballs with the wrong country of origin?", "answer": true, "facts": [ "Spaghetti and meatballs are a staple on Italian pizzeria menus in the US.", "The Olive Garden, an Italian family restaurant, has several dishes with meatballs.", "Meatballs originated in the Chinese Qin dynasty (221 BC to 207 BC)." ], "decomposition": [ "In what country is the oldest evidence of people eating meatballs found?", "What dish involving meatballs became popular in the United States after being invented in New York City in the 20th century?", "With which national cuisine do Americans typically associate #2?", "Are #3 and #1 different?" ], "evidence": [ [ [ [ "Meatball-2" ] ], [ [ "Spaghetti and meatballs-2" ] ], [ [ "Spaghetti and meatballs-2" ] ], [ [ "Meatball-2", "Spaghetti and meatballs-2" ], "operation" ] ], [ [ [ "Meatball-2" ] ], [ [ "Meatball-8" ] ], [ [ "Spaghetti and meatballs-3" ] ], [ "operation" ] ], [ [ [ "Meatball-2" ] ], [ [ "Spaghetti and meatballs-2" ] ], [ [ "Spaghetti and meatballs-2" ] ], [ "operation" ] ] ] }, { "qid": "a0eb7bcaa09e2ede2c1e", "term": "Central Park Zoo", "description": "Zoo in Central Park, Manhattan, New York City", "question": "Would it be wise to bring a robusto into Central Park Zoo?", "answer": false, "facts": [ "A robusto is a short, fat cigar that is very popular in America.", "The Central Park Zoo has several rules including: no feeding the animals and no smoking.", "NYPD's 19th precinct is only an 11 minute walk away from the Central Park Zoo." ], "decomposition": [ "What is a robusto?", "According to the rules, what can you not bring into The Central Park Zoo?", "Is #1 not in #2?" ], "evidence": [ [ [ [ "Cigar-1" ] ], [ [ "Central Park Zoo-1", "Smoking ban-10" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cigar-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Cigar-1" ], "no_evidence" ], [ [ "Passive smoking-61" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "30fcf62f58849cb4ab05", "term": "Theocracy", "description": "Form of government with religious leaders", "question": "Were the Great Pyramids built by a theocratic government?", "answer": true, "facts": [ "The Great Pyramids were built by the Ancient Egyptians.", "A theocracy is a government that is led by religious leaders or who worships their leader as a god.", "The Pharaohs of Ancient Egypt were worshipped as children of the Sun god Ra." ], "decomposition": [ "Who were the builders of the Great Pyramids?", "How did #1 serve their leaders?", "Could #2 be described as a theocracy?" 
], "evidence": [ [ [ [ "Great Pyramid of Giza-8" ] ], [ "no_evidence" ], [ [ "Theocracy-1" ] ] ], [ [ [ "Giza pyramid complex-17" ] ], [ [ "Ancient Egypt-73" ], "no_evidence" ], [ [ "Theocracy-1" ], "operation" ] ], [ [ [ "Giza pyramid complex-1" ] ], [ [ "Giza pyramid complex-17" ] ], [ [ "Ancient Egypt-3" ], "operation" ] ] ] }, { "qid": "ab833ae041b323f106cf", "term": "Sesame", "description": "species of plant", "question": "Are sesame seeds glued onto hamburger buns?", "answer": false, "facts": [ "Glue is toxic and not used in food production.", "Sesame seeds add texture and visual appeal to hamburger buns.", "Beaten eggwhites are often used to adhere foods to other foods. " ], "decomposition": [ "What do people usually do with hamburger buns?", "Can you #1 sesame seeds?" ], "evidence": [ [ [ [ "Hamburger-1" ] ], [ [ "Sesame-1" ] ] ], [ [ [ "Bread-1", "Bun-1" ] ], [ [ "Sesame-2" ], "operation" ] ], [ [ [ "Hamburger-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "34c35185139b0e8672ee", "term": "Snoop Dogg", "description": "American rapper", "question": "Was Snoop Dogg an adult when Tickle Me Elmo was popular?", "answer": true, "facts": [ "Tickle Me Elmo was first popular in 1996.", "Snoop Dogg would have been 25 when the Tickle Me Elmo craze took off." ], "decomposition": [ "In what year did Tickle Me Elmo become popular?", "In what year was Snoop Dogg born?", "What is the difference between #1 and #2?", "Is #3 greater than or equal to 18?" ], "evidence": [ [ [ [ "Tickle Me Elmo-2" ] ], [ [ "Snoop Dogg-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Tickle Me Elmo-2" ] ], [ [ "Snoop Dogg-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Tickle Me Elmo-2" ] ], [ [ "Snoop Dogg-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "16338eaba71f146a0c40", "term": "Central processing unit", "description": "Central component of any computer system which executes input/output, arithmetical, and logical operations", "question": "Does the central processing unit usually have a dedicated fan?", "answer": true, "facts": [ "The CPU is the main chip on a computer's board, and generates a lot of heat.", "Computer manufacturers generally include a dedicated cooling system over the CPU in addition to the main board fans." ], "decomposition": [ "What do CPUs generate as they work over time?", "Do manufacturers incorporate fans specifically for removing #1 into computer systems?" ], "evidence": [ [ [ [ "Central processing unit-53" ] ], [ "no_evidence", "operation" ] ], [ [ [ "CPU core voltage-14" ] ], [ [ "Computer fan-7" ] ] ], [ [ [ "Central processing unit-46" ] ], [ [ "Computer cooling-15" ] ] ] ] }, { "qid": "06985ebcd197732ef1f6", "term": "James Watson", "description": "American molecular biologist, geneticist, and zoologist", "question": "Did James Watson's partner in studying the double helix outlive him? ", "answer": false, "facts": [ "James Watson studied the double helix with Francis Crick.", "Francis Crick passed away in 2004 at 88 years of age.", "James Watson is alive and is 92 years old." ], "decomposition": [ "Who did James Watson study the double helix with?", "How old was #1 at their death?", "How old is James Watson currently?", "Is #2 greater than #3?" 
], "evidence": [ [ [ [ "Francis Crick-1" ] ], [ [ "Francis Crick-1" ] ], [ [ "James Watson-1" ] ], [ "operation" ] ], [ [ [ "James Watson-1" ] ], [ [ "Francis Crick-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "James Watson-1" ] ], [ [ "Francis Crick-1" ] ], [ [ "James Watson-1" ] ], [ "operation" ] ] ] }, { "qid": "def41e470ce44e6a7590", "term": "Atheism", "description": "Absence of belief in the existence of deities", "question": "Can atheism surpass Christianity in American black communities by 2021?", "answer": false, "facts": [ "83% of blacks in the US identify as Christians.", " In the United States, blacks are less likely than other ethnic groups to be religiously unaffiliated, let alone identifying as atheist.", "African American history, slavery and the civil rights movement are all closely tied to Christianity in America." ], "decomposition": [ "What is the population of black Americans?", "How many out of #1 follow a religion?", "Is #2 close to or less than 50%?" ], "evidence": [ [ [ [ "African Americans-38" ] ], [ [ "African Americans-104" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "African Americans-103", "African Americans-110" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Americans-19" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "45cccf7bbcd884bc0af0", "term": "Led Zeppelin", "description": "English rock band", "question": "Did the band Led Zeppelin own a prime number of gilded gramophones?", "answer": true, "facts": [ "5 is a prime number", "A Grammy Award trophy is a gilded gramophone", "Led Zeppelin won 5 Grammy Awards" ], "decomposition": [ "What award has a trophy which consists of a gilded gramophone?", "How many #1 have Led Zeppelin won?", "Is #2 a prime number?" ], "evidence": [ [ [ [ "Grammy Award-1" ] ], [ [ "Led Zeppelin-57" ], "no_evidence" ], [ [ "Prime number-14" ] ] ], [ [ [ "Grammy Award-1" ] ], [ [ "Led Zeppelin-57" ] ], [ "operation" ] ], [ [ [ "Grammy Award-1" ] ], [ [ "Led Zeppelin-57" ] ], [ [ "Prime number-1" ] ] ] ] }, { "qid": "e239a7c6e9a2efc98ccb", "term": "Quartz", "description": "mineral composed of silicon and oxygen atoms in a continuous framework of SiO₄ silicon–oxygen tetrahedra, with each oxygen being shared between two tetrahedra, giving an overall chemical formula of SiO₂", "question": "Are Doctors of Homeopathy more likely than Doctors of Internal Medicine to recommend Quartz as a treatment?", "answer": true, "facts": [ "Doctors of Homeopathy are practitioners of \"alternative medicine\" ", "In alternative medicine practices, Quartz is believed to have powers.", "Doctors of Internal Medicine have completed a medical residency and do not recommend alternative medicine." ], "decomposition": [ "What do doctors of homeopathy practice?", "What is Quartz believed to have in #1?", "What do doctors of internal medicine study?", "Are #1 or #2 not included in #3?" 
], "evidence": [ [ [ [ "Homeopathy-1" ] ], [ [ "Crystal healing-1" ] ], [ [ "Internal medicine-1" ] ], [ "operation" ] ], [ [ [ "Homeopathy-1" ] ], [ [ "Quartz-1" ] ], [ [ "Internal medicine-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Homeopathy-1" ] ], [ [ "Crystal healing-1", "Quartz-1" ] ], [ [ "Internal medicine-1" ] ], [ "operation" ] ] ] }, { "qid": "73defbf5e28a5bacdb7d", "term": "Rock in Rio", "description": "Brazilian music festival", "question": "Would it be difficult to host Stanley Cup Finals at Rock in Rio?", "answer": true, "facts": [ "The Stanley Cup Finals is the last series in hockey each year.", "Hockey rinks are indoors and require ice for players to skate on.", "Rock in Rio is a massive outdoor music festival in Brazil.", "Rock in Rio takes place in June each year.", "The temperature in Brazil during June averages around 80F." ], "decomposition": [ "Which sport has the Stanley Cup Finals?", "Which kind of surface is required to play #1?", "What time of the year does Rock in Rio take place, and where?", "Do weather conditions in #3 at that time not favor the formation of #2?" ], "evidence": [ [ [ [ "Stanley Cup-1" ] ], [ [ "Ice hockey-1" ] ], [ [ "Rock in Rio-1", "Rock in Rio-21" ] ], [ "operation" ] ], [ [ [ "Stanley Cup-1" ] ], [ [ "Ice hockey-1" ] ], [ [ "Rock in Rio-1", "Rock in Rio-31" ] ], [ [ "Brazil-47" ], "operation" ] ], [ [ [ "Stanley Cup-1" ] ], [ [ "Hockey-16" ] ], [ [ "Rock in Rio-16" ] ], [ [ "Rio de Janeiro-31" ] ] ] ] }, { "qid": "a406e06cd0164c1c6d15", "term": "Auburn, New York", "description": "City in New York, United States", "question": "Would the trees in Auburn, New York be changing colors in September?", "answer": true, "facts": [ "In New York, fall begins during the month of September. ", "When the weather gets cooler in the fall, leaves begin to change colors. " ], "decomposition": [ "In the month of September, what season would it be in New York?", "Do trees change color during #1?" ], "evidence": [ [ [ [ "Climate of New York-2" ] ], [ [ "Autumn leaf color-1" ] ] ], [ [ [ "Autumn-1" ] ], [ [ "Autumn-3" ], "operation" ] ], [ [ [ "Autumn-3" ] ], [ [ "Autumn-3" ] ] ] ] }, { "qid": "a6bf045651f7b6b64035", "term": "Macaque", "description": "genus of Old World monkeys", "question": "Could an elephant easily defeat a male macaque?", "answer": true, "facts": [ "Male macaques range from 16 to 28 inches tall with a weight between 12.13 to 39.7 pounds.", "Elephants are between 7 to 11 feet tall and weigh several thousand pounds.", "Elephants contain large, sharp tusks that can injure or kill other animals." ], "decomposition": [ "How much does a male macaques weigh?", "How much can an elephant weigh?", "How tall is a male macaque?", "How tall is an elephant?", "Is #2 more than #1 and is #4 more than #3?" 
], "evidence": [ [ [ [ "Macaque-4" ] ], [ [ "Elephant-14" ] ], [ [ "Macaque-4" ] ], [ [ "Elephant-12" ] ], [ "operation" ] ], [ [ [ "Macaque-4" ] ], [ [ "Elephant-15" ] ], [ [ "Macaque-4" ] ], [ [ "Elephant-15" ] ], [ "operation" ] ], [ [ [ "Macaque-4" ] ], [ [ "Elephant-15", "Elephantidae-1" ], "no_evidence" ], [ [ "Macaque-4" ] ], [ [ "Elephant-15" ] ], [ "operation" ] ] ] }, { "qid": "bf6d3d190f809c066b43", "term": "NATO", "description": "Intergovernmental military alliance of Western states", "question": "Can Cyril Ramaphosa become Secretary General of NATO?", "answer": false, "facts": [ "Cyril Ramaphosa is the President of South Africa", "The Secretary General of NATO comes from one of NATO's member countries", "South Africa is not a member of NATO" ], "decomposition": [ "What country is Cyril Ramaphosa from?", "What are the requirements for someone to hold office in NATO?", "What countries meet the citizenship requirements of #2?", "Is #1 included in #3?" ], "evidence": [ [ [ [ "Cyril Ramaphosa-1" ] ], [ [ "Secretary General of NATO-13", "Secretary General of NATO-14" ] ], [ [ "Enlargement of NATO-2" ] ], [ "operation" ] ], [ [ [ "Cyril Ramaphosa-1" ] ], [ [ "NATO-32" ] ], [ [ "NATO-32" ] ], [ "operation" ] ], [ [ [ "Cyril Ramaphosa-1" ] ], [ "no_evidence" ], [ [ "NATO-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e7fec4d0e4350a2702c7", "term": "Japanese people", "description": "Ethnic group native to Japan", "question": "Did Pedubastis I know Japanese people?", "answer": false, "facts": [ "Pedubastis I was a pharaoh that died in 800 BC", "Japan's origins are in 600 BC according to a letter of the Sui dynasty.", "Pedubastis I ruled over the country of Egypt." ], "decomposition": [ "When did Pedubastis I die?", "When did the nation of Japan form?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Pedubastis-2" ] ], [ [ "Graphic pejoratives in written Chinese-14" ] ], [ [ "Graphic pejoratives in written Chinese-14", "Pedubastis-2" ], "operation" ] ], [ [ [ "Pedubastis-2" ] ], [ [ "Graphic pejoratives in written Chinese-14" ] ], [ "operation" ] ], [ [ [ "Pedubast I-1" ], "no_evidence" ], [ [ "Japan-9" ] ], [ "operation" ] ] ] }, { "qid": "76d3f6c91518061deb7d", "term": "Drum", "description": "type of musical instrument of the percussion family", "question": "Would a cattle farmer be useful to a drum maker?", "answer": true, "facts": [ "Cattle are often slaughtered for meat and other products, like leather.", "Drums are often made with leather." ], "decomposition": [ "Which animal products would a drum maker need?", "Are #1 commonly obtained from cattle?" ], "evidence": [ [ [ [ "Drumhead-3" ], "no_evidence" ], [ [ "Leather-1" ] ] ], [ [ [ "Drumhead-5" ] ], [ "operation" ] ], [ [ [ "Drum-7" ] ], [ [ "Drum-7", "Leather-5" ] ] ] ] }, { "qid": "4a974e322e3586af4c2f", "term": "Jalapeño", "description": "Hot pepper", "question": "Can children be hurt by jalapeno peppers?", "answer": true, "facts": [ "Jalapeno peppers contain capsaicin. ", "Capsaicin creates a burning sensation in the eyes and can lead to surface injuries. ", "Small children do not understand how to protect themselves from peppers or how to wash their hands properly." ], "decomposition": [ "What do Jalapeno peppers contain?", "Can #1 cause injuries to a child if they are not careful?" 
], "evidence": [ [ [ [ "Jalapeño-15" ] ], [ "no_evidence" ] ], [ [ [ "Capsaicin-1" ] ], [ [ "Capsaicin-17" ], "operation" ] ], [ [ [ "Capsaicin-1" ] ], [ [ "Capsaicin-17" ], "operation" ] ] ] }, { "qid": "ced05cba04173f1248b3", "term": "Brooklyn", "description": "Borough in New York City and county in New York state, United States", "question": "Is Brooklyn known for its bread products?", "answer": true, "facts": [ "Brooklyn and NYC bagels are known around the world for being high quality.", "Brooklyn pizza is regarded as the best in the country. " ], "decomposition": [ "What food items from Brooklyn are known around the world for being high quality?", "What food item from Brooklyn is known as the best in the country?", "Are any items from #1 or #2 bread products?" ], "evidence": [ [ [ [ "Bagel-31" ] ], [ [ "Bagel-21" ] ], [ "operation" ] ], [ [ [ "New York City-137" ], "no_evidence" ], [ [ "Brooklyn-68" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Neapolitan cuisine-45" ], "no_evidence" ], [ [ "Pizza-1" ], "no_evidence" ], [ [ "Pizza-6" ], "operation" ] ] ] }, { "qid": "1af3364389f69c57f600", "term": "Vice President of the United States", "description": "Second highest executive office in United States", "question": "Can Vice President of the United States kill with impunity?", "answer": true, "facts": [ " Vice President Aaron Burr fatally wounded Alexander Hamilton in a duel on July 11, 1804.", "Aaron Burr continued his term as Vice President of the United States after killing Alexander Hamilton.", "US stand-your-ground laws allow a person to defend themselves even to the point of applying lethal force." ], "decomposition": [ "What was the outcome of Vice President Aaron Burr's duel in July, 1804?", "Did#1 lead to loss of his opponent's life and did he continue his term afterwards?" ], "evidence": [ [ [ [ "Aaron Burr-1" ] ], [ [ "Aaron Burr-34", "Aaron Burr-35" ], "operation" ] ], [ [ [ "Burr–Hamilton duel-35" ] ], [ [ "Aaron Burr-35", "Burr–Hamilton duel-35" ], "no_evidence" ] ], [ [ [ "Aaron Burr-4" ] ], [ [ "Aaron Burr-35", "Aaron Burr-4" ] ] ] ] }, { "qid": "076f3f72b7e6d17073eb", "term": "Amtrak", "description": "Intercity rail operator in the United States", "question": "Can Amtrak's Acela Express break the sound barrier?", "answer": false, "facts": [ "Amtrak's Acela Express is the fastest train in the Western Hemisphere and can reach 150 mph.", "A US Navy plane would need to travel 770 mph to break the sound barrier." ], "decomposition": [ "What is the maximum speed of Amtrak's Acela Express?", "What is the minimum speed needed to break the sound barrier?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Acela Express-1" ] ], [ [ "Sound barrier-2" ] ], [ "operation" ] ], [ [ [ "Acela Express-1" ] ], [ [ "Sound barrier-2" ] ], [ "operation" ] ], [ [ [ "Acela Express-20" ] ], [ [ "Sonic boom-5" ] ], [ [ "Counting-14" ] ] ] ] }, { "qid": "b15c390b221b7e0f9b1e", "term": "Legend", "description": "Traditional story of heroic humans.", "question": "Are all characters in Legend of Robin Hood fictional?", "answer": false, "facts": [ "The Legend of Robin Hood tells of an archer that stole from the rich and gave to the poor.", "Robin Hood's main antagonist is the Sheriff of Nottingham.", "The Sheriff of Nottingham is an agent for Prince John who has usurped the throne from his brother Richard.", "Richard I was King of England from 1189-1199.", "Prince John became John, King of England and reigned from 1199-1216." 
], "decomposition": [ "Who is Robin Hood's main antagonist in the Legend of Robin Hood?", "Who is #1's employer?", "Who is #2's brother who was usurped from the throne by him?", "Are #2 and #3 completely fictional characters?" ], "evidence": [ [ [ [ "Robin Hood-2" ] ], [ [ "Sheriff of Nottingham-3" ] ], [ [ "The Legend of Robin Hood-2" ] ], [ [ "John, King of England-1" ], "operation" ] ], [ [ [ "Sheriff of Nottingham-1" ] ], [ [ "Sheriff of Nottingham-5" ] ], [ [ "John, King of England-67" ] ], [ [ "John, King of England-1", "Sheriff of Nottingham-2" ] ] ], [ [ [ "Sheriff of Nottingham-1" ] ], [ [ "Sheriff of Nottingham-5" ] ], [ [ "The Legend of Robin Hood-2" ] ], [ [ "Richard I of England-1" ], "operation" ] ] ] }, { "qid": "c497b4a83bc55a840e6c", "term": "Carrot", "description": "Root vegetable, usually orange in color", "question": "Are raw carrots better for maximizing vitamin A intake?", "answer": false, "facts": [ " 3% of the β-carotene in raw carrots is released during digestion, which can be improved to 39% by pulping, cooking and adding cooking oil", "Retinal is a form of Vitamin A", "Human bodies break down β-carotene into retinal" ], "decomposition": [ "What is the source of Vitamin A in carrots?", "Is absorption of #1 reduced by cooking?" ], "evidence": [ [ [ [ "Vitamin A-13" ] ], [ [ "Carrot-35" ], "operation" ] ], [ [ [ "Carrot-42" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Carrot-42" ] ], [ [ "Carotene-9" ] ] ] ] }, { "qid": "ab70365febc75dcda77d", "term": "Radioactive waste", "description": "wastes that contain nuclear material", "question": "Is radioactive waste a plot device for many shows?", "answer": true, "facts": [ "Radioactive isotopes in an ooze-like waste cause turtles to become the Teenage Mutant Ninja Turtles.", "In the Fox animated hit, Family Guy, radioactive waste is used to turn give the main characters superpowers. ", "The superhero 'Daredevil' encounters radioactive waste that blinds him as a child and gives him super powers." ], "decomposition": [ "What turned turtles into mutant turtles in Teenage Mutant Ninja Turtles?", "Which substance gives the main characters of Family Guy superpowers?", "Which substance gave Daredevil his super powers?", "Are #1. #2 and #3 radioactive waste?" ], "evidence": [ [ [ [ "Teenage Mutant Ninja Turtles (1990 film)-4" ] ], [ [ "Griffin family-1" ], "no_evidence" ], [ [ "Daredevil (Marvel Comics character)-2" ] ], [ "no_evidence" ] ], [ [ [ "Teenage Mutant Ninja Turtles II: The Secret of the Ooze-6" ] ], [ "no_evidence" ], [ [ "Alternative versions of Daredevil-33" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Teenage Mutant Ninja Turtles (Mirage Studios)-9" ], "no_evidence" ], [ "no_evidence" ], [ [ "Daredevil (Marvel Comics character)-2" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "10bf1b31f79f8d9e03fe", "term": "Firewall (computing)", "description": "Software or hardware-based network security system", "question": "Could a firewall be destroyed by a hammer?", "answer": false, "facts": [ "A firewall is not a physical entity and only exists on a network.", "Hammers cannot be used to destroy non-physical entities." ], "decomposition": [ "What enables firewall software to work on a computer?", "Can #1 be physically removed from a computer?", "Can a hammer do #2?" 
], "evidence": [ [ [ [ "Firewall (computing)-1" ] ], [ "no_evidence" ], [ [ "Hammer-1" ], "no_evidence" ] ], [ [ [ "Firewall (computing)-1" ] ], [ "no_evidence" ], [ [ "Hammer-1" ], "no_evidence" ] ], [ [ [ "NPF (firewall)-1" ] ], [ "operation" ], [ [ "Hammer-3" ] ] ], [ [ [ "Firewall (computing)-1" ] ], [ [ "Firewall (computing)-13" ] ], [ "operation" ] ] ] }, { "qid": "1eb4bd98ec77e32e511e", "term": "President of Mexico", "description": "Head of state of the country of Mexico", "question": "Can the President of Mexico vote in New Mexico primaries?", "answer": false, "facts": [ "Mexico is an independent country located in North America.", "New Mexico is a state located in the United States.", "US laws require a voter to be a citizen of the United States.", "The President of Mexico in 2020, Andrés Manuel López Obrador, is a Mexican citizen." ], "decomposition": [ "What is the citizenship requirement for voting in US states such as New Mexico?", "What is the citizenship requirement of any President of Mexico?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Article Two of the United States Constitution-22" ] ], [ [ "President of Mexico-5" ] ], [ "operation" ] ], [ [ [ "Elections in the United States-7" ] ], [ [ "President of Mexico-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Voting rights in the United States-101" ] ], [ [ "President of Mexico-5" ] ], [ "operation" ] ] ] }, { "qid": "7ce5f72988455dfb63da", "term": "Samsung Galaxy", "description": "series of Android mobile computing devices", "question": "Would the operating system of a Samsung Galaxy 1 sound edible?", "answer": true, "facts": [ "The first Samsung Galaxy device ran a version of Android from 2009.", "In 2009, the Android edition was called \"cupcake.\" " ], "decomposition": [ "What are the operating systems of a Samsung Galaxy 1?", "Does #1 sound like something that is edible?" ], "evidence": [ [ [ [ "Samsung Galaxy S-22" ] ], [ "operation" ] ], [ [ [ "Android Cupcake-1", "Samsung Galaxy (original)-1" ] ], [ [ "Cupcake-1" ], "operation" ] ], [ [ [ "Samsung Galaxy S-26" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ea627ac7d9ba306324d3", "term": "Metallica", "description": "American heavy metal band", "question": "Does Metallica use Soulseek?", "answer": false, "facts": [ "Soulseek is a computer program used to illegally share music files", "Metallica has been outspoken in their opposition of digital piracy" ], "decomposition": [ "Does Soulseek enable users to illegally share music files?", "Does Metallica encourage illegal sharing of digital files?", "Are the answers to #1 and #2 the same?" ], "evidence": [ [ [ [ "Soulseek-20" ] ], [ [ "Metallica-29" ] ], [ "operation" ] ], [ [ [ "Soulseek-22" ] ], [ [ "Metallica v. Napster, Inc.-1" ] ], [ "operation" ] ], [ [ [ "Soulseek-1" ] ], [ [ "Metallica-3" ] ], [ "operation" ] ] ] }, { "qid": "a8bbf2e84d0649b1c52c", "term": "Monk", "description": "member of a monastic religious order", "question": "Are monks forbidden from engaging in warfare?", "answer": false, "facts": [ "Monks are members of religious orders that usually take vows of poverty, chastity, and obedience.", "The Knights Templar were a religious order that fought during the Crusades and captured Jerusalem in 1099.", "Buddhist Shaolin monks developed very powerful martial arts skills, have defended temples during conquests." 
], "decomposition": [ "What role did the Knights Templar play during the Crusades?", "What role have Shaolin monks played at temples during conquests?", "Did #1 or #2 not involve warfare?" ], "evidence": [ [ [ [ "Knights Templar-2" ] ], [ [ "Shaolin Kung Fu-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Knights Templar-10" ] ], [ [ "Shaolin Kung Fu-9" ] ], [ "operation" ] ], [ [ [ "Knights Templar-2" ] ], [ [ "Shaolin Monastery-11" ] ], [ "no_evidence" ] ] ] }, { "qid": "6d14da7484991bf588cf", "term": "Royal Air Force", "description": "Aerial warfare service branch of the British Armed Forces", "question": "Did the Royal Air Force fight in the Boxer Rebellion?", "answer": false, "facts": [ "The Boxer Rebellion took place from 1899–1901", "The Royal Air Force was formed on 1 April 1918" ], "decomposition": [ "When was the Royal Air Force formed?", "In what year did the Boxer Rebellion end?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Royal Air Force-1" ] ], [ [ "Boxer Rebellion-1" ] ], [ "operation" ] ], [ [ [ "Royal Air Force-1" ] ], [ [ "Boxer Rebellion-1" ] ], [ "operation" ] ], [ [ [ "Royal Air Force-1" ] ], [ [ "Boxer Rebellion-1" ] ], [ "operation" ] ] ] }, { "qid": "9247b5afb2eef4c9f95b", "term": "Tibia", "description": "larger of the two bones of the leg below the knee for vertebrates", "question": "Can a minotaur hypothetically injure a tibia playing football?", "answer": true, "facts": [ "A minotaur is a mythological creature with the head of a bull and the body of a human.", "The tibia is the bone between the knee and the ankle in humans.", "Tibia injuries are common injuries in contact sports such as football." ], "decomposition": [ "What is the body structure of a Minotaur?", "Where in the human body is the tibia located?", "Does #1 account for the presence of #2 in a Minotaur?", "Are injuries to #2 common during football?", "Are #3 and #4 positive" ], "evidence": [ [ [ [ "Minotaur-1" ] ], [ [ "Tibia-1" ] ], [ "operation" ], [ [ "Running injuries-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Minotaur-1" ] ], [ [ "Tibia-1" ] ], [ "operation" ], [ [ "Anterior cruciate ligament injury-2", "Intercondylar area-6" ] ], [ "operation" ] ], [ [ [ "Minotaur-1" ] ], [ [ "Tibia-1" ] ], [ "operation" ], [ [ "Shin splints-1", "Shin splints-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "adf00eea72beb009ff3e", "term": "Portuguese Empire", "description": "Global empire centered in Portugal", "question": "Did Columbus obtain his funding from the rulers of the Portugese Empire?", "answer": false, "facts": [ " King Ferdinand and Queen Isabella funded Columbus' voyage to the New World.", "King Ferdinand of Argon and Queen Isabella of Castille were the joint rulers of kingdoms of the Iberian Peninsula, which included modern-day Spain but excludes Portugal. ", "King John II of Portugal rejected Columbus' request for funding. " ], "decomposition": [ "Which major voyage did Columbus require funding to embark upon?", "Who funded #1?", "Which kingdoms did #2 rule over?", "Is the Portuguese Empire included in #3?" 
], "evidence": [ [ [ [ "Voyages of Christopher Columbus-62" ] ], [ [ "Voyages of Christopher Columbus-6" ] ], [ [ "The empire on which the sun never sets-12" ] ], [ "operation" ] ], [ [ [ "Christopher Columbus-1" ] ], [ [ "Christopher Columbus-2" ] ], [ [ "Isabella I of Castile-1" ] ], [ "operation" ] ], [ [ [ "Voyages of Christopher Columbus-7" ] ], [ [ "Voyages of Christopher Columbus-12" ] ], [ [ "Voyages of Christopher Columbus-9" ] ], [ [ "Portuguese Empire-4" ], "operation" ] ] ] }, { "qid": "0a95811a52c939156796", "term": "Shrimp", "description": "Decapod crustaceans", "question": "Do shrimp taste best when cooked for a long time?", "answer": false, "facts": [ "Shrimp becomes tough and rubbery if cooked for a long time.", "The ideal texture for shrimp is soft and easily chewed." ], "decomposition": [ "What happens when shrimp is cooked for a long time?", "What is the ideal texture for shrimp?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Shrimp and prawn as food-11" ], "no_evidence" ], [ [ "Shrimp and prawn as food-8" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Shrimp and prawn as food-10" ] ], [ [ "Longjing prawns-1" ] ], [ "operation" ] ], [ [ [ "Shrimp and prawn as food-9" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d2e6dfad9db2e8eec4ed", "term": "Birdwatching", "description": "hobby", "question": "Would a birdwatcher pursue their hobby at a Philadelphia Eagles game?", "answer": false, "facts": [ "Birdwatching is a recreational activity in which people observe and/or listen to the sounds of birds.", "Despite their name, the Philadelphia Eagles are a professional American Football team comprised of humans, not birds." ], "decomposition": [ "What is a birdwwatcher interested in watching?", "What kind of sport does the Philadelphia eagles play?", "Can #1 be found at #2?" ], "evidence": [ [ [ [ "Birdwatching-8" ], "no_evidence" ], [ [ "Philadelphia Eagles-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Birdwatching-1" ] ], [ [ "Philadelphia Eagles-1" ] ], [ [ "American football-1" ] ] ], [ [ [ "Birdwatching-1" ] ], [ [ "Philadelphia Eagles-1" ] ], [ "operation" ] ] ] }, { "qid": "94ac5ee73e1f13747a87", "term": "Cantonese", "description": "Standard dialect of Yue language that originated in the vicinity of Guangzhou (Canton) in southern China", "question": "Is Cantonese spoken in Japan?", "answer": false, "facts": [ "Cantonese is a dialect of Chinese language used in southern China.", "There is no relation to the Japanese language." ], "decomposition": [ "Where is Cantonese widely spoken?", "Is Japan included in #1?" ], "evidence": [ [ [ [ "Cantonese-1" ] ], [ [ "Japan-1" ], "operation" ] ], [ [ [ "Cantonese-2" ] ], [ "operation" ] ], [ [ [ "Cantonese-11", "Cantonese-16" ] ], [ "operation" ] ] ] }, { "qid": "41cd8b744096063d5a83", "term": "Ape", "description": "superfamily of mammals", "question": "Do ants outperform apes on language ability?", "answer": false, "facts": [ "Language involves grammar and vocabulary", "Ants have not shown any understanding of grammar", "Apes include humans who use language to communicate" ], "decomposition": [ "What faculties are required for language?", "Which of #1 do ants possess?", "Which of #1 do apes possess?", "Does #2 have more overlap with #1 than #3 does?" 
], "evidence": [ [ [ [ "Larynx-18" ] ], [ "no_evidence", "operation" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Language-1" ] ], [ [ "Ant-28" ] ], [ [ "Gorilla-28" ] ], [ [ "Ant-28", "Gorilla-28" ] ] ], [ [ [ "Language-10" ] ], [ "no_evidence" ], [ [ "Ape-26" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a9d639b7f43108848d99", "term": "François Mitterrand", "description": "21st President of the French Republic", "question": "Did François Mitterrand serve under Napoleon Bonapart in the French army?", "answer": false, "facts": [ "François Mitterrand was born in 1916.", "Napoleon Bonapart died in 1821." ], "decomposition": [ "When was François Mitterrand born?", "When did Napoleon Bonapart die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "François Mitterrand-1" ] ], [ [ "Napoleon-1" ] ], [ "operation" ] ], [ [ [ "François Mitterrand-1" ] ], [ [ "Napoleon-121" ] ], [ "operation" ] ], [ [ [ "François Mitterrand-1" ] ], [ [ "Napoleon-1" ] ], [ "operation" ] ] ] }, { "qid": "8fe1b7912f41e5653e88", "term": "Tonsure", "description": "hairstyle related to religious devotion", "question": "Would Christopher Hitchens be very unlikely to engage in tonsure?", "answer": true, "facts": [ "Tonsure is the practice of cutting or shaving some or all of the hair on the scalp as a sign of religious devotion or humility.", "Christopher Hitchens was an anti-theist, and he regarded all religions as false, harmful, and authoritarian." ], "decomposition": [ "What were Christopher Hitchens' views on religion?", "What is the purpose of tonsure?", "Would a proponent of #1 have a negative opinion of #2?" ], "evidence": [ [ [ [ "Christopher Hitchens-2" ] ], [ [ "Tonsure-1" ] ], [ "operation" ] ], [ [ [ "Christopher Hitchens-2" ] ], [ [ "Tonsure-1" ] ], [ "operation" ] ], [ [ [ "Christopher Hitchens-32" ] ], [ [ "Tonsure-5" ] ], [ [ "Christopher Hitchens-33" ] ] ] ] }, { "qid": "d47120efd0f09badd848", "term": "Menthol", "description": "chemical compound", "question": "Is menthol associated with Christmas?", "answer": true, "facts": [ "Menthol is the chemical in mint products that give mint its characteristic cool and tangy taste.", "Peppermint is a popular candy flavor during Christmas season." ], "decomposition": [ "What is a popular candy flavor during Christmas?", "Is menthol an ingredient in #1?" ], "evidence": [ [ [ [ "Candy cane-1" ] ], [ [ "Peppermint-2" ] ] ], [ [ [ "Candy cane-1" ] ], [ [ "Menthol-1" ] ] ], [ [ [ "Candy cane-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "88c9faa10774a7b71c8c", "term": "Snoop Dogg", "description": "American rapper", "question": "Does Snoop Dogg advocate a straight edge lifestyle?", "answer": false, "facts": [ "A straight edge lifestyle requires abstaining from the usage of recreational drugs or alcohol.", "Snoop Dogg is famous for his chronic usage of marijuana." ], "decomposition": [ "What is the position of the straight edge advocates on recreational drugs?", "What is Snoop Dogg's position on recreational drugs?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Straight edge-1" ] ], [ [ "Snoop Dogg-89" ] ], [ "operation" ] ], [ [ [ "Straight edge-1" ] ], [ [ "Snoop Dogg-68" ] ], [ "operation" ] ], [ [ [ "Straight edge-1" ] ], [ [ "Snoop Dogg-87", "Snoop Dogg-88", "Snoop Dogg-89" ] ], [ "operation" ] ] ] }, { "qid": "45977557439c92e17155", "term": "Cancer", "description": "group of diseases", "question": "Can amoebas get cancer?", "answer": false, "facts": [ "An amoeba is a single-celled organism.", "Cancer is the improper growth of a mass of cellular tissue, made of many incorrectly formed cells." ], "decomposition": [ "What is cancer the growth of?", "Does an amoeba have #1?" ], "evidence": [ [ [ [ "Cancer-1" ] ], [ [ "Amoeba-1" ], "no_evidence", "operation" ] ], [ [ [ "Cancer-1" ] ], [ [ "Amoeba-1" ], "operation" ] ], [ [ [ "Cancer-1" ] ], [ [ "Amoeba-1" ], "operation" ] ] ] }, { "qid": "b0ab236d19fec61c0111", "term": "Gorilla", "description": "Genus of mammals", "question": "Is it expected that Charla Nash would be anxious near a gorilla?", "answer": true, "facts": [ "In 2009, Charla Nash was attacked and nearly killed by a chimpanzee. ", "While a different species, Gorillas and Chimpanzees have similar physical appearances and are both primates." ], "decomposition": [ "Which animal attacked Charla Nash in 2009?", "Does #1 bear significant similarity to a gorilla?" ], "evidence": [ [ [ [ "Travis (chimpanzee)-8" ] ], [ "operation" ] ], [ [ [ "Travis (chimpanzee)-8" ] ], [ [ "Hominidae-1" ] ] ], [ [ [ "Travis (chimpanzee)-8" ] ], [ [ "Gorilla-1" ], "operation" ] ] ] }, { "qid": "2d768d23cc5920911f05", "term": "Oyster", "description": "salt-water bivalve mollusc", "question": "Should oysters be avoided by people with ADHD?", "answer": false, "facts": [ "Oysters are an excellent source of zinc.", "ADHD is a mental disorder of the neurodevelopmental type characterized by difficulty paying attention.", "Zinc supplementation has been reported to improve symptoms of ADHD and depression." ], "decomposition": [ "Which metal are oysters are known to be an excellent source of?", "Is the consumption of #1 known to worsen symptoms of ADHD?" ], "evidence": [ [ [ [ "Oyster-50" ] ], [ [ "Attention deficit hyperactivity disorder-21", "Attention deficit hyperactivity disorder-59" ], "no_evidence" ] ], [ [ [ "Oyster-50" ] ], [ [ "Attention deficit hyperactivity disorder-59" ], "operation" ] ], [ [ [ "Oyster-50" ] ], [ [ "Attention deficit hyperactivity disorder-59" ] ] ] ] }, { "qid": "26813dc7504fd7355c8c", "term": "KFC", "description": "American fast food restaurant chain", "question": "Does Magnus Carlsen enjoy KFC?", "answer": false, "facts": [ "Magnus Carlsen is a chess grandmaster from Norway", "There are no KFC locations in Norway" ], "decomposition": [ "What country is Magnus Carlsen from?", "In what countries does KFC have a location?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Magnus Carlsen-1" ] ], [ [ "KFC-3" ] ], [ "operation" ] ], [ [ [ "Magnus Carlsen-1" ] ], [ [ "KFC-1" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Magnus Carlsen-1" ] ], [ [ "KFC-51" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "f69c8412d57ad726d658", "term": "Cholera", "description": "Bacterial infection of the small intestine", "question": "Is Cholera alive?", "answer": true, "facts": [ "Cholera are a type of bacteria.", "Bacteria are considered living creatures." ], "decomposition": [ "Is cholera a bacteria?", "Are bacteria considered to be living?", "Are the answers to #1 and #2 the same?" 
], "evidence": [ [ [ [ "Cholera-1" ] ], [ [ "Bacteria-1" ] ], [ "operation" ] ], [ [ [ "Cholera-1" ] ], [ [ "Bacteria-1" ] ], [ "operation" ] ], [ [ [ "Vibrio cholerae-6" ] ], [ [ "Evolution of bacteria-5" ] ], [ "operation" ] ] ] }, { "qid": "d03daae5b38423aec74c", "term": "Mongols", "description": "ethnic group of central Asia", "question": "Would a packed Wembley stadium be likely to have a descendant of the Mongols inside?", "answer": true, "facts": [ "Wembley stadium has a capacity of 90,000 people.", "The Mongols were an ethnic group that dominated the 13th and 14th centuries.", "Genghis Khan was the founder of the Mongol Empire.", "Geneticists have determined that 1 in every 200 men are descended from Genghis Khan." ], "decomposition": [ "What is the capacity of the Wembley stadium?", "Who is the founder of the Mongol empire?", "What is the minimum number of men within which at least one descendant of #2 is found?", "Is #1 divided by #3 greater than or equal to one?" ], "evidence": [ [ [ [ "Wembley Stadium-2" ] ], [ [ "Mongol Empire-2" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Wembley Stadium-2" ] ], [ [ "Mongol Empire-2" ] ], [ [ "Descent from Genghis Khan-22" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Wembley Stadium-2" ] ], [ [ "Mongol Empire-2" ] ], [ [ "Descent from Genghis Khan-22" ] ], [ "operation" ] ] ] }, { "qid": "bea8de56cae6a9dc374c", "term": "Allosaurus", "description": "Genus of large theropod dinosaur", "question": "Is Oculudentavis more dangerous than Allosaurus?", "answer": false, "facts": [ "Oculudentavis was a dinosaur that resembled a tiny bird with a half an inch skull.", "The Allosaurus was a carnivorous dinosaur with teeth described as saws." ], "decomposition": [ "What were the characteristics of the Oculudentavis?", "What were the characteristics of the Allosaurus?", "Are #1 more likely to cause harm than #2?" ], "evidence": [ [ [ [ "Oculudentavis-3" ] ], [ [ "Allosaurus-2" ] ], [ "operation" ] ], [ [ [ "Oculudentavis-1" ] ], [ [ "Allosaurus-2" ] ], [ "operation" ] ], [ [ [ "Oculudentavis-4" ] ], [ [ "Allosaurus-3" ] ], [ "operation" ] ] ] }, { "qid": "1a5d8dbceb6a7411a52b", "term": "Smooth jazz", "description": "category of music", "question": "Would James Cotton's instrument be too strident for a smooth jazz band?", "answer": true, "facts": [ "Smooth jazz is an offshoot of jazz music that relies on a more melodic form.", "Smooth jazz employs the following instruments: saxophone. guitar. piano. trumpet. synthesizer. electric bass. and drums.", "James Cotton was a famous blues harmonica player." ], "decomposition": [ "What instument is James Cotton known for?", "What instuments are used to play Smooth Jazz?", "Is #1 not one of #2?" ], "evidence": [ [ [ [ "James Cotton-1" ] ], [ [ "Smooth jazz-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "James Cotton-1" ] ], [ [ "Smooth jazz-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "James Cotton-1" ] ], [ [ "Musical ensemble-15" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "95b91109c6228074725b", "term": "United Airlines", "description": "Airline in the United States", "question": "Are there tearjerkers about United Airlines flights?", "answer": true, "facts": [ "Tearjerkers typically refer to a genre of movie. ", "United Airlines flight 93 was involved in a terrorist attack in 2001.", "Several flights memorialize the passengers of Flight 93,." 
], "decomposition": [ "What do tearjerkers refer to?", "Which United Airlines flight was involved in a terrorist attack in 2001?", "Are there any #1 in memory of the passengers of #2?" ], "evidence": [ [ [ [ "Melodrama-1" ], "no_evidence" ], [ [ "September 11 attacks-2" ] ], [ [ "United 93 (film)-1" ], "no_evidence", "operation" ] ], [ [ [ "Melodrama-1" ] ], [ [ "American Airlines Flight 11-1", "American Airlines Flight 77-1", "United Airlines Flight 175-1", "United Airlines Flight 93-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Tearjerker-1" ] ], [ [ "United Airlines Flight 811-29" ] ], [ [ "United Airlines Flight 811-29" ] ] ] ] }, { "qid": "1675495e9a3ed30329bd", "term": "Rabbi", "description": "teacher of Torah in Judaism", "question": "Would a rabbi worship martyrs Ranavalona I killed?", "answer": false, "facts": [ "Rabbis are teachers of Judaism.", "Ranavalona I, ruler of Madagascar, killed many Christians that were later determined by the church to be martyrs.", "Judaism does not have a group of saints and martyrs that are prayed to like Christianity.." ], "decomposition": [ "Which religion are rabbis teachers of?", "Which religion were the matyrs killed by Ranavalona I adherents of?", "Do adherent of #1 worship matyrs like those of #2?" ], "evidence": [ [ [ [ "Rabbi-1" ] ], [ [ "Christianity in Madagascar-13" ] ], [ "operation" ] ], [ [ [ "Rabbi-1" ] ], [ [ "Christianity in Madagascar-13" ] ], [ "operation" ] ], [ [ [ "Rabbi-1" ] ], [ [ "Christianity in Madagascar-13" ] ], [ "no_evidence" ] ] ] }, { "qid": "cec0d82b89976b50a4db", "term": "Sweet potato", "description": "species of plant", "question": "Would someone typically confuse a sweet potato with a pineapple?", "answer": false, "facts": [ "Sweet potatoes have a smooth skin and are orange in color.", "Pineapples have a short, stocky stem with tough, waxy leaves and the fruit is yellow in color." ], "decomposition": [ "What are the visual characteristics of a sweet potato?", "What are the visual characteristics of a pineapple?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Sweet potato-2" ] ], [ [ "Pineapple-3" ] ], [ "operation" ] ], [ [ [ "Sweet potato-27" ] ], [ [ "Pineapple-35" ] ], [ "operation" ] ], [ [ [ "Sweet potato-2" ] ], [ [ "Pineapple-27" ] ], [ "operation" ] ] ] }, { "qid": "f3a7a8fe3a00f075e871", "term": "Mercury (element)", "description": "Chemical element with atomic number 80", "question": "Does Mercury help detect coronavirus?", "answer": true, "facts": [ "Mercury is used in thermometers", "Thermometers are used in taking body temperature", "High temperature or fever is one symptom of coronavirus" ], "decomposition": [ "What are the basic symptoms of coronavirus?", "Which instrument is used to measure a symptom among #1 that can be measured?", "Does a type of #2 use mercury?" 
], "evidence": [ [ [ [ "Coronavirus-26" ] ], [ [ "Fever-1", "Medical thermometer-1" ] ], [ [ "Medical thermometer-24" ] ] ], [ [ [ "Coronavirus-26" ] ], [ [ "Thermometer-1" ] ], [ "operation" ] ], [ [ [ "Human coronavirus NL63-4" ] ], [ [ "Fever-7", "Medical thermometer-12" ] ], [ [ "Thermometer-44" ] ] ] ] }, { "qid": "aef16532a9009fd318e2", "term": "The Jackson 5", "description": "American pop music family group", "question": "Was The Jackson 5 bigger family band than The Isley Brothers?", "answer": true, "facts": [ "The Jackson 5 has sold over 100 million albums worldwide.", "The Eisley Brothers have sold over 18 million albums.", "The Jackson 5 consisted of Jackie, Tito, Jermaine, Marlon and Michael.", "The Isley Brothers consisted of brothers O'Kelly, Rudolph. Ronald, and Vernon." ], "decomposition": [ "How many albums has the Jackson 5 sold?", "How many albums has the Eisley Brothers sold?", "How many people were in the Jackson 5?", "How many people made up the Eisley Brothers?", "Is #1 greater than #2 and is #3 greater than #4?" ], "evidence": [ [ [ [ "The Jackson 5-4" ] ], [ [ "The Isley Brothers-5" ] ], [ [ "Ronnie Rancifer-1" ] ], [ [ "The Isley Brothers-4" ] ], [ "operation" ] ], [ [ [ "The Jackson 5-4" ] ], [ [ "The Isley Brothers-5" ] ], [ [ "The Jackson 5-1" ] ], [ [ "The Isley Brothers-1" ] ], [ "operation" ] ], [ [ [ "The Jackson 5-4" ] ], [ [ "The Isley Brothers-5" ] ], [ [ "The Jackson 5-1" ] ], [ [ "The Isley Brothers-4" ] ], [ "operation" ] ] ] }, { "qid": "d3c99400a786bc11c1fb", "term": "Ginger", "description": "Species of plant", "question": "Does a Starbucks passion tea have ginger in it?", "answer": false, "facts": [ "Starbucks Passion tea features cinnamon, apple, licorice root, and lemongrass flavors.", "Ginger is a spicy flavored plant.", "Starbucks Passion tea is a sweet drink." ], "decomposition": [ "What ingredients are in the Starbucks Passion Tea?", "Is ginger part of #1?" ], "evidence": [ [ [ [ "Starbucks-19" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9f43aedfec93ab7c6cc7", "term": "Ammonia", "description": "Chemical compound of nitrogen and hydrogen", "question": "Do Shivambu practitioners believe ammonia is unhealthy?", "answer": false, "facts": [ "Shivambu is another term for 'Urine Therapy', an alternative belief about healing with urine.", "Human urine contains ammonia.", "Shivambu practitioners believe that ingesting urine is healthy." ], "decomposition": [ "What is another term for Shivambu?", "What did #1 believe could have healing properties?", "Is #2 void of ammonia?" ], "evidence": [ [ [ [ "Urine therapy-1" ] ], [ [ "Urine therapy-1" ] ], [ [ "Urine therapy-5" ], "operation" ] ], [ [ [ "Urine therapy-1" ] ], [ [ "Urine therapy-2" ] ], [ [ "Urine-32" ], "operation" ] ], [ [ [ "Urine therapy-1" ] ], [ [ "Urine therapy-1" ] ], [ [ "Ammonia-32", "Ammonia-32", "Ammonia-90" ], "no_evidence" ] ] ] }, { "qid": "25a088d9d2ce674e639a", "term": "Grapefruit", "description": "citrus fruit", "question": "Can eating grapefruit kill besides allergies or choking?", "answer": true, "facts": [ "Grapefruit is a citrus fruit consumed mostly during the summer months.", "Chemicals in grapefruit can interact with medications such as statins.", "Grapefruit can lead to too much absorption of statin medicine.", "Too much statins can cause severe muscle pain, liver damage, kidney failure and death. 
" ], "decomposition": [ "What health risks associated with eating grapefruit could lead to death?", "Is #1 more than just allergy and choking?" ], "evidence": [ [ [ [ "Grapefruit-16" ] ], [ [ "Grapefruit-16" ] ] ], [ [ [ "Grapefruit-18" ] ], [ [ "Drug overdose-1" ] ] ], [ [ [ "Grapefruit–drug interactions-3" ] ], [ "operation" ] ] ] }, { "qid": "46328b2913ed93358007", "term": "Jealousy", "description": "emotion referring to the thoughts and feelings of insecurity, fear, and envy over relative lack of possessions, status or something of great personal value", "question": "Should someone prone to jealousy be in a polyamorous relationship?", "answer": false, "facts": [ "Polyamorous people are those who seek to have an intimate relationship with more than one partner.", "In relationships, untreated jealousy typically leads to a breakup." ], "decomposition": [ "What kind of relationship would a polyamorous person engage in?", "Would a jealous person be comfortable with #1?" ], "evidence": [ [ [ [ "Polyamory-10" ] ], [ [ "Polyamory-22" ] ] ], [ [ [ "Polyamory-1" ] ], [ [ "Jealousy-3", "Jealousy-32" ], "operation" ] ], [ [ [ "The Industrial Christian Home for Polygamous Wives-3" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "e740feb0c2f9aa799675", "term": "B", "description": "letter in the Latin alphabet", "question": "Would early Eastern Canadian Natives language have use of the letter B?", "answer": false, "facts": [ "The Early Eastern Canadian Natives were a group of people that spoke the Inuktitut language.", "The Inuktitut language began as an oral language with no letters, only uvular sounds.", "The later Inuktitut language has no letters that resemble the Latin alphabet." ], "decomposition": [ "What language did Eastern Canadian Natives speak?", "What kind of language is #1?", "Does #2 involve the use of letters?" ], "evidence": [ [ [ [ "Inuktitut-1" ] ], [ [ "Inuktitut-30" ] ], [ [ "Syllabary-1" ] ] ], [ [ [ "Métis-1" ], "no_evidence" ], [ "no_evidence" ], [ [ "Indigenous peoples in Canada-59" ], "no_evidence", "operation" ] ], [ [ [ "Inuktitut-1" ] ], [ [ "Inuktitut-1" ] ], [ [ "Inuktitut-1" ], "operation" ] ] ] }, { "qid": "8b41a17e65ef7f6e22b5", "term": "Advertising", "description": "Form of communication for marketing, typically paid for", "question": "During the pandemic, is door to door advertising considered inconsiderate?", "answer": true, "facts": [ "Door to door advertising involves someone going to several homes in a residential area to make sales and leave informational packets.", "During the COVID-19 pandemic, the CDC recommends that people limit their travel to essential needs only.", "During the COVID-19 pandemic, citizens are advised to stay home and to limit their interaction with others.", "During the COVID-19 pandemic, people are encouraged to remain six feet away from each other at all times.", "The more people that someone interacts with, the higher the likelihood of them becoming a vector for the COVID-19 virus." ], "decomposition": [ "What does door to door advertising involve a person to do?", "During the COVID-19 pandemic, what does the CDC advise people to do in terms of traveling?", "During the COVID-19 pandemic, what does the CDC advise people to do in terms of interaction with others?", "Does doing #1 go against #2 and #3?" 
], "evidence": [ [ [ [ "Door-to-door-1" ] ], [ [ "Cloth face mask-12" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Door-to-door-1" ] ], [ [ "Coronavirus recession-13" ] ], [ [ "Social distancing-30" ] ], [ "operation" ] ], [ [ [ "Door-to-door-1" ] ], [ [ "Stay-at-home order-18" ], "no_evidence" ], [ [ "Social distancing-1" ] ], [ "operation" ] ] ] }, { "qid": "09348bfb96097f3b4a99", "term": "Mickey Mouse", "description": "Disney cartoon character", "question": "Is Mickey Mouse hypothetically unlikely to make a purchase at Zazzle?", "answer": true, "facts": [ "Mickey Mouse is a Disney character that has starred in numerous movies and TV specials.", "Mickey Mouse wears a pair of red pants and never wears a shirt.", "Zazzle is a website that specializes in custom T-shirts." ], "decomposition": [ "What clothing pieces does Micky Mouse typically wear?", "What clothing pieces does Zazzle specialize in?", "Is there no overlap between #1 and #2?" ], "evidence": [ [ [ [ "Mickey Mouse-49" ] ], [ [ "Zazzle-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mickey Mouse-1" ] ], [ [ "Zazzle-3" ] ], [ [ "Mickey Mouse-1", "Zazzle-1" ] ] ], [ [ [ "Mickey Mouse-49" ] ], [ [ "Zazzle-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d0ad95f36e42f85cc94d", "term": "Linus Torvalds", "description": "Creator and lead developer of Linux kernel", "question": "Is Maruti Suzuki Baleno an efficient car for Linus Torvald's family?", "answer": true, "facts": [ "Linus Torvald has a family consisting of five people including his wife and children.", "The Maruti Suzuki Baleno is and Indian car that can seat five people." ], "decomposition": [ "How many people can sit in a Maruti Suzuki Baleno?", "How many people are in Linus Torvald's family?", "Is #1 at least equal or greater than #2?" ], "evidence": [ [ [ [ "Suzuki Baleno (2015)-4" ] ], [ [ "Linus Torvalds-20" ] ], [ "operation" ] ], [ [ [ "Suzuki Baleno (2015)-14" ], "no_evidence" ], [ [ "Linus Torvalds-20" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Linus Torvalds-20" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5ab66b27a0caee886970", "term": "Miami", "description": "City in Florida, United States", "question": "Would it be common to find a penguin in Miami?", "answer": false, "facts": [ "Penguins are native to the deep, very cold parts of the southern hemisphere.", "Miami is located in the northern hemisphere and has a very warm climate." ], "decomposition": [ "Where is a typical penguin's natural habitat?", "What conditions make #1 suitable for penguins?", "Are all of #2 present in Miami?" 
], "evidence": [ [ [ [ "Penguin-2", "Penguin-48", "Penguin-50" ] ], [ [ "Penguin-48" ] ], [ [ "Miami-20" ], "operation" ] ], [ [ [ "Chinstrap penguin-5", "Penguin-48" ] ], [ [ "Penguin-48" ] ], [ [ "Miami-20", "Miami-22" ] ] ], [ [ [ "Penguin-1" ] ], [ [ "Penguin-2" ] ], [ [ "Miami-20" ], "operation" ] ] ] }, { "qid": "9422300529f87f9917bf", "term": "Benjamin Franklin", "description": "American polymath and a Founding Father of the United States", "question": "Is Benjamin Franklin a prime candidate to have his statues removed by Black Lives Matter movement?", "answer": true, "facts": [ "The Black Lives Matter movement is a social movement advocating for racial equality.", "Benjamin Franklin, a famous founding father, has his image on many monuments and on American currency.", "Members of the Black Lives Matter movement petitioned for statues of Christopher Columbus to be removed due to his subjugation of Native Americans.", "Benjamin Franklin's 1730s newspaper, The Philadelphia Gazette, posted ads for black slaves.", "Benjamin Franklin owned two slaves, George and King, who worked as personal servants." ], "decomposition": [ "What social issue motivates the Black Lives Matter movement?", "Did Benjamin Franklin act against achieving #1?" ], "evidence": [ [ [ [ "Black Lives Matter-10" ] ], [ [ "Benjamin Franklin-135", "Benjamin Franklin-136" ] ] ], [ [ [ "Black Lives Matter-14" ], "no_evidence" ], [ [ "Benjamin Franklin-5" ], "operation" ] ], [ [ [ "Black Lives Matter-10" ] ], [ [ "Benjamin Franklin-5" ], "operation" ] ] ] }, { "qid": "cdcb51a6a8a0b9cdb2a9", "term": "Pharmacy", "description": "academic discipline studying preparation and dispensation of medicinal", "question": "Is ID required to get all medications from all pharmacies?", "answer": false, "facts": [ "Controlled substance prescriptions can require an ID for pickup depending on state law.", "Non controlled substances can be picked up without ID by anybody who knows the patient information.", "State laws regarding pharmacies ID restrictions are not the same across the country." ], "decomposition": [ "Which category of medications usually require an ID for pickup?", "What are the regulations guiding #1 across states in the US?", "Are #2 the same across all states?" ], "evidence": [ [ [ [ "Medication-37" ] ], [ [ "Medication-37", "Over-the-counter drug-5" ] ], [ [ "Over-the-counter drug-5" ], "no_evidence" ] ], [ [ [ "Controlled Substances Act-45" ] ], [ [ "Combat Methamphetamine Epidemic Act of 2005-6" ] ], [ [ "Controlled Substances Act-14" ], "operation" ] ], [ [ [ "Opiate-1" ], "no_evidence" ], [ [ "Uniform Controlled Substances Act-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "99e34b51538c03fcd8bb", "term": "Fiat Chrysler Automobiles", "description": "Multinational automotive manufacturing conglomerate", "question": "Is Fiat Chrysler gaining a new overall corporate identity?", "answer": true, "facts": [ "The company is renaming itself Stellantis following the completion of its merger.", "There are 14 automobile brands owned by the company, which will be keeping their names and logos." ], "decomposition": [ "What plans are underway as regards naming after the completion of the Fiat Chrysler merger?", "Does #1 involve a change of the collective corporate identity?" 
], "evidence": [ [ [ [ "Fiat Chrysler Automobiles-37" ], "no_evidence" ], [ [ "Corporate identity-2" ], "operation" ] ], [ [ [ "Fiat Chrysler Automobiles-1" ] ], [ [ "Fiat Chrysler Automobiles-1" ], "no_evidence" ] ], [ [ [ "Groupe PSA-23" ] ], [ "operation" ] ] ] }, { "qid": "72c3d20cf35ba7643311", "term": "The Jungle Book", "description": "1894 children's book by Rudyard Kipling", "question": "Did Disney get most of Rudyard Kipling's The Jungle Book profits?", "answer": true, "facts": [ "Rudyard Kipling's 1894 book The Jungle Book has was adapted into several Disney films.", "The 2016 film The Jungle Book grossed over 966 million dollars.", "Disney's 1967 film version of The Jungle Book grossed over 378 million dollars.", "The Jungle Book was not a worldwide phenomenon in Kipling's time." ], "decomposition": [ "When did Rudyard Kipling write \"The Jungle Book\"?", "The 1967 and 2016 adaptations of the book was produced by which media company?", "When did #2 produce these movies?", "Is #3 several decades after #1 and #2 the same as Disney?" ], "evidence": [ [ [ [ "Rudyard Kipling-2" ] ], [ [ "The Jungle Book (2016 film)-1" ] ], [ [ "The Jungle Book (2016 film)-1" ] ], [ "operation" ] ], [ [ [ "The Jungle Book-1" ] ], [ [ "The Jungle Book (franchise)-1" ], "no_evidence" ], [ [ "The Jungle Book (franchise)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "The Jungle Book-1" ] ], [ [ "The Jungle Book-26" ] ], [ [ "Adventures of Mowgli-1" ] ], [ "operation" ] ] ] }, { "qid": "3e70c2ee8dd1ed87cc09", "term": "Kidney", "description": "internal organ in most animals, including vertebrates and some invertebrates", "question": "Does an organ donor need to be dead to donate a kidney?", "answer": false, "facts": [ "The average human has two kidneys.", "Only one kidney is required to function as a healthy person.", "Living organ donors will sometimes donate their spare kidney to someone experiencing failure of both their kidneys." ], "decomposition": [ "How many kidneys does the average person have?", "How many kidneys does a person require to function?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Kidney-1" ] ], [ [ "Kidney-33" ] ], [ "operation" ] ], [ [ [ "Kidney-1" ] ], [ [ "Organ donation-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Kidney-1" ] ], [ [ "Kidney-33" ] ], [ "operation" ] ] ] }, { "qid": "fa1e09fee5546d5b7b65", "term": "Kelly Clarkson", "description": "American singer-songwriter, actress, and television personality", "question": "Would Kelly Clarkson's voice shake glass?", "answer": true, "facts": [ "Glass vibrates at its resonant frequency which is around a middle C note.", "Kelly Clarkson has an impressive three octave vocal range.", "Kelly Clarkson's Never Enough is in the key of A-flat.", "A-flat is above middle C in terms of notes." ], "decomposition": [ "At what note would glass start to vibrate?", "In Kelly Clarkson's song Never Enough, what key is the song sung in?", "Is #2 above #1?" 
], "evidence": [ [ [ [ "Acoustic resonance-51" ], "no_evidence" ], [ [ "Never Again (Kelly Clarkson song)-5" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Crystallophone-2", "Resonance-1", "Resonance-8" ], "no_evidence" ], [ [ "Kelly Clarkson-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ad3fef8d0670d91eff56", "term": "Railroad engineer", "description": "person who operates a train on a railroad or railway", "question": "Is a railroad engineer needed during NASCAR events?", "answer": false, "facts": [ "Railroad engineers work on trains and railway systems", "NASCAR events feature automobile races" ], "decomposition": [ "On what kind of transportation do railroad engineers work?", "NASCAR involves what kind of transportation?", "Is #1 and #2 the same?" ], "evidence": [ [ [ [ "Edward Banfield (railroad engineer)-1" ] ], [ [ "Safety car-34" ] ], [ "operation" ] ], [ [ [ "Train driver-1" ] ], [ [ "NASCAR-1" ] ], [ "operation" ] ], [ [ [ "Train driver-1" ] ], [ [ "NASCAR-1" ] ], [ "operation" ] ] ] }, { "qid": "7281474f2760dce03f39", "term": "Crane (bird)", "description": "family of birds", "question": "Can crane slamdunk?", "answer": false, "facts": [ "Crane are a type of bird. ", "Slamdunking is a basketball maneuver in which the player puts the basketball in the basket with one or two hands above the rim.", "Birds don't have hands." ], "decomposition": [ "What is a slamdunk?", "What body parts are needed to perform #1?", "Do cranes have #2?" ], "evidence": [ [ [ [ "Slam dunk-1" ] ], [ [ "Slam dunk-6" ] ], [ [ "Crane (bird)-1" ], "operation" ] ], [ [ [ "Slam dunk-1" ] ], [ [ "Hand-1" ] ], [ [ "Crane (bird)-1" ], "operation" ] ], [ [ [ "Slam dunk-1" ] ], [ [ "Slam dunk-1" ] ], [ [ "Crane (bird)-1" ], "no_evidence" ] ] ] }, { "qid": "cd43ae9cd3bb64c45247", "term": "QWERTY", "description": "keyboard layout where the first line is \"QWERTYUIOP\"", "question": "Did the Qwerty keyboard layout predate computers?", "answer": true, "facts": [ "The Qwerty layout was originally developed for mechanical typewriters in the 1870s.", "ENIAC was considered to be the first computer, built in the late 1940s." ], "decomposition": [ "When was the QWERTY keyboard layout developed?", "When was the modern computer invented?", "Is #1 before #2?" ], "evidence": [ [ [ [ "QWERTY-1" ] ], [ [ "Computer-3", "QWERTY-20" ] ], [ "operation" ] ], [ [ [ "QWERTY-1" ] ], [ [ "Computer-25", "Computer-26" ] ], [ "operation" ] ], [ [ [ "QWERTY-8" ] ], [ [ "Computer-23" ] ], [ "operation" ] ] ] }, { "qid": "e041c5791de5ec89261f", "term": "Lymph node", "description": "organ of the lymphatic system", "question": "Are tumors in the lymph nodes ignorable?", "answer": false, "facts": [ "Lymphoma is a serious type of cancer that can begin with tumors in the lymph nodes.", "Lymphoma can kill when left untreated." ], "decomposition": [ "What are the threats posed by tumors in the lymph nodes?", "Is it safe for a person's health to ignore #1?" ], "evidence": [ [ [ [ "Lymph node-25" ] ], [ [ "Lymph node-27" ] ] ], [ [ [ "Lymph node-25" ] ], [ [ "Hodgkin lymphoma-6" ] ] ], [ [ [ "Lymph node-25", "Lymph node-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "4ec8cd118be8a53ec516", "term": "Koala", "description": "An arboreal herbivorous marsupial native to Australia.", "question": "Would Alexander Hamilton have known about koalas?", "answer": false, "facts": [ "Alexander Hamilton died in 1804.", "The first published depiction of a koala was in 1810." 
], "decomposition": [ "When were Koalas first sighted?", "When did Alexander Hamilton die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Koala-33", "Koala-35" ] ], [ [ "Alexander Hamilton-1" ] ], [ "operation" ] ], [ [ [ "Koala-3" ], "no_evidence" ], [ [ "Alexander Hamilton-1" ] ], [ "operation" ] ], [ [ [ "Koala-3" ] ], [ [ "Alexander Hamilton-109" ] ], [ "operation" ] ] ] }, { "qid": "bdc60c16ac8d47491464", "term": "Spider-Man", "description": "Fictional Marvel superhero", "question": "Did Spiderman fight against Falcon in the MCU?", "answer": true, "facts": [ "In Captain America: Civil War, Iron Man and Captain America became enemies following a disagreement.", "Iron Man summoned Spiderman to fight with his team of still-loyal Avengers.", "Falcon was one of Captain America's best friends and supported the Captain in the conflict.", "Therefore, Spiderman and Falcon were on opposite teams during the inter-Avenger battle in the movie." ], "decomposition": [ "In the marvel movie Captain America: Civil War, which factions were the avengers divided into?", "Were Spiderman and Falcon on opposing sides of #1?" ], "evidence": [ [ [ [ "Captain America: Civil War-1" ] ], [ "no_evidence" ] ], [ [ [ "Captain America: The Winter Soldier-1" ] ], [ [ "Peter Parker (Marvel Cinematic Universe)-7", "The Falcon and the Winter Soldier-5" ], "operation" ] ], [ [ [ "Captain America: Civil War-1" ] ], [ [ "Falcon (comics)-38", "Spider-Man-27" ] ] ] ] }, { "qid": "fa472b6db8dcec9abcff", "term": "Torso", "description": "the central part of the living body", "question": "Will the torso be safe from blows to the largest and smallest bones in body?", "answer": true, "facts": [ "The three smallest bone in the body are malleus, incus, and stapes.", "Malleus, incus, and stapes are located in the ear.", "The femur is the largest bone in the body.", "The femur is located in the leg.", "The torso is located in the center of the body." ], "decomposition": [ "Which part of the human body is the torso?", "Which is the largest and smallest bone in the human body?", "Where are #2 located?", "Is any of #3 part of #1?" ], "evidence": [ [ [ [ "Torso-1" ] ], [ [ "Femur-1", "Stapes-1" ] ], [ [ "Human leg-1", "Stapes-1" ] ], [ [ "Abdomen-1", "Perineum-1", "Thorax-1" ], "operation" ] ], [ [ [ "Torso-1" ] ], [ [ "Bone-3" ] ], [ [ "Femur-7" ] ], [ [ "Femur-7" ], "operation" ] ], [ [ [ "Torso-1" ] ], [ [ "Bone-3" ] ], [ [ "Thigh-1" ] ], [ "operation" ] ] ] }, { "qid": "7840c862ca1d7455284d", "term": "Dalai Lama", "description": "Tibetan Buddhist spiritual teacher", "question": "Does the Dalai Lama believe in the divine barzakh?", "answer": false, "facts": [ "The Dalai Lama is the spiritual leader of Tibetan Buddhism", "The divine barzakh is a concept from Islam" ], "decomposition": [ "What religion contains the concept of the divine barzakh?", "Is the Dalai Lama a member of #1?" ], "evidence": [ [ [ [ "Barzakh-9" ] ], [ [ "Dalai Lama-1" ] ] ], [ [ [ "Barzakh-9" ] ], [ [ "Dalai Lama-1" ], "operation" ] ], [ [ [ "Astral plane-3" ] ], [ [ "Dalai Lama-1" ], "operation" ] ] ] }, { "qid": "e87406dacc533ee3a3af", "term": "Pig Latin", "description": "secret language game", "question": "Is Pig Latin related to real Latin?", "answer": false, "facts": [ "Pig Latin is based on English.", "It is formed by moving consonants and syllables.", "Real Latin is a separate language distinct from English." ], "decomposition": [ "Which language is Pig Latin based on?", "Is #1 Latin?" 
], "evidence": [ [ [ [ "Pig Latin-1" ] ], [ "operation" ] ], [ [ [ "Pig Latin-6" ] ], [ [ "Pig Latin-6" ], "operation" ] ], [ [ [ "Pig Latin-1" ] ], [ [ "English language-1" ], "operation" ] ] ] }, { "qid": "effcb862cfb341b8774c", "term": "Shiva", "description": "One of the principal deities of Hinduism.", "question": "Is Shiva's divine dance an ancient physical fitness pose?", "answer": false, "facts": [ "In yoga as exercise, the pose called Natarajasara represents Shiva's divine dance", "This pose is new, originating in the early 20th century", "Exercise is an activity that maintains physical fitness" ], "decomposition": [ "Which yoga pose is referred to as Shiva's divine dance?", "When did #1 originate?", "Is #2 so long ago as to be considered ancient?" ], "evidence": [ [ [ [ "Natarajasana-1" ] ], [ [ "Nataraja-25" ] ], [ [ "Ancient history-2" ], "operation" ] ], [ [ [ "Natarajasana-1" ] ], [ [ "Shri Yogendra-1" ] ], [ [ "Ancient history-2" ], "operation" ] ], [ [ [ "Natarajasana-1" ] ], [ [ "Natarajasana-4" ] ], [ "operation" ] ] ] }, { "qid": "8d708927b1593d8b9b42", "term": "Woodrow Wilson", "description": "28th president of the United States", "question": "Would Woodrow Wilson support Plessy v. Ferguson decision?", "answer": true, "facts": [ "Plessy v Ferguson was a landmark case that stated segregation did not violate the constitution.", "President Woodrow Wilson escalated the discriminatory hiring policies and segregation of government offices.", "By the end of 1913, under President Wilson, many departments, including the navy, had segregated work spaces, restrooms, and cafeterias." ], "decomposition": [ "What was the topic of Plessy v. Ferguson?", "Does Woodrow Wilson's veiws on #1 agree more with Plessy or Ferguson?", "Who did the court rule in favor of in Plessy v. Ferguson?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Plessy v. Ferguson-1" ] ], [ [ "Woodrow Wilson-77" ] ], [ [ "Plessy v. Ferguson-3" ] ], [ "operation" ] ], [ [ [ "Plessy v. Ferguson-2" ] ], [ [ "Woodrow Wilson-77", "Woodrow Wilson-78" ], "no_evidence" ], [ [ "Plessy v. Ferguson-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Plessy v. Ferguson-9" ] ], [ [ "Woodrow Wilson-78" ] ], [ [ "Plessy v. Ferguson-7" ] ], [ "operation" ] ] ] }, { "qid": "b59da272353e651cf2eb", "term": "Christmas Eve", "description": "Evening or entire day before Christmas Day", "question": "Would a Bulgarian priest eat a four-course meal on Christmas Eve?", "answer": false, "facts": [ "A four-course meal consists of a soup, an appetizer, an entrée, and dessert.", "The Bulgarian Christmas Eve meal has an odd number of dishes and an odd number of people sitting around the table." ], "decomposition": [ "Is the number of dishes served at a Bulgarian Christmas Eve meal odd or even?", "Is the number \"four\" odd or even?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Christmas Eve-20" ] ], [ [ "4-3" ] ], [ "operation" ] ], [ [ [ "Christmas Eve-20" ] ], [ [ "4-3" ] ], [ "operation" ] ], [ [ [ "Christmas Eve-20" ] ], [ [ "Parity (mathematics)-1" ] ], [ "operation" ] ] ] }, { "qid": "cee57fee546c1d7df94b", "term": "U.S. Route 66", "description": "Former US Highway between Chicago and Los Angeles", "question": "Is Route 66 generally unknown to Americans?", "answer": false, "facts": [ "Route 66 was immortalized in the hit \"Route 66\" by Bobby Troupe.", "\"Route 66\" as a song has reached the Billboard Top Charts multiple times and is still played often." 
], "decomposition": [ "In what hit song was Route 66 mentioned?", "Is #1 a little-known song in America?" ], "evidence": [ [ [ [ "(Get Your Kicks on) Route 66-1" ] ], [ [ "(Get Your Kicks on) Route 66-3" ] ] ], [ [ [ "(Get Your Kicks on) Route 66-1" ] ], [ [ "(Get Your Kicks on) Route 66-2" ] ] ], [ [ [ "U.S. Route 66-1" ] ], [ [ "(Get Your Kicks on) Route 66-1", "(Get Your Kicks on) Route 66-3" ], "operation" ] ] ] }, { "qid": "eb097fd2428405139a31", "term": "1936 Summer Olympics", "description": "games of the XI Olympiad, celebrated in Berlin in 1936", "question": "Did the Berlin Wall prevent any athletes from competing in the 1936 Summer Olympics?", "answer": false, "facts": [ "The 1936 Olympics were held in 1936.", "The Berlin Wall was not constructed until 1961." ], "decomposition": [ "When were the 1936 Olympics?", "When was the Berlin Wall built?", "Did #2 occur before #1?" ], "evidence": [ [ [ [ "1936 Summer Olympics-1" ] ], [ [ "Berlin Wall-1" ] ], [ "operation" ] ], [ [ [ "1936 Summer Olympics-1" ] ], [ [ "Berlin Wall-1" ] ], [ "operation" ] ], [ [ [ "1936 Summer Olympics-1" ] ], [ [ "Berlin Wall-1" ] ], [ "operation" ] ] ] }, { "qid": "0987df9ab860b01e3f17", "term": "Vietnamese people", "description": "ethnic group originally from northern Vietnam", "question": "Are the Vietnamese people a great untapped resource for NBA players?", "answer": false, "facts": [ "Vietnam was ranked as one of the countries with the shortest people on average, in 2019.", "The average height of a Vietnamese man is 5 feet 4.74 inches.", "The average height of an NBA player in 2018 was 6 foot 7 inches tall." ], "decomposition": [ "What is the average height of NBA players?", "What is the average height of Vietnamese males?", "Is #2 close to being the same as #1?" ], "evidence": [ [ [ [ "Basketball-85" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Wilt Chamberlain-1" ], "no_evidence" ], [ [ "Vietnamese people-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Basketball-85" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "69a5a14c29857fef4010", "term": "Los Angeles County, California", "description": "County in California, United States", "question": "Will every resident of Los Angeles County, California go to Jehovah Witnesses's heaven?", "answer": false, "facts": [ "Jehovah Witnesses believe that exactly 144,000 people will be saved and go to heaven.", "There are over 10 million residents of Los Angeles County, California as of 2019." ], "decomposition": [ "What is the estimated population of Los Angeles County, California?", "According to the Jehovah's Witnesses, how many people will go to heaven?", "Is #1 less than or equal to #2?" ], "evidence": [ [ [ [ "Westside (Los Angeles County)-6" ] ], [ [ "Jehovah's Witnesses-31" ] ], [ "operation" ] ], [ [ [ "Los Angeles County, California-8" ] ], [ [ "Jehovah's Witnesses-30" ] ], [ "operation" ] ], [ [ [ "Los Angeles County, California-1" ] ], [ [ "Jehovah's Witnesses and salvation-5" ] ], [ "operation" ] ] ] }, { "qid": "ca1bbc7b71d286760acd", "term": "Kangaroo", "description": "сommon name of family of marsupials", "question": "Do Australians ride Kangaroos to work?", "answer": false, "facts": [ "Kangaroos can become aggressive if they feel a human is too close or is threatening them.", "There are no parking areas or stalls for Kangaroos in Australia. ", "It would be considered animal abuse to ride on a kangaroo and leave it at one's job." 
], "decomposition": [ "Do kangaroos live freely with people?", "Are there any kangaroo parking lots in Australia?", "Is #1 or #2 positive?" ], "evidence": [ [ [ [ "Kangaroo-38" ] ], [ "no_evidence" ], [ [ "Kangaroo-38" ] ] ], [ [ [ "Kangaroo-35" ], "no_evidence" ], [ [ "Parking lot-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Red kangaroo-10" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b747938f597b09e43603", "term": "Go (game)", "description": "Abstract strategy board game for two players", "question": "Did origin dynasty of Go precede Jia Sidao?", "answer": true, "facts": [ "Go is a strategy game that originated in the Zhou dynasty.", "The Zhou dynasty lasted from 1046 BC – 256 BC.", "Jia Sidao was a chancellor during the late Song dynasty.", "The Song dynasty started in 960 AD and lasted until 1279 AD." ], "decomposition": [ "During which Chinese dynasty did the game Go originate?", "Which Chinese dynasty was Jia Sidao a part of during his lifetime?", "Did #1 precede #2?" ], "evidence": [ [ [ [ "Go (game)-1" ] ], [ [ "Jia Sidao-1" ] ], [ "operation" ] ], [ [ [ "History of Go-9" ] ], [ [ "Jia Sidao-1" ] ], [ [ "Government of the Han dynasty-1", "Song dynasty-1" ] ] ], [ [ [ "History of Go-4" ] ], [ [ "Jia Sidao-1" ] ], [ "operation" ] ] ] }, { "qid": "db7ceb3da1361288eeb2", "term": "Star Wars", "description": "Epic science fantasy space opera franchise", "question": "Are there multiple Star Wars TV shows produced by Disney?", "answer": true, "facts": [ "Star Wars Rebels and Star Wars Resistance were released after Disney bought ownership of Star Wars.", "Disney also produced the first live-action TV show set in the Star Wars galaxy, The Mandalorian.", "Disney produced one additional revival season of Star Wars Clone Wars which was originally produced before Disney owned Star Wars." ], "decomposition": [ "When did Disney acquire Star Wars?", "How many Star Wars TV shows have been produced since #1?", "Is #2 greater than one?" ], "evidence": [ [ [ [ "The Walt Disney Company-38" ] ], [ [ "Star Wars Resistance-2", "The Mandalorian-2" ] ], [ "operation" ] ], [ [ [ "The Walt Disney Company-38" ] ], [ [ "Star Wars Rebels-2", "Star Wars Resistance-2" ] ], [ "operation" ] ], [ [ [ "Star Wars-2" ] ], [ [ "Star Wars-23" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "310968736449680ee61e", "term": "Private investigator", "description": "person hired to undertake investigatory law services", "question": "Would Emma Roberts's Nancy Drew be considered a private investigator?", "answer": false, "facts": [ "Emma Roberts starred as Nancy Drew in the 2007 film titled Nancy Drew.", "A private investigator is hired by an individual to solve a crime.", "Nancy Drew from the 2007 film was described as an amateur sleuth.", "Nancy Drew in the 2007 film was interested in a case and decided to pursue it on her own, without being paid for it." ], "decomposition": [ "Who did Emma Roberts play in the 2007 film titled Nancy Drew?", "Was #1 considered a private investigator?" 
], "evidence": [ [ [ [ "Nancy Drew (2007 film)-1" ] ], [ [ "Nancy Drew (2007 film)-2" ], "operation" ] ], [ [ [ "Nancy Drew (2007 film)-1" ] ], [ [ "Nancy Drew (2007 film)-2" ] ] ], [ [ [ "Nancy Drew (2007 film)-1" ] ], [ [ "Nancy Drew (2007 film)-2" ] ] ] ] }, { "qid": "aa3eaaa782c4501cd314", "term": "Seven Years' War", "description": "Global conflict between 1756 and 1763", "question": "Was the AK-47 used in the Seven Years' War?", "answer": false, "facts": [ "The Seven Years' War took place between 1756 and 1763.", "The AK-47 was developed in the 1940s." ], "decomposition": [ "Between what years did the Seven Years' War take place?", "When was the AK-47 developed?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Seven Years' War-1" ] ], [ [ "AK-47-2" ] ], [ "operation" ] ], [ [ [ "France in the Seven Years' War-1" ] ], [ [ "AK-47-2" ] ], [ "operation" ] ], [ [ [ "Seven Years' War-9" ] ], [ [ "AK-47-2" ] ], [ "operation" ] ] ] }, { "qid": "e03ec826db49319eb034", "term": "J. K. Rowling", "description": "English novelist", "question": "Did Helen Keller ever read a novel by J. K. Rowling?", "answer": false, "facts": [ "Helen Keller died in 1968.", "J. K. Rowling's first novel was published in 1997." ], "decomposition": [ "When was J. K. Rowling's first novel published?", "When did Helen Keller die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "J. K. Rowling-15" ] ], [ [ "Helen Keller-1" ] ], [ "operation" ] ], [ [ [ "Harry Potter and the Philosopher's Stone-2" ] ], [ [ "Helen Keller-1" ] ], [ "operation" ] ], [ [ [ "J. K. Rowling-2" ], "no_evidence" ], [ [ "Helen Keller-45" ] ], [ "operation" ] ] ] }, { "qid": "1f94fd775c243dc383fa", "term": "Garfield", "description": "Comic strip created by Jim Davis", "question": "Would Garfield like canid food?", "answer": false, "facts": [ "Garfield is a fictional comic strip character that is a cat. ", "Garfield loves to eat spaghetti.", "Canid refers to the species that dogs belong to.", "Dogs like to eat meat and dog food." ], "decomposition": [ "What is Garfield's favorite food?", "Is the answer to #1 a type of canned food?" ], "evidence": [ [ [ [ "Garfield (character)-1" ] ], [ [ "Lasagne-2" ] ] ], [ [ [ "Garfield-29" ] ], [ [ "Lasagne-1" ] ] ], [ [ [ "Garfield-43" ] ], [ "operation" ] ] ] }, { "qid": "f6184f20db14b18f1401", "term": "Hamlet", "description": "tragedy by William Shakespeare", "question": "Did Hamlet's author use email?", "answer": false, "facts": [ "Hamlet was written by William Shakespeare.", "William Shakespeare was born in 1564.", "Email was not widely used until the 1970s." ], "decomposition": [ "Who was the author of Hamlet?", "When did #1 pass away?", "When did email become commonly used?", "Did #3 occur before #2?" ], "evidence": [ [ [ [ "Hamlet-1" ] ], [ [ "William Shakespeare-17" ] ], [ [ "Email-1" ] ], [ "operation" ] ], [ [ [ "Hamlet-4" ] ], [ [ "William Shakespeare-5" ] ], [ [ "History of email-12" ] ], [ "operation" ] ], [ [ [ "Hamlet-2" ] ], [ [ "William Shakespeare-17" ] ], [ [ "Email-1" ] ], [ "operation" ] ] ] }, { "qid": "25cb5d3136c997326121", "term": "Larry King", "description": "American television and radio host", "question": "Can Larry King's ex-wives form a water polo team?", "answer": true, "facts": [ "Water polo is a sport played by teams of seven competitors", "Larry King has seven ex-wives" ], "decomposition": [ "How many ex wives does Larry King have?", "How many players are on a water polo team?", "Is #1 equal to #2?" 
], "evidence": [ [ [ [ "Larry King-43" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ], [ [ [ "Larry King-37" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ], [ [ [ "Larry King-37" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ] ] }, { "qid": "da08268a462c39b614e8", "term": "Hypertension", "description": "Long term medical condition", "question": "Are a dozen pickles good for easing hypertension?", "answer": false, "facts": [ "Hypertension is high blood pressure that can come from a number of factors including excess salt.", "Pickles are food that are high in sodium, or salt." ], "decomposition": [ "What nutrients cause hypertension?", "Are pickles low in #1?" ], "evidence": [ [ [ [ "Hypertension-2" ] ], [ [ "Pickled cucumber-3" ], "operation" ] ], [ [ [ "Hypertension-24" ] ], [ [ "Pickled cucumber-20" ], "operation" ] ], [ [ [ "Hypertension-2" ] ], [ [ "Pickled cucumber-3" ] ] ] ] }, { "qid": "2d2bf6bb3bf50a4513f8", "term": "Saint Peter", "description": "apostle and first pope", "question": "Was Florence Nightingale's death more painful than Saint Peter's?", "answer": false, "facts": [ "Florence Nightingale was a social reformer that is the founder of modern medicine.", "Florence Nightingale died in her sleep.", "Saint Peter was a Christian preacher.", "Saint Peter was crucified by the Romans." ], "decomposition": [ "How did Saint Peter die?", "How did Florence Nightingale die?", "Can #2 be considered more painful than #1?" ], "evidence": [ [ [ [ "Saint Peter-64" ] ], [ [ "Florence Nightingale-42" ] ], [ [ "Florence Nightingale-42", "Saint Peter-64" ], "operation" ] ], [ [ [ "Saint Peter-57" ] ], [ [ "Florence Nightingale-42" ] ], [ "operation" ] ], [ [ [ "Saint Peter-81" ] ], [ [ "Florence Nightingale-42" ] ], [ "operation" ] ] ] }, { "qid": "64689ddba750aaad911e", "term": "Amazon (company)", "description": "American electronic commerce and cloud computing company", "question": "Would Iceland lose to Amazon in a bidding war?", "answer": true, "facts": [ "Iceland had a nominal GDP of $27 billion as of a 2018 estimate.", "Amazon recorded revenues of $232.887 billion in the 2018 fiscal year." ], "decomposition": [ "What was the nominal GDP of Iceland in 2018?", "What was Amazon's recorded revenues in 2018?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Economy of Iceland-1" ] ], [ [ "Amazon (company)-64" ] ], [ "operation" ] ], [ [ [ "Economy of Iceland-1" ] ], [ [ "Amazon (company)-64" ] ], [ "operation" ] ], [ [ [ "Economy of Iceland-1" ], "operation" ], [ [ "Advertising revenue-10" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "6330413d518d44e68b94", "term": "Boolean algebra", "description": "Algebra involving variables containing only \"true\" and \"false\" (or 1 and 0) as values", "question": "Could boolean algebra be described as binary?", "answer": true, "facts": [ "Binary options tend to have 2 instead of 10 as a base. ", "Binary directly describes something composed of 2 things. " ], "decomposition": [ "How many digits are used in boolean algebra?", "How many does 'binary' denote?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Boolean algebra-1" ] ], [ [ "Binary number-1" ] ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary number-1" ] ], [ "operation" ] ], [ [ [ "Boolean algebra-1" ] ], [ [ "Binary number-1" ] ], [ "operation" ] ] ] }, { "qid": "c027d949f7b4a6af5869", "term": "Jujutsu", "description": "Japanese martial art", "question": "Could a Jujutsu expert hypothetically defeat a Janissary?", "answer": false, "facts": [ "Jujutsu is a form of unarmed combat.", "Janissaries were the elite infantry of the Ottoman Empire.", "Janissaries wore chain mail and armor and wielded sharp swords." ], "decomposition": [ "What equipment does Jujutsu use?", "What equipment does Janissary use?", "Would someone with #1 likely defeat someone with #2?" ], "evidence": [ [ [ [ "Jujutsu-1" ] ], [ [ "Janissaries-25" ] ], [ "no_evidence" ] ], [ [ [ "Jujutsu-1" ] ], [ [ "Janissaries-1", "Janissaries-12" ] ], [ "operation" ] ], [ [ [ "Jujutsu-1" ] ], [ [ "Janissaries-25" ] ], [ "operation" ] ] ] }, { "qid": "8ec0f7fd908451102838", "term": "JPEG", "description": "Lossy compression method for reducing the size of digital images", "question": "Does the JPEG acronym stand for a joint committee?", "answer": true, "facts": [ "The term \"JPEG\" is an initialism/acronym for the Joint Photographic Experts Group.", "They created the standard in 1992.", "The Joint Photographic Experts Group (JPEG) is the joint committee between ISO/IEC JTC 1 and ITU-T Study Group 16 (formerly CCITT) . ", "The Joint Photographic Experts Group created and maintains the JPEG, JPEG 2000, and JPEG XR standards. " ], "decomposition": [ "What does the acronym JPEG represent?", "Is #1 a coalition of different groups?" ], "evidence": [ [ [ [ "JPEG-1" ] ], [ [ "Coalition-1" ] ] ], [ [ [ "JPEG-2" ] ], [ [ "Joint Photographic Experts Group-1" ], "operation" ] ], [ [ [ "JPEG-2" ] ], [ [ "Joint Photographic Experts Group-1" ] ] ] ] }, { "qid": "fedce5dbd46bf58a4e53", "term": "Mike Tyson", "description": "American boxer", "question": "Did Mike Tyson train to use the gogoplata?", "answer": false, "facts": [ "Mike Tyson is a boxer", "The gogoplata is a chokehold used in mixed martial arts and various submission grappling disciplines" ], "decomposition": [ "In what sports is a gogoplata used?", "Did Mike Tyson participate in #1?" ], "evidence": [ [ [ [ "Gogoplata-4" ] ], [ [ "Mike Tyson-1" ], "operation" ] ], [ [ [ "Gogoplata-1" ] ], [ [ "Mike Tyson-1" ] ] ], [ [ [ "Gogoplata-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "bfc739cbf7aaf9f53ada", "term": "The Mentalist", "description": "American police procedural drama television series (2008-2015)", "question": "Was the Mentalist filmed in black and white?", "answer": false, "facts": [ "The Mentalist first aired in 2008.", "Black and white television shows were no longer being made in 2008." ], "decomposition": [ "When did The Mentalist first air?", "When did they stop filming black and white television?", "Was #1 before #2?" 
], "evidence": [ [ [ [ "The Mentalist-1" ] ], [ [ "Black and white-5" ] ], [ "operation" ] ], [ [ [ "The Mentalist-1" ] ], [ [ "Black and white-5" ] ], [ "operation" ] ], [ [ [ "The Mentalist-1" ] ], [ [ "Black and white-5" ] ], [ "operation" ] ] ] }, { "qid": "d20362599347f39a08e5", "term": "Ku Klux Klan", "description": "American white supremacy group", "question": "Would the Ku Klux Klan welcome Opal Tometi into their group?", "answer": false, "facts": [ "The Ku Klux Klan is an American white supremacist hate group whose primary targets are African Americans, as well as Jews, immigrants, leftists, and homosexuals.", "Opal Tometi is an African American woman.", "Opal Tometi is a co-founder of Black Lives Matter.", "Black Lives Matter (BLM) is a decentralized movement advocating for non-violent civil disobedience in protest against incidents of police brutality and all racially motivated violence against African-American people." ], "decomposition": [ "Which groups of people are enemies of the Ku Klux Klan?", "What is Opal Tometi's ethnicity?", "Is #2 absent from #1?" ], "evidence": [ [ [ [ "Ku Klux Klan-1" ] ], [ [ "Opal Tometi-1", "Opal Tometi-4" ] ], [ "operation" ] ], [ [ [ "Ku Klux Klan-1" ] ], [ [ "Opal Tometi-1" ] ], [ "operation" ] ], [ [ [ "Ku Klux Klan-1" ] ], [ [ "Opal Tometi-8" ] ], [ "operation" ] ] ] }, { "qid": "6a756a5734139bfce297", "term": "Emu", "description": "Large flightless bird endemic to Australia", "question": "Can an emu chase a bogan?", "answer": true, "facts": [ "Emus are endemic to the continent of Australia", "Bogan is a pejorative term for certain citizens of Australia" ], "decomposition": [ "Where are emus endemic to?", "Where is a \"bogan\" found?", "Do areas #1 and #2 overlap?" ], "evidence": [ [ [ [ "Emu-1" ] ], [ [ "Bogan-2" ] ], [ "operation" ] ], [ [ [ "Emu-1" ] ], [ [ "Bogan-25" ] ], [ "operation" ] ], [ [ [ "Emu-1" ] ], [ [ "Bogan-1" ] ], [ "operation" ] ] ] }, { "qid": "54128d7439105554c9e3", "term": "ABBA", "description": "Swedish pop group", "question": "Is calling ABBA the Swedish Beatles a preposterous claim?", "answer": true, "facts": [ "ABBA was a Swedish band that had 1 Billboard number 1 hit and 4 top 10 hits.", "The Beatles had 20 Billboard number 1 hits and 34 top 10 hits." ], "decomposition": [ "How many Billboard number ones did ABBA have?", "How many Billboard number ones did the Beatles have?", "Is #1 lower than #2?" ], "evidence": [ [ [ [ "ABBA-38" ] ], [ [ "Billboard 200-25" ] ], [ "operation" ] ], [ [ [ "ABBA-120" ] ], [ [ "The Beatles-111", "The Beatles-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "ABBA-121" ] ], [ [ "Billboard 200-26" ] ], [ [ "Billboard 200-26" ], "operation" ] ] ] }, { "qid": "b3c5d591c696cda63e48", "term": "Hyena", "description": "family of mammal", "question": "Do hyenas appear in a Broadway musical?", "answer": true, "facts": [ "Scar is the evil lion in Disney's Lion King.", "Scar's minions are a group of hyenas.", "There is a Broadway stage version of Lion King." ], "decomposition": [ "Who is the main antagonist in Disney's Lion King?", "Which animals were #1's minions?", "Has the Lion King been adapted into a Broadway musical and are #2 hyenas?" 
], "evidence": [ [ [ [ "Scar (The Lion King)-1" ] ], [ [ "Scar (The Lion King)-17" ] ], [ [ "The Lion King (musical)-2" ], "operation" ] ], [ [ [ "Scar (The Lion King)-1" ] ], [ [ "Scar (The Lion King)-1" ] ], [ [ "The Lion King (musical)-1" ], "operation" ] ], [ [ [ "Scar (The Lion King)-1" ] ], [ [ "Scar (The Lion King)-3" ] ], [ [ "Scar (The Lion King)-3", "The Lion King (musical)-2" ] ] ] ] }, { "qid": "9b491a31c4212e70b18c", "term": "Jane Austen", "description": "English novelist", "question": "Did Jane Austen suffer from middle child syndrome?", "answer": false, "facts": [ "Jane Austen was the second youngest of 8 children.", "Middle child syndrome is the feeling of exclusion by middle children, due directly to their placement in their family's birth order." ], "decomposition": [ "What would a child have to be to suffer from middle child syndrome?", "What is Jane Austen's position among her siblings?", "Does being #2 make her #1?" ], "evidence": [ [ [ [ "Middle child syndrome-1" ] ], [ [ "Jane Austen-7", "Timeline of Jane Austen-4" ] ], [ "operation" ] ], [ [ [ "Middle child syndrome-1" ] ], [ [ "Jane Austen-11" ] ], [ "operation" ] ], [ [ [ "Middle child syndrome-1" ] ], [ [ "Jane Austen-5", "Jane Austen-7" ] ], [ "operation" ] ] ] }, { "qid": "bfb33c51d0dd3bb6cbb8", "term": "Hunting", "description": "Searching, pursuing, catching and killing wild animals", "question": "Would a pacifist be opposed to hunting?", "answer": false, "facts": [ "Pacifists are a group opposed to violence and war.", "Amish people are well known for their pacifism.", "Amish people hunt for meat and sport." ], "decomposition": [ "What is the purpose of hunting?", "What are Pacifists opposed to?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "Hunting-1" ] ], [ [ "Pacifism-1" ] ], [ [ "Violence-1" ], "operation" ] ], [ [ [ "Hunting-16" ], "no_evidence" ], [ [ "Opposition to World War I-9" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Hunting-1" ] ], [ [ "Pacifism-1" ] ], [ "operation" ] ] ] }, { "qid": "4b1b787084fd4b19fb80", "term": "Dance", "description": "A performing art consisting of movement of the body", "question": "Is a person with St. Vitus's Dance likely to win a ballet competition?", "answer": false, "facts": [ "St. Vitus's Dance, also called Sydenham's chorea, is a disease characterized by sudden jerking movements of the body.", "Ballet is a performance dance known for elegant and fluid motions." ], "decomposition": [ "What are the characteristic movements of St. Vitus' Dance?", "What are the characteristic movements of well trained ballet dancers?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Sydenham's chorea-1" ] ], [ [ "Ballet-2" ] ], [ "operation" ] ], [ [ [ "Sydenham's chorea-1" ] ], [ [ "Ballet-22" ] ], [ "operation" ] ], [ [ [ "Vitus-15" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "4125c5f4c7a536916608", "term": "Retail", "description": "Sale of goods and services from individuals or businesses to the end-user", "question": "Would a retail associate envy the retailer's CEO's pay?", "answer": true, "facts": [ "The average retail CEO makes 14 million yearly.", "Retail associates typically make between $8 and $13 hourly." ], "decomposition": [ "How much does a retail CEO make yearly?", "How much does a retail associate make yearly?", "Is #1 greater than #2?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Executive compensation in the United States-5" ], "no_evidence" ], [ [ "Executive compensation in the United States-5" ], "no_evidence" ], [ [ "Executive compensation in the United States-5" ], "operation" ] ], [ [ [ "Chief risk officer-15" ], "no_evidence" ], [ [ "Minimum wage-87" ] ], [ "operation" ] ] ] }, { "qid": "a7e8f2cbec209b317b8f", "term": "Menstruation", "description": "Regular discharge of blood and tissue from the inner lining of the uterus through the vagina", "question": "Are there people who are men who experience menstruation?", "answer": true, "facts": [ "Menstruation can occur in any human being who has a uterus and vagina. ", "People who are born with a vagina may transition socially and/or medically to being male. ", "Someone with a vagina who has transitioned to being male is a man. " ], "decomposition": [ "What body organs are involved in menstruation?", "Do some men possess #1? " ], "evidence": [ [ [ [ "Menstruation-9" ] ], [ [ "Transgender pregnancy-2" ], "operation" ] ], [ [ [ "Menstruation-1" ] ], [ [ "Male menstruation-1" ], "no_evidence", "operation" ] ], [ [ [ "Menstruation-1" ] ], [ [ "Sex and gender distinction-1" ], "operation" ] ] ] }, { "qid": "367537af1b620f0e3683", "term": "Disc jockey", "description": "Person who plays recorded music for an audience", "question": "Was disc jockey Jay Thomas enemies with Clayton Moore?", "answer": false, "facts": [ "Jay Thomas was a radio disc jockey and Emmy winning actor. ", "Clayton Moore played the Lone Ranger.", "Jay Thomas was a yearly recurring guest on the Dave Letterman Show every Christmas.", "Jay Thomas told his Lone Ranger Story every year about a funny encounter he had with Clayton Moore." ], "decomposition": [ "Did Jay Thomas appear on the David Letterman Show?", "Did Jay Thomas tell a story about Clayton Moore who is the Lone Ranger?", "Is the story a humorous story?", "Is #1, #2 or #3 a no answer?" ], "evidence": [ [ [ [ "Jay Thomas-3" ] ], [ [ "Clayton Moore-9" ] ], [ "no_evidence" ], [ [ "Jay Thomas-3" ] ] ], [ [ [ "Jay Thomas-5" ] ], [ [ "Jay Thomas-6" ] ], [ [ "Jay Thomas-8" ], "operation" ], [ "operation" ] ], [ [ [ "Jay Thomas-3" ] ], [ [ "Clayton Moore-9" ] ], [ [ "Jay Thomas-8" ] ], [ "operation" ] ] ] }, { "qid": "1d923b8a2778448a9716", "term": "Cactus", "description": "Family of mostly succulent plants, adapted to dry environments", "question": "Should cactus soil always be damp?", "answer": false, "facts": [ "The amount of water a cactus needs depends on the weather and season.", "When the weather isn't hot, a cactus needs very little water." ], "decomposition": [ "What are the factors that determine the amount of water a cactus needs?", "Are #1 always constant?" ], "evidence": [ [ [ [ "Cactus-2" ] ], [ [ "Cactus-2" ] ] ], [ [ [ "Cactus-77" ] ], [ "operation" ] ], [ [ [ "Cactus-77" ] ], [ [ "Cactus-77" ] ] ] ] }, { "qid": "af2d3c137bd3f5230012", "term": "United States Department of Education", "description": "United States government department", "question": "Does the United States Department of Education oversee services benefiting undocumented migrants? ", "answer": true, "facts": [ "The United States Department of Education oversees public education across the United States.", "Public education is a service.", "Public education services are given to students of migrant families that may be undocumented." 
], "decomposition": [ "Which service does the United States Department of Education oversee?", "Which services could children from undocumented migrant families benefit from?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "United States Department of Education-3", "United States Department of Education-4" ] ], [ [ "Office of Migrant Education-1" ] ], [ "operation" ] ], [ [ [ "United States Department of Education-3" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "United States Department of Education-3" ] ], [ [ "Office of Migrant Education-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8ab34c769a8b1209b86f", "term": "Pea", "description": "species of plant", "question": "Does Soylent use Pea for their source of protein? ", "answer": false, "facts": [ "Soylent is a meal replacement drink that offers 20mg protein.", "The protein in Soylent is derived from Soy." ], "decomposition": [ "What type of protein does Soylent use?", "Is #1 the same as pea protein?" ], "evidence": [ [ [ [ "Soylent (meal replacement)-1", "Soylent (meal replacement)-16" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Soylent (meal replacement)-3" ] ], [ [ "Pea-10" ], "operation" ] ], [ [ [ "Soylent (meal replacement)-3" ] ], [ "operation" ] ] ] }, { "qid": "643a517d0d0f0d2944a3", "term": "Prime number", "description": "Integer greater than 1 that has no positive integer divisors other than itself and 1", "question": "Are Brian Cranston and Saoirse Ronan's combined Emmy Awards a prime number?", "answer": false, "facts": [ "Brian Cranston has won 6 Emmy Awards.", "Saoirse Ronan has won 0 Emmy awards.", "6 is divisible by the following numbers: 1,2,3, and 6." ], "decomposition": [ "How many Emmy Awards has Brian Cranston won?", "How many Emmy Awards has Saoirse Ronan won?", "What is #1 plus #2?", "Is #3 not evenly divisible by any other number than 1 and #3?" ], "evidence": [ [ [ [ "Bryan Cranston-2" ] ], [ [ "Saoirse Ronan-1" ] ], [ "operation" ], [ [ "Composite number-4" ], "operation" ] ], [ [ [ "Bryan Cranston-27" ] ], [ [ "Saoirse Ronan-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Bryan Cranston-12" ] ], [ [ "Saoirse Ronan-1" ], "no_evidence" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "34630e83060d07bec4e0", "term": "Plastic", "description": "material of a wide range of synthetic or semi-synthetic organic solids", "question": "Do beeswax alternatives to cling wrap use plsatic?", "answer": false, "facts": [ "Beeswax food wrapping is typically made of two ingredients.", "Beeswax food wrap is composed of cotton fabric and beeswax.", "Neither cotton nor beeswax contains plastic." ], "decomposition": [ "What are the components of Beeswax food wrap?", "Do any among #1 contain plastic?" ], "evidence": [ [ [ [ "Beeswax wrap-1" ] ], [ "operation" ] ], [ [ [ "Beeswax wrap-1" ] ], [ "operation" ] ], [ [ [ "Beeswax wrap-8" ] ], [ "operation" ] ] ] }, { "qid": "d8ef42c2c54f93d5eb78", "term": "Strawberry", "description": "edible fruit", "question": "Would an owl monkey enjoy a strawberry?", "answer": true, "facts": [ "Owl monkeys are frugivores, and they prefer small, ripe fruit when available.", "Strawberries vary in size but are generally under 2 inches across and an inch in diameter.", "Strawberries are a kind of fruit." ], "decomposition": [ "What food group does an owl monkey's diet mainly consist of?", "Is a strawberry a #1?" 
], "evidence": [ [ [ [ "Night monkey-8" ] ], [ [ "Strawberry-1" ] ] ], [ [ [ "Night monkey-8" ] ], [ [ "Strawberry-1" ] ] ], [ [ [ "Night monkey-1", "Night monkey-8" ] ], [ [ "Strawberry-1" ] ] ] ] }, { "qid": "6d2b70f4dd9eec8ef932", "term": "Doctorate", "description": "academic or professional degree", "question": "Should you be skeptical of a 21 year old claiming to have a doctorate?", "answer": true, "facts": [ "The average age that someone gets their doctorate at is 33. ", "A doctorate takes an average of 8.5 years." ], "decomposition": [ "What is the average age at which people get their doctorate?", "Is 21 very much less than #1 ?" ], "evidence": [ [ [ [ "Graduate science education in the United States-6" ] ], [ "operation" ] ], [ [ [ "Doctorate-1", "Graduate science education in the United States-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Doctorate-1", "Doctorate-18" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "24a3aff40ef7fd04e208", "term": "Richard Dawkins", "description": "English ethologist, evolutionary biologist and author", "question": "Would Richard Dawkins hypothetically refuse an offering of the Last rites?", "answer": true, "facts": [ "Richard Dawkins is known as an outspoken atheist, well known for his criticism of creationism and intelligent design.", "The Last rites, in Catholicism, are the last prayers and ministrations given to an individual of the faith, when possible, shortly before death." ], "decomposition": [ "Which religious beliefs are the Last rites affiliated with?", "What was Richard Dawkins' belief regarding religion?", "Would a #2 refuse to practice #1?" ], "evidence": [ [ [ [ "Last rites-1" ] ], [ [ "Richard Dawkins-31", "Richard Dawkins-32" ] ], [ "operation" ] ], [ [ [ "Last rites-1" ] ], [ [ "Richard Dawkins-3" ] ], [ [ "Atheism-1", "Richard Dawkins-24" ] ] ], [ [ [ "Last rites-1" ] ], [ [ "Richard Dawkins-3" ] ], [ "operation" ] ] ] }, { "qid": "9332b9062a1b5c8109b0", "term": "Christopher Columbus", "description": "Italian explorer, navigator, and colonizer", "question": "Did Christopher Columbus condone multiple deadly sins?", "answer": true, "facts": [ "The seven deadly sins are: pride, greed, wrath, envy, lust, gluttony, and sloth.", "Under Columbus, every native of fourteen years of age or upward was to pay a large hawk's bell of gold dust or cotton and those who could not pay were punished.", " in just two years under Columbus's governorship, over 125,000 of the 250,000–300,000 natives in Haiti were dead." ], "decomposition": [ "What are the deadly sins?", "What were Christopher Columbus's actions in the New World?", "Did #2 include more than one of #1?" ], "evidence": [ [ [ [ "Seven deadly sins-1" ] ], [ [ "Christopher Columbus-56", "Christopher Columbus-68" ] ], [ "operation" ] ], [ [ [ "Seven deadly sins-1" ] ], [ [ "Christopher Columbus-93" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Seven deadly sins-1" ] ], [ [ "Christopher Columbus-43", "Christopher Columbus-98" ] ], [ "operation" ] ] ] }, { "qid": "6d124284e4c5dbc62e9d", "term": "Disneyland Paris", "description": "Theme park resort in France owned by The Walt Disney Company", "question": "Would an American feel lost due to language barriers at Disneyland Paris?", "answer": false, "facts": [ "All Disneyland Paris cast members are required to know and speak English.", "Travelers from England go to Disneyland Paris often without issue." 
], "decomposition": [ "What language do Americans mainly speak?", "At Disneyland Paris, what languages are workers required to know?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Americans-34" ] ], [ [ "Disneyland Paris-15" ] ], [ "operation" ] ], [ [ [ "United States-80" ] ], [ [ "Disneyland Paris-11" ] ], [ "operation" ] ], [ [ [ "American English-2" ] ], [ [ "Disneyland Paris-11", "Disneyland Paris-15" ] ], [ "operation" ] ] ] }, { "qid": "19c069cf9329b5dc7764", "term": "Snake", "description": "limbless, scaly, elongate reptile", "question": "Can a snake swallow an M60 Patton?", "answer": false, "facts": [ "An M60 Patton is an army tank that weighs several tons.", "One of the largest animals a snake ate was an impala that weighed 130 pounds." ], "decomposition": [ "What is the largest animal that a snack has ever swallowed?", "How much does #1 weigh?", "How much does a M60 Patton weigh?", "Is #3 less than #2?" ], "evidence": [ [ [ [ "Reticulated python-26", "Reticulated python-27" ] ], [ [ "Reticulated python-22" ] ], [ [ "M60 tank-64" ] ], [ "operation" ] ], [ [ [ "African rock python-2" ], "no_evidence" ], [ [ "Antelope-12", "Crocodile-9" ], "no_evidence" ], [ [ "M60 tank-64" ] ], [ "operation" ] ], [ [ [ "Snake-1" ], "no_evidence" ], [ "no_evidence" ], [ [ "M60 tank-64" ] ], [ "operation" ] ] ] }, { "qid": "9a923e94373f79fa80e5", "term": "Celery", "description": "species of plant", "question": "Could you make the kitchen 'holy trinity' without celery?", "answer": false, "facts": [ "The 'Holy Trinity' in cooking is a base used for soups, stews, and more.", "The ingredients of the Holy Trinity base are onions, bell peppers, and celery." ], "decomposition": [ "What ingredients are part of the culinary holy trinity?", "Is celery absent from #1?" ], "evidence": [ [ [ [ "Holy trinity (cuisine)-3" ] ], [ [ "Holy trinity (cuisine)-3" ], "operation" ] ], [ [ [ "Holy trinity (cuisine)-1" ] ], [ "operation" ] ], [ [ [ "Holy trinity (cuisine)-1" ] ], [ "operation" ] ] ] }, { "qid": "dd31908b73e958cfd678", "term": "Rurouni Kenshin", "description": "1994 Japanese manga series written and illustrated by Nobuhiro Watsuki", "question": "Is Rurouni Kenshin from same country as lead character in Nobunaga's Ambition?", "answer": true, "facts": [ "Rurouni Kenshin is a manga series that comes from Japan.", "Nobunaga's Ambition is a video game series based on the experiences of Oda Nobunaga.", "Oda Nobunaga was a Japanese feudal lord." ], "decomposition": [ "Where is Rurouni Kenshin from?", "Where was Oda Nobunaga from?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Rurouni Kenshin-1" ] ], [ [ "Oda Nobunaga-1" ] ], [ "operation" ] ], [ [ [ "Rurouni Kenshin-1" ] ], [ [ "Nobunaga's Ambition-1", "Oda Nobunaga-4" ] ], [ "operation" ] ], [ [ [ "Rurouni Kenshin-1" ] ], [ [ "Oda Nobunaga-1" ] ], [ "operation" ] ] ] }, { "qid": "85a77e70dd0e86d50995", "term": "Aldi", "description": "Germany-based supermarket chain", "question": "Should you bring your own bags to Aldi?", "answer": true, "facts": [ "Unlike most grocery stores, Aldi charges customers for use of paper bags.", "Aldi does not supply shopping carts without a deposit, so shopping bags are a good alternative." ], "decomposition": [ "In US Aldi stores, how do customers get shopping bags?", "How do customers get shopping carts?", "Do #1 and #2 cost money or value?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Aldi-23" ], "no_evidence" ], [ [ "Aldi-23" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Aldi-1" ], "no_evidence" ], [ [ "Aldi-32" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2075b087e620fb920439", "term": "Conan O'Brien", "description": "American television show host and comedian", "question": "Would most children be up past their bedtime if they were watching Conan O'Brien?", "answer": true, "facts": [ "Conan O'Brien airs at 11 PM. ", "It is recommended that children are in bed before 10PM." ], "decomposition": [ "When does Conan O' Brian air?", "What is the recommended bedtime for children?", "Does #1 occur after #2?" ], "evidence": [ [ [ [ "Conan (talk show)-1" ] ], [ "no_evidence" ], [ [ "Conan (talk show)-1" ] ] ], [ [ [ "Conan O'Brien-34" ] ], [ [ "Bedtime-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Conan (talk show)-1" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "4f8054d068f6d12cd1ad", "term": "Stroke", "description": "Medical condition where poor blood flow to the brain causes cell death", "question": "Did Dale Jr.'s father crash his car due to a stroke?", "answer": false, "facts": [ "Dale Earnhardt Jr. is his late father's namesake.", "Dale Earnhardt died in a crash during a NASCAR race. ", "Dale Earnhardt's car spun out of control after it tapped the car of another driver.", "Dale Earnhardt's death was a Basilar skull fracture." ], "decomposition": [ "Who was Dale Jr's father?", "What was the cause of the car crash that killed #1?", "Is #2 a stroke?" ], "evidence": [ [ [ [ "Dale Earnhardt Jr.-4" ] ], [ [ "Dale Earnhardt-23" ] ], [ [ "Dale Earnhardt-23" ], "operation" ] ], [ [ [ "Dale Earnhardt Jr.-1" ] ], [ [ "Dale Earnhardt-23" ] ], [ "operation" ] ], [ [ [ "Dale Earnhardt Jr.-4" ] ], [ [ "Dale Earnhardt-23" ] ], [ "operation" ] ] ] }, { "qid": "076ec1a4009a6e453c69", "term": "Governor of New Jersey", "description": "head of state and of government of the U.S. state of New Jersey", "question": "Was latest Republican governor of New Jersey as of 2020 heftiest politician ever?", "answer": false, "facts": [ "Chris Christie was the latest Republican governor of New Jersey as of 2020.", "Chris Christie weighed around 322 pounds.", "President William Howard Taft weighed between 335 and 350 pounds." ], "decomposition": [ "Who was the latest Republican governor of New Jersey as of 2020?", "How much does #1 weigh?", "How much did President William Howard Taft weigh?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Chris Christie-1", "Chris Christie-4" ] ], [ "no_evidence" ], [ [ "William Howard Taft-107" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Chris Christie-1" ] ], [ [ "Chris Christie-101" ], "no_evidence" ], [ [ "William Howard Taft-107" ] ], [ "operation" ] ], [ [ [ "Chris Christie-3" ] ], [ [ "Chris Christie-123" ], "no_evidence" ], [ [ "William Howard Taft-107" ] ], [ "no_evidence" ] ] ] }, { "qid": "92087ad2756a238bad74", "term": "Fever", "description": "common medical sign characterized by elevated body temperature", "question": "Will a person survive a fever of NY's highest recorded temperature?", "answer": false, "facts": [ "The highest recorded temperature in NY was 108 degrees Fahrenheit.", "A temperature of 104 degrees Fahrenheit is life threatening and requires immediate medical attention." 
], "decomposition": [ "What was NY's highest recorded temperature?", "Above what temperature will a fever become life-threatening?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Climate of New York-7" ] ], [ [ "Fever-1" ] ], [ "operation" ] ], [ [ [ "Climate of New York-7" ] ], [ [ "Human body temperature-35" ] ], [ "operation" ] ], [ [ [ "New York City-62" ] ], [ [ "Fever-1" ] ], [ "operation" ] ] ] }, { "qid": "250bb60b384ff1d91891", "term": "Glucose", "description": "A simple form of sugar", "question": "4 Krispy Kreme glazed doughnuts exceed AHA daily sugar allowance?", "answer": true, "facts": [ "Glucose is a form of sugar that humans need in order to live.", "The AHA (American Heart Association) recommends no more than 38g of sugar a day.", "One Krispy Kreme glazed doughnut has 10g of sugar." ], "decomposition": [ "What does the AHA recommend as the maximum amount of sugar a day?", "How much sugar is in a Krispy Kreme glazed doghnut?", "What is #2 multiplied by 4?", "Is #3 greater than #1?" ], "evidence": [ [ [ [ "Healthy diet-8" ] ], [ [ "Doughnut-1", "Junk food-6" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "American Heart Association-19" ], "no_evidence" ], [ [ "Doughnut-3" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Sugar-54" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e8b0130e3da885c6a5ab", "term": "Immanuel Kant", "description": "Prussian philosopher", "question": "Did Immanuel Kant ever meet the 14th president of the United States?", "answer": false, "facts": [ "Immanuel Kant died on Feb 12, 1804", "Franklin Pierce was the 14th president of the United States", "Franklin PIerce was born Nov 23, 1804" ], "decomposition": [ "On what date did Immanuel Kant die?", "Who was the 14th president of the United States?", "On what date was #2 born?", "Is #3 before #1?" ], "evidence": [ [ [ [ "Immanuel Kant-1" ] ], [ [ "Franklin Pierce-1" ] ], [ [ "Franklin Pierce-1" ] ], [ "operation" ] ], [ [ [ "Immanuel Kant-1" ] ], [ [ "Franklin Pierce-1" ] ], [ [ "Franklin Pierce-1" ] ], [ "operation" ] ], [ [ [ "Immanuel Kant-1" ] ], [ [ "Franklin Pierce-1" ] ], [ [ "Franklin Pierce-1" ] ], [ "operation" ] ] ] }, { "qid": "542b24d74ac340348171", "term": "Camel", "description": "Genus of mammals", "question": "Could a camel fit in a dog house?", "answer": false, "facts": [ "Camels are approximately 5.5 to 6 feet tall.", "The largest dog ever was 3'8\" tall.", "Dog houses are built to fit dogs." ], "decomposition": [ "How large are camels?", "How large is a dog house?", "Is #1 less than or equal to #2?" ], "evidence": [ [ [ [ "Camel-4" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Camel-4" ] ], [ [ "Dog-9" ] ], [ [ "Camel-4", "Dog-9" ], "operation" ] ], [ [ [ "Camel-4" ] ], [ [ "Doghouse-1", "Great Dane-10" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1be5f9fda680e4e07f7c", "term": "Durian", "description": "genus of plants", "question": "Would Columbus have discovered Durian trees during his 1492 expedition?", "answer": false, "facts": [ "Columbus ended up in the Americas", "Durian only exists in Southeast Asia" ], "decomposition": [ "Which country did Columbus discover on his 1492 experdition?", "Which countries could you find Durian on?", "Is there any overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Voyages of Christopher Columbus-1" ] ], [ [ "Durian-1" ] ], [ "operation" ] ], [ [ [ "Voyages of Christopher Columbus-1" ] ], [ [ "Durian-1" ] ], [ [ "Borneo-1", "Voyages of Christopher Columbus-27" ], "operation" ] ], [ [ [ "Christopher Columbus-2" ] ], [ [ "Borneo-2", "Durian-21", "Durian-22", "Durian-23" ] ], [ "operation" ] ] ] }, { "qid": "03f66770e93cf733da9b", "term": "Chief Justice of the United States", "description": "Presiding judge of the U.S. Supreme Court", "question": "Is the current Chief Justice of the United States forbidden from buying alcohol?", "answer": false, "facts": [ "The current Chief Justice of the United States is John Roberts.", "John Roberts is 65 years old.", "You have to be at least 21 years old to purchase alcohol in the United States." ], "decomposition": [ "How old do you have to be to buy alcohol legally in the United States?", "How old is John Roberts?", "Is #2 larger than #1?" ], "evidence": [ [ [ [ "Legal drinking age-6" ] ], [ [ "John Roberts-1" ] ], [ "operation" ] ], [ [ [ "National Minimum Drinking Age Act-6" ] ], [ [ "John Roberts-1" ] ], [ "operation" ] ], [ [ [ "Legal drinking age-6" ] ], [ [ "John Roberts-1" ] ], [ "operation" ] ] ] }, { "qid": "1463354ede2204c8a171", "term": "Mexican Revolution", "description": "major nationwide armed struggle in Mexico between 1910 and 1920", "question": "Could Barron Trump have lived through the Mexican Revolution?", "answer": true, "facts": [ "The Mexican Revolution took place over a period of ten years", "Barron Trump is 14 years old" ], "decomposition": [ "How long did the Mexican Revolution last?", "How old has Barron Trump already lived as of 2020?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Mexican Revolution-1" ] ], [ [ "Family of Donald Trump-11" ] ], [ "operation" ] ], [ [ [ "Mexican Revolution-1" ] ], [ [ "Family of Donald Trump-11" ] ], [ "operation" ] ], [ [ [ "Mexican Revolution-1" ] ], [ [ "Family of Donald Trump-11" ] ], [ "operation" ] ] ] }, { "qid": "e1f10b57579fa6a92aa9", "term": "Martin Luther", "description": "Saxon priest, monk and theologian, seminal figure in Protestant Reformation", "question": "Did Martin Luther believe in Satan?", "answer": true, "facts": [ "Martin Luther was a Protestant.", "Satan is also known as the devil.", "Protestants traditionally have believed in the devil as a being. " ], "decomposition": [ "What religion was Martin Luther?", "Do #1's believe in the existence of a non-human evil being (Satan, Beelzebub, the devil, etc)?" ], "evidence": [ [ [ [ "Martin Luther-1" ] ], [ [ "Antichrist-1" ], "no_evidence", "operation" ] ], [ [ [ "Martin Luther-12" ] ], [ [ "Augustinians-1", "Devil-9" ], "operation" ] ], [ [ [ "Martin Luther-111" ] ], [ [ "Satan-32" ] ] ] ] }, { "qid": "d313600f79f3713c9809", "term": "Snoopy", "description": "cartoon dog", "question": "Does Snoopy look like Chance from Homeward Bound?", "answer": false, "facts": [ "Chance from Homeward Bound is a golden retriever. ", "Snoopy is black and white.", "Golden Retrievers are yellow in color." ], "decomposition": [ "What kind of animal is Chance from Homeward Bound?", "What color is Snoopy?", "What color is #1 typically?", "Is #2 the same as #3?" 
], "evidence": [ [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ [ "Snoopy-1" ], "no_evidence" ], [ [ "American Bulldog-7" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ [ "Snoopy-1" ], "no_evidence" ], [ [ "American Bulldog-7" ] ], [ "operation" ] ], [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ [ "Snoopy-2" ] ], [ [ "American Bulldog-7" ] ], [ "operation" ] ] ] }, { "qid": "3c8d24e1b6e89a1534bb", "term": "Canidae", "description": "family of mammals", "question": "Were any members of Canidae in Aesop's Fables?", "answer": true, "facts": [ "Canidae is a family of mammals that includes dogs, foxes, and coyotes.", "Aesop's Fables was a collection of stories with animals as the main characters.", "One of the most famous stories involves a fox and a lion." ], "decomposition": [ "Which animals were typical characters in Aesop's Fables?", "Do any of #1 belong to the family Canidae?" ], "evidence": [ [ [ [ "Aesop's Fables-53" ] ], [ [ "Canidae-1" ], "operation" ] ], [ [ [ "Aesop-19", "The Boy Who Cried Wolf-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Aesop's Fables-65" ] ], [ [ "Canidae-1", "Dog-1" ], "operation" ] ] ] }, { "qid": "cdc5da16ca1bb2a4edf7", "term": "United States Air Force", "description": "Air and space warfare branch of the United States Armed Forces", "question": "Would United States Air Force consider Return of the Jedi's Han Solo bad hypothetical candidate?", "answer": true, "facts": [ "Han Solo is an ace pilot ally in the Star Wars universe.", "The US Air Force requires candidates to be between 18 and 35 years old.", "Return of the Jedi's Han Solo is 36 years of age.", "The US Air Force requires a candidate to be an American citizen.", "Han Solo is from the planet Corellia in a galaxy far, far, away." ], "decomposition": [ "What requirements does the US Air Force demand of potential candidates?", "What are the characteristics of character Han Solo as featured in Return of the Jedi?", "Does #2 fail to satisfy all of #1?" ], "evidence": [ [ [ [ "United States Air Force-60" ], "no_evidence" ], [ [ "Han Solo-12" ] ], [ "no_evidence" ] ], [ [ [ "United States Air Force Basic Military Training-34" ] ], [ [ "Han Solo-36" ] ], [ "operation" ] ], [ [ [ "United States Air Force Fitness Assessment-1" ], "no_evidence" ], [ [ "Han Solo-11", "Han Solo-12" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e32529b0074a1b857a85", "term": "DC Comics", "description": "U.S. comic book publisher", "question": "Would Avengers Comics be out of place in a DC Comics store?", "answer": true, "facts": [ "The Avengers are a comic produced by Marvel.", "Marvel and DC are rival companies, each having their own line of products and merchandise. " ], "decomposition": [ "Who produces the Avengers Comics?", "Are #1 and DC Comics rival companies?" ], "evidence": [ [ [ [ "Marvel Avengers Alliance-15" ] ], [ [ "DC vs. Marvel-7" ] ] ], [ [ [ "Avengers (comics)-1" ] ], [ [ "DC Comics-18", "Marvel Comics-17" ], "operation" ] ], [ [ [ "Avengers (comics)-1" ] ], [ [ "Marvel Comics-25" ] ] ] ] }, { "qid": "0024b8ff404e3b5f5c3b", "term": "Easter", "description": "Major Christian festival celebrating the resurrection of Jesus", "question": "Would Jesus understand the Easter Bunny?", "answer": false, "facts": [ "During the time of Jesus, Easter was not a holiday yet.", "Rabbits were not of any profound significance to Jesus." 
], "decomposition": [ "When did Easter become a holiday?", "In what year did Jesus die?", "Did #1 occur before #2?" ], "evidence": [ [ [ [ "Easter-1" ] ], [ [ "Jesus-1" ] ], [ "operation" ] ], [ [ [ "Easter-10" ] ], [ [ "Jesus-1" ] ], [ "operation" ] ], [ [ [ "Easter-1" ], "no_evidence" ], [ [ "English festivals-15" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3faba436f8387f1b7567", "term": "Greek alphabet", "description": "script that has been used to write the Greek language", "question": "Is the Greek alphabet as common as Sumerian cuneiform?", "answer": false, "facts": [ "The Greek alphabet is still commonly used", "Sumerian cuneiform is not used contemporarily " ], "decomposition": [ "Does the Greek Alphabet still have widespread present-day use/application?", "Does the Sumerian cuneiform still have widespread present-day use/application?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Greek alphabet-41" ], "no_evidence" ], [ [ "Cuneiform-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Greek language-1", "Greek language-3" ] ], [ [ "Cuneiform-5" ] ], [ "operation" ] ], [ [ [ "Greek alphabet-35" ] ], [ [ "Cuneiform-49" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c7b343171ca9bce49241", "term": "Sainsbury's", "description": "chain of supermarkets in the United Kingdom", "question": "Could Sainsbury's buy Tesco?", "answer": false, "facts": [ "Sainsbury is a business worth £29.007 billion in 2019.", "Tesco is a business worth £63.911 billion in 2019.", "63 billion is more than 29 billion.", "A business needs to have enough revenue to buy another business." ], "decomposition": [ "What is the total value of Sainsbury's?", "What is the total value of Tesco?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Sainsbury's-3" ], "no_evidence" ], [ [ "Tesco-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sainsbury's-1", "Sainsbury's-56" ], "no_evidence" ], [ [ "Tesco-5" ] ], [ "operation" ] ], [ [ [ "Sainsbury's-1" ], "no_evidence" ], [ [ "Tesco-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "e797c2d7147c9e0d5c8d", "term": "Twin", "description": "One of two offspring produced in the same pregnancy. Use with P31 on items for one twin", "question": "Are all twins the same gender?", "answer": false, "facts": [ "Identical twins are always the same gender.", "However, identical twins are very rare. Most twin cases are formed from two different fertilizations during the same conception event.", "Non-identical twins can be opposite gender or same gender." ], "decomposition": [ "Which kind of twins are usually of the same gender?", "Are there other kind(s) of twins apart from #1?", "Are #2 also usually of the same gender?" ], "evidence": [ [ [ [ "Twin-20" ] ], [ [ "Twin-9" ] ], [ [ "Twin-11" ] ] ], [ [ [ "Twin-14" ], "no_evidence" ], [ [ "Twin-9" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Twin-9" ], "no_evidence" ], [ [ "Twin-14", "Twin-59" ] ], [ "operation" ] ] ] }, { "qid": "baf402d780174b669286", "term": "Rosemary", "description": "species of plant, rosemary", "question": "Are looks the easiest way to tell rosemary from lavender? ", "answer": false, "facts": [ "Before blooming, lavender and rosemary look remarkably similar.", "Rosemary has a pine-like scent.", "Lavender has a lighter, more floral scent." ], "decomposition": [ "What does rosemary look like?", "What does lavender look like?", "Are there significant differences between #1 and #2?" 
], "evidence": [ [ [ [ "Rosemary-1" ] ], [ [ "Lavandula-5" ] ], [ "operation" ] ], [ [ [ "Rosemary-1" ] ], [ [ "Lavandula-5" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Rosemary-1" ] ], [ [ "Lavandula-26" ] ], [ "operation" ] ] ] }, { "qid": "ccb62fcd2b202d737f79", "term": "Nicole Kidman", "description": "Australian-American actress and film producer", "question": "Does Nicole Kidman despise Roman Josi?", "answer": false, "facts": [ "Nicole Kidman supports the Nashville Predators and has been photographed almost nightly throughout the season.", "Roman Josi is a Swiss professional ice hockey defenceman who currently serves as captain of the Nashville Predators." ], "decomposition": [ "Does Nicole Kidman hate the Nashville Predators players?", "Does Roman Josi play for the Nashville Predators?", "Is #2 the same answer as #1?" ], "evidence": [ [ [ [ "Nicole Kidman-42" ], "operation" ], [ [ "Roman Josi-1" ] ], [ "operation" ] ], [ [ [ "Nicole Kidman-42" ] ], [ [ "Roman Josi-1" ] ], [ "operation" ] ], [ [ [ "Nicole Kidman-42" ] ], [ [ "Roman Josi-1" ] ], [ "operation" ] ] ] }, { "qid": "0d22526961c82ad6ef4a", "term": "Fairy", "description": "mythical being or legendary creature", "question": "Is a fairy more prevalent in world myths than a valkyrie?", "answer": true, "facts": [ "Valkyries are female figures that choose heroes to bring to Valhalla.", "Valkyries are exclusive to Norse mythology.", "A fairy is a mystical magical being that can be found in Celtic, Slavic, German, English, and French folklore." ], "decomposition": [ "In what myths do the Valkyries appear?", "Do fairies appear in more myths than #1?" ], "evidence": [ [ [ [ "Valkyrie-1" ] ], [ [ "Fairy-12" ] ] ], [ [ [ "Valkyrie-2" ], "no_evidence" ], [ [ "Fairy-2" ], "no_evidence", "operation" ] ], [ [ [ "Valkyrie-1" ] ], [ [ "Fairyland-1", "Fairyland-3" ] ] ] ] }, { "qid": "4589898e4852e5389728", "term": "YMCA", "description": "Worldwide organization founded in 1844 on principles of muscular Christianity", "question": "Can you get Raclette in YMCA headquarters city?", "answer": true, "facts": [ "YMCA is headquartered in Geneva, Switzerland.", "Raclette is a melted cheese and potato dish.", "Raclette is one of several foods Geneva, Switzerland is famous for." ], "decomposition": [ "Where is the YMCA headquartered?", "What foods is #1 famous for?", "Is raclette in #2?" ], "evidence": [ [ [ [ "YMCA-1" ] ], [ [ "Swiss Cheese Union-9" ], "no_evidence" ], [ [ "Raclette-2" ], "operation" ] ], [ [ [ "YMCA-1" ] ], [ [ "Swiss cuisine-2" ] ], [ "operation" ] ], [ [ [ "YMCA-53" ] ], [ [ "Lincoln Park, Chicago-37" ] ], [ [ "Raclette-1" ], "operation" ] ] ] }, { "qid": "32381baac740543ecde4", "term": "Asteroid", "description": "Minor planet that is not a comet", "question": "Can I build a house on an asteroid?", "answer": false, "facts": [ "Building a house requires gravity to hold the house to the surface.", "The largest asteroid in our Solar System is Ceres, 583 miles across.", "Asteroids are not large enough to create noticeable gravity." ], "decomposition": [ "What do you need to hold a house to the surface of an asteroid?", "Are asteroids large enough to produce #1?" 
], "evidence": [ [ [ [ "Gravity-1" ], "no_evidence" ], [ [ "Colonization of the asteroids-5" ], "operation" ] ], [ [ [ "Gravity-1" ] ], [ [ "Asteroid-48" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "92c263f997f09dbb74a8", "term": "Maya Angelou", "description": "American poet, author, and civil rights activist", "question": "Would someone in CHE101 require a Maya Angelou book?", "answer": false, "facts": [ "CHE101 is short for Chemistry 101, a basic college class.", "Maya Angelou's writings are suited for classes in history and literature. " ], "decomposition": [ "What class is CHE101", "Are Maya Angelou books suitable for #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Maya Angelou-1" ], "no_evidence" ] ], [ [ [ "General chemistry-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Maya Angelou-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "fa7b5c8e022bb8bec2d6", "term": "Diarrhea", "description": "Loose or liquid bowel movements", "question": "Can too many oranges cause diarrhea?", "answer": true, "facts": [ "Oranges are very high in fiber and sugar.", "Too much fiber can cause diarrhea." ], "decomposition": [ "What high-level nutritional values do oranges have?", "Can excess of any of #1 cause diarrhea?" ], "evidence": [ [ [ [ "Mandarin orange-12" ] ], [ [ "Vitamin C-21" ], "operation" ] ], [ [ [ "Orange (fruit)-40", "Orange (fruit)-41" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Orange (fruit)-20" ] ], [ [ "Vitamin C-21" ], "operation" ] ] ] }, { "qid": "b200ff0fb5d8380edb14", "term": "Europa (moon)", "description": "The smallest of the four Galilean moons of Jupiter", "question": "Could the surface of Europa fry an egg?", "answer": false, "facts": [ "Europa is known for having an icy surface.", "For an egg to become firm, the ground must be at least 158 degrees Fahrenheit. ", "Ice forms at 32 degrees Fahrenheit.", "Europa's temperatures are all in the negatives on the Fahrenheit scale." ], "decomposition": [ "At what temperature will an egg become fried?", "What is the temperature on the surface of Europa?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Egg as food-28" ] ], [ [ "Europa (moon)-22" ] ], [ "operation" ] ], [ [ [ "Frying-3" ], "no_evidence" ], [ [ "Europa (moon)-22" ] ], [ "operation" ] ], [ [ [ "Boiled egg-4" ], "no_evidence" ], [ [ "Europa (moon)-22" ] ], [ "operation" ] ] ] }, { "qid": "a6be49211a3ea2956f2e", "term": "August", "description": "eighth month in the Julian and Gregorian calendars", "question": "Can I ski in Steamboat Springs, Colorado in August?", "answer": false, "facts": [ "Skiing requires snow. ", "Snow melts at temperatures higher than 0 degrees Celsius. ", "Average temperature for Steamboat Springs, Colorado in August is 27.3 degrees Celsius." ], "decomposition": [ "What is the average temperature in Steamboat Springs, CO in August?", "What is the melting point of snow?", "Is #1 lower than #2?" 
], "evidence": [ [ [ [ "Steamboat Springs, Colorado-17" ], "no_evidence" ], [ [ "Melting point-3" ] ], [ [ "Frost (temperature)-1" ], "operation" ] ], [ [ [ "Steamboat Springs, Colorado-17" ] ], [ [ "Melting point-3" ] ], [ "operation" ] ], [ [ [ "Steamboat Springs, Colorado-17" ], "no_evidence" ], [ [ "Water-95" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "519de84458db613f0364", "term": "Dopamine", "description": "chemical compound", "question": "Is dopamine snorted nasally by drug users?", "answer": false, "facts": [ "Dopamine is a hormone and a neurotransmitter.", "Neurotransmitters are produced endogenously by the body and are not consumed externally." ], "decomposition": [ "What kind of substance is dopamine?", "Are #1 usually taken through the nose by drug users?" ], "evidence": [ [ [ [ "Dopamine-6" ] ], [ [ "Dopamine-15" ] ] ], [ [ [ "Dopamine-1" ] ], [ "operation" ] ], [ [ [ "Dopamine-1" ] ], [ "operation" ] ] ] }, { "qid": "05e9cdc44f1b066badd7", "term": "Moustache", "description": "Facial hair grown on the upper lip", "question": "Is it common for women to have moustaches?", "answer": false, "facts": [ "Facial hair doesn't normally grow on women like it does on men.", "A little bit of hair can grow between the upper lip and nose but it's a very small amount and generally not enough to be noticeable." ], "decomposition": [ "Which gender grows sizable moustaches more commonly?", "Is #1 the same as women?" ], "evidence": [ [ [ [ "Moustache-9" ] ], [ "operation" ] ], [ [ [ "Beard-27" ] ], [ [ "Beard-27" ], "operation" ] ], [ [ [ "Facial hair-2" ] ], [ "operation" ] ] ] }, { "qid": "697789a9ee6a5b2f4e0f", "term": "Michael", "description": "male given name", "question": "Is Michael an unpopular name in the United States?", "answer": false, "facts": [ "More boys were named Michael in the United States than any other name between 1954 and 1998.", "Michael and its foreign variants were within the top 20 names in Canada, Australia, UK, and Europe in the 2010s." ], "decomposition": [ "What are the most popular names in the USA?", "Is Michael absent from #1?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Michael-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "John (given name)-2", "Michael-5", "Richard-2", "Robert-3" ] ], [ "operation" ] ] ] }, { "qid": "ae723d6788b21ccdd2d7", "term": "Central Park Zoo", "description": "Zoo in Central Park, Manhattan, New York City", "question": "Is Central Park Zoo located on an island?", "answer": true, "facts": [ "Central Park Zoo is located in Manhattan.", "Manhattan is an island." ], "decomposition": [ "Where is the Central Park Zoo located?", "Is #1 an island?" ], "evidence": [ [ [ [ "Central Park Zoo-1", "Central Park-1" ] ], [ [ "Manhattan-1" ], "operation" ] ], [ [ [ "Central Park Zoo-4" ] ], [ [ "Central Park-2", "Manhattan-34" ] ] ], [ [ [ "Central Park Zoo-1", "Central Park-1" ] ], [ [ "Manhattan-1" ] ] ] ] }, { "qid": "69f1599823635bfc075b", "term": "Cosmic microwave background", "description": "Universe events since the Big Bang 13.8 billion years ago", "question": "Can food be cooked in the cosmic microwave background?", "answer": false, "facts": [ "The cosmic microwave background is faint electromagnetic radiation in space that is a remnant of the Big Bang.", "Food can be cooked in a microwave oven, but not in the remnants of space radiation." 
], "decomposition": [ "What kind of radiation is used in microwave ovens?", "What kind of radiation is produced in the cosmic microwave background?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Microwave oven-1" ] ], [ [ "Cosmic microwave background-1" ] ], [ "operation" ] ], [ [ [ "Microwave oven-1" ] ], [ [ "Cosmic microwave background-1" ] ], [ "operation" ] ], [ [ [ "Microwave oven-1" ] ], [ [ "Cosmic microwave background-1" ] ], [ "operation" ] ] ] }, { "qid": "81af1391cbdbf67be3f7", "term": "Parachuting", "description": "action sport of exiting an aircraft and returning to Earth using a parachute", "question": "Is coal needed to practice parachuting?", "answer": true, "facts": [ "Parachuting requires a parachute.", "Parachutes are made from nylon.", "Nylon is made from coal. " ], "decomposition": [ "What is one of the most important items that you need to go parachuting?", "What is #1 made out of?", "Is #2 originally made from coal?" ], "evidence": [ [ [ [ "Parachute-1" ] ], [ [ "Nylon riots-3" ] ], [ [ "Nylon-21" ] ] ], [ [ [ "Parachute-1" ] ], [ [ "Gerard Bérchet-2" ] ], [ [ "Nylon-16" ] ] ], [ [ [ "Parachute-1" ] ], [ [ "Parachute-1" ] ], [ [ "Nylon-16" ], "operation" ] ] ] }, { "qid": "5db17a562505d84c0c74", "term": "Cactus", "description": "Family of mostly succulent plants, adapted to dry environments", "question": "Would an oil painter avoid reds from scale insects that live on a cactus?", "answer": true, "facts": [ "Carmine is the product of an insect that lives on some cacti", "Carmine is not stable in oil paints and its usage has been discontinued", "Carmine is red" ], "decomposition": [ "What red pigments are made from insects?", "What scale insects live on cacti?", "What pigments overlap with #1 and #2?", "Is #3 unstable in oil paint?" ], "evidence": [ [ [ [ "Carmine-1" ] ], [ [ "Cochineal-10" ] ], [ [ "Cochineal-10" ] ], [ [ "Carmine-9" ] ] ], [ [ [ "Cochineal-1" ], "no_evidence" ], [ [ "Scale insect-15" ], "no_evidence" ], [ [ "Carmine-1" ] ], [ [ "Carmine-9" ], "operation" ] ], [ [ [ "Red-60" ] ], [ [ "Cochineal-1", "Opuntia-31" ] ], [ "operation" ], [ [ "Oil paint-18" ], "no_evidence" ] ] ] }, { "qid": "4d20bb8dc217f39ee929", "term": "New York Public Library", "description": "Public library system in New York City", "question": "Could you go to New York Public Library and the Six Flags Great Escape in the same day?", "answer": true, "facts": [ "Six Flags Great Escape is located in Lake George, NY.", "New York Public Library is located in New York City.", "Lake George is 3.5 driving hours from New York City." ], "decomposition": [ "Where is Six Flags Great Escape located?", "Where is The New York Public Library located?", "How long does it take to drive from #1 to #2?", "Is #3 less than 24 hours?" 
], "evidence": [ [ [ [ "The Great Escape and Hurricane Harbor-1" ] ], [ [ "New York Public Library-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "The Great Escape and Hurricane Harbor-1" ] ], [ [ "New York Public Library Main Branch-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "The Great Escape and Hurricane Harbor-1" ] ], [ [ "New York Public Library-1" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2cc610b9b07a0ac0f378", "term": "Himalayas", "description": "Mountain range in Asia", "question": "Did any of religions in which Himalayas are sacred originate in 19th century?", "answer": false, "facts": [ "The Himalaya mountains are sacred to three religions: Hinduism, Buddhism, and Jainism.", "Hinduism was first synthesized around 500 BC.", "Jainism began in the 6th century BC.", "Buddhism originated around the 5th century BC." ], "decomposition": [ "Which religions believe that the Himalayas are sacred?", "When did #1 originate?", "Are any of #2 equal to the 19th century?" ], "evidence": [ [ [ [ "Himalayas-36" ], "no_evidence" ], [ [ "Hinduism-1", "Jainism-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Himalayas-40" ] ], [ [ "Buddhism-11", "Hinduism-7", "Jainism-29", "Sikhism-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Himalayas-36" ], "no_evidence" ], [ [ "Hindu art-7" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ec6b6fdf9c72f77dac67", "term": "Goofy", "description": "Disney cartoon character", "question": "Did brother of Goofy creator's employer commit an excommunicable offense?", "answer": true, "facts": [ "Goofy was created by Art Babbitt who worked for Walt Disney.", "Walt Disney's brother, Roy Disney, was a member of the Freemasons.", "The Catholic Church has a litany of offenses that can get someone excommunicated.", "Being initiated to Freemasonry, is listed as an excommunicable offense." ], "decomposition": [ "Who created the character Goofy?", "Who did #1 work for?", "Who are #2's siblings?", "Are any of #3 Freemasons?", "Is #4 a reason for excommunication?" ], "evidence": [ [ [ [ "Goofy-4" ] ], [ [ "Pinto Colvig-8" ] ], [ [ "Roy O. Disney-1" ] ], [ "no_evidence" ], [ [ "Freemasonry-62" ], "operation" ] ], [ [ [ "Goofy-2" ] ], [ [ "Goofy-2", "Goofy-43" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Goofy-4" ], "no_evidence" ], [ [ "Walt Disney-1" ] ], [ [ "Flora Call Disney-4" ], "no_evidence" ], [ "no_evidence" ], [ [ "Papal ban of Freemasonry-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "d9aeae10998c093c0cc9", "term": "Toyota Supra", "description": "A sports car and grand tourer manufactured by Toyota Motor Corporation", "question": "Can a Toyota Supra make a vlog?", "answer": false, "facts": [ "A vlog is a \"video blog\" about one's experience", "A Toyota Supra does not have consciousness to recount any experiences" ], "decomposition": [ "What is a vlog?", "Who makes #1?", "What is a Toyota Supra?", "Is #3 the same as #2?" 
], "evidence": [ [ [ [ "Vlog-1" ] ], [ [ "Vlog-20" ] ], [ [ "Toyota Supra-1" ] ], [ "operation" ] ], [ [ [ "Vlog-1" ] ], [ [ "Vlog-14" ] ], [ [ "Toyota Supra-1" ] ], [ "operation" ] ], [ [ [ "Vlog-1" ] ], [ [ "Vlog-2" ] ], [ [ "Toyota Supra-1" ] ], [ "operation" ] ] ] }, { "qid": "18dbd6d87964d2b0a443", "term": "New Testament", "description": "Second division of the Christian biblical canon", "question": "Was Daniel thrown into the lion's den in the New Testament?", "answer": false, "facts": [ "The Book of Daniel is a book in the Old Testament of the Bible.", "The Bible is divided into the Old Testament and the New Testament.", "The New Testament focuses on four Gospels regarding the life of Jesus." ], "decomposition": [ "Which book of the Bible has the story of Daniel in the lions' den?", "Is #1 in the New Testament of the Bible?" ], "evidence": [ [ [ [ "Daniel in the lions' den-1" ] ], [ "operation" ] ], [ [ [ "Book of Daniel-13" ] ], [ [ "Old Testament-16" ], "operation" ] ], [ [ [ "Daniel in the lions' den-1" ] ], [ [ "Book of Daniel-2" ] ] ] ] }, { "qid": "5fd0dc99fdf46de79b6a", "term": "Citrus", "description": "genus of fruit-bearing plants (source of fruit such as lemons and oranges)", "question": "Would someone on antidepressants need to be cautious of some citrus fruits?", "answer": true, "facts": [ "Grapefruit is a citrus fruit.", "Grapefruit can cause some medications to reach unintentionally high levels in the body. ", "SSRI's are a medication type that can be affected by grapefruit." ], "decomposition": [ "Which fruits can affect antidepressant medications?", "Is #1 a citrus fruit?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Antidepressant-30" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Grapefruit–drug interactions-1", "Grapefruit–drug interactions-2" ] ], [ [ "Grapefruit–drug interactions-2" ] ] ] ] }, { "qid": "6faf37c8d91482f1c025", "term": "Diarrhea", "description": "Loose or liquid bowel movements", "question": "Do people take laxatives because they enjoy diarrhea?", "answer": false, "facts": [ "People take laxatives to relieve constipation and associated pain.", "People with eating disorders take laxatives to lose weight." ], "decomposition": [ "What is the primary reason for taking laxatives?", "Is #1 to induce diarrhea?" ], "evidence": [ [ [ [ "Laxative-1" ] ], [ "operation" ] ], [ [ [ "Laxative-1" ] ], [ [ "Laxative-1" ], "operation" ] ], [ [ [ "Laxative-1" ] ], [ [ "Laxative-2" ], "operation" ] ] ] }, { "qid": "cf0a1e2e39a6af9a79a9", "term": "Durian", "description": "genus of plants", "question": "Could Durian cause someone's stomach to feel unwell?", "answer": true, "facts": [ "Durian has a pungent odor that many people describe as being similar to feet and onions.", "Unpleasant smells can make people feel nauseous. " ], "decomposition": [ "What would some people describe durian's smell as?", "Would #1 cause some people to feel unwell?" ], "evidence": [ [ [ [ "Durian-3" ] ], [ [ "Durian-50" ] ] ], [ [ [ "Durian-3" ] ], [ "operation" ] ], [ [ [ "Durian-29" ] ], [ [ "Durian-50" ] ] ] ] }, { "qid": "947a089ce6992869815a", "term": "Swallow", "description": "family of birds", "question": "Did the swallow play a role in a famous film about King Arthur?", "answer": true, "facts": [ "Monty Python and the Holy Grail was a famous film about King Arthur", "In Monty Python and the Holy Grail, swallows are mentioned several times" ], "decomposition": [ "What Monty Python film is about King Arthur?", "Are swallows mentioned several times in #1?" 
], "evidence": [ [ [ [ "Monty Python and the Holy Grail-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Monty Python and the Holy Grail-1" ] ], [ [ "Monty Python and the Holy Grail-4" ] ] ], [ [ [ "Monty Python and the Holy Grail-2", "Monty Python and the Holy Grail-4", "Monty Python and the Holy Grail-9" ] ], [ "operation" ] ] ] }, { "qid": "1a65b1ecd37a63767bf7", "term": "Persian Gulf", "description": "An arm of the Indian Ocean in western Asia", "question": "Can the Persian Gulf fit in New Jersey?", "answer": false, "facts": [ "The Persian Gulf has an area of 96,912 square miles.", "New Jersey has a land area of 7,417 square miles." ], "decomposition": [ "How much area does the Persian Gulf cover?", "How much area does New Jersey cover?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Persian Gulf-6" ] ], [ [ "New Jersey-1" ] ], [ "operation" ] ], [ [ [ "Persian Gulf-6" ] ], [ [ "New Jersey-1" ] ], [ "operation" ] ], [ [ [ "Persian Gulf-6" ] ], [ [ "New Jersey-1" ] ], [ "operation" ] ] ] }, { "qid": "e175b012fc9b5db8da3f", "term": "Pan (god)", "description": "Ancient Greek god of the wilds, shepherds, and flocks", "question": "Does the Boy Who Cried Wolf hypothetically have reason to pray to Pan?", "answer": true, "facts": [ "Pan is the ancient Greek god of the wild, shepherds and flocks.", "The Boy Who Cried Wolf, from Aesop's Fables, was a shepherd boy." ], "decomposition": [ "What is the profession of The Boy Who Cried Wolf?", "What profession is Pan the god of?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "The Boy Who Cried Wolf-2" ] ], [ [ "Pan (god)-1" ] ], [ "operation" ] ], [ [ [ "The Boy Who Cried Wolf-2" ] ], [ [ "Pan (god)-1" ] ], [ "operation" ] ], [ [ [ "The Boy Who Cried Wolf-2" ] ], [ [ "Pan (god)-1" ] ], [ "operation" ] ] ] }, { "qid": "a03a7cdebe516f521fbc", "term": "Balkans", "description": "Geopolitical and cultural region of southeastern Europe", "question": "Are there enough people in the Balkans to match the population of Japan?", "answer": false, "facts": [ "There are approximately 55 million people in the Balkans", "There are more than 125 million people in Japan" ], "decomposition": [ "What is the population of the Balkans?", "What is the population of Japan?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Balkans-16" ] ], [ [ "Demographics of Japan-5" ] ], [ "operation" ] ], [ [ [ "Balkans-16" ] ], [ [ "Japan-2" ] ], [ "operation" ] ], [ [ [ "Balkans-16" ] ], [ [ "Japan-2" ] ], [ "operation" ] ] ] }, { "qid": "261863cb4eeef4df279d", "term": "Surveying", "description": "The technique, profession, and science of determining the positions of points and the distances and angles between them", "question": "Would you hire someone with dyscalculia to do surveying work?", "answer": false, "facts": [ "Dyscalculia is a learning disability in math. People with dyscalculia have trouble with math at many levels. ", "Surveyors work with elements of geometry, trigonometry, regression analysis, physics, engineering, metrology, programming languages, and the law. ", "Geometry and trigonometry are types of advanced mathematics." ], "decomposition": [ "What do people with dyscalculia struggle with?", "What skills are necessary to be a competent surveyor?", "Is #1 not listed in #2?" 
], "evidence": [ [ [ [ "Dyscalculia-1" ] ], [ [ "Surveying-2" ] ], [ "operation" ] ], [ [ [ "Dyscalculia-1" ] ], [ [ "Surveying-2" ] ], [ "operation" ] ], [ [ [ "Dyscalculia-1" ] ], [ [ "Surveying-2" ] ], [ "operation" ] ] ] }, { "qid": "2930044a09f13c95fb91", "term": "Conducting", "description": "Directing a musical performance by way of visible gestures", "question": "Is a paraplegic suitable for conducting an orchestra?", "answer": true, "facts": [ "Musical conductors use their hands to wave a baton and guide the orchestra.", "Paraplegics are people that are paralyzed from the waist down.", "Paraplegics are able to play sports that involve their upper body, such as basketball." ], "decomposition": [ "What part(s) of the body is/are needed to conduct an orchestra?", "Which portion of a paraplegic's body is paralyzed?", "Are #1 separate from #2?" ], "evidence": [ [ [ [ "Conducting-1" ] ], [ [ "Paraplegia-1" ] ], [ "operation" ] ], [ [ [ "Conducting-1" ] ], [ [ "Paraplegia-2" ] ], [ [ "Conducting-1", "Paraplegia-2" ] ] ], [ [ [ "Conducting-1" ] ], [ [ "Paraplegia-1" ] ], [ "operation" ] ] ] }, { "qid": "b0ee9781a2840b582d9d", "term": "Nicole Kidman", "description": "Australian-American actress and film producer", "question": "Is Nicole Kidman ideal choice to play Psylocke based on height and weight?", "answer": true, "facts": [ "Psylocke is a Marvel super hero whose real name is Betsy Braddock.", "Betsy Braddock is 5'11 and 155 lbs.", "Actress Nicole Kidman is 5'11 and weighs 137 lbs.", "Actresses gain weight all the time for roles, such as Charlize Theron who gained 30 pounds for the movie Monster." ], "decomposition": [ "What is Psylocke's height?", "What is Psylocke's wieght?", "Does Nicole Kidman have similar attributes as #1 and #2?" ], "evidence": [ [ [ [ "Psylocke-2" ], "no_evidence" ], [ "no_evidence" ], [ [ "Nicole Kidman-1" ], "no_evidence", "operation" ] ], [ [ [ "Psylocke-2" ], "no_evidence" ], [ [ "Psylocke-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Psylocke-4" ], "no_evidence" ], [ [ "Psylocke-4" ], "no_evidence" ], [ [ "Model (person)-19", "Model (person)-20", "Nicole Kidman-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "35a5b27d84294ebf1fe0", "term": "Satanism", "description": "group of ideological and philosophical beliefs based on Satan", "question": "Is Capricorn the hypothetical zodiac sign of Satanism?", "answer": true, "facts": [ "Satanism is a group of beliefs based on Satan and has numerous symbols.", "Satan (The Devil) is often depicted as the deity Baphomet, a Sabbatic Goat with a human body and goat head.", "The Capricorn is the zodiac sign symbolized by the goat.", "The Tarot card attributed to the Zodiac sign Capricorn is The Devil." ], "decomposition": [ "What animal represents the zodiac sign Capricorn?", "What are some symbols in Satanism?", "Is #1 among #2?" ], "evidence": [ [ [ [ "Capricorn (astrology)-1" ] ], [ [ "Baphomet-1", "Baphomet-3" ] ], [ "operation" ] ], [ [ [ "Capricorn (astrology)-1" ] ], [ [ "Sigil of Baphomet-2" ] ], [ "operation" ] ], [ [ [ "Capricorn (astrology)-2" ] ], [ [ "Baphomet-3", "LaVeyan Satanism-40" ] ], [ "operation" ] ] ] }, { "qid": "037acdd213ada618830f", "term": "Sesame", "description": "species of plant", "question": "Would a sesame seed be mistaken for a wood frog egg?", "answer": false, "facts": [ "A sesame seed is a flat 3 to 4 mm size seed.", "Wood frog eggs are globe looking masses about 2 to 5 inches in diameter." 
], "decomposition": [ "What shape and size is a sesame seed?", "What is the shape and size of a wood frog egg?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Sesame-11" ] ], [ [ "Wood frog-14" ] ], [ [ "Sesame-11", "Wood frog-14" ] ] ], [ [ [ "Sesame-11" ] ], [ [ "Wood frog-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sesame-11" ] ], [ [ "Wood frog-14", "Wood frog-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cdc41c9bae19c441248d", "term": "Easter", "description": "Major Christian festival celebrating the resurrection of Jesus", "question": "Does Adam Sandler skip celebrating Easter?", "answer": true, "facts": [ "Adam Sandler is Jewish.", "Jewish religious people do not celebrate Easter." ], "decomposition": [ "Easter is usually celebrated by people of which religion?", "What is Adam Sandler's religion?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "Easter-6" ] ], [ [ "Adam Sandler-26" ] ], [ "operation" ] ], [ [ [ "Easter-58" ] ], [ [ "Adam Sandler-26" ] ], [ "operation" ] ], [ [ [ "Easter-2" ] ], [ [ "Adam Sandler-5" ] ], [ "operation" ] ] ] }, { "qid": "077cce9150f60c6576ea", "term": "Alcatraz Island", "description": "United States historic place", "question": "Is there historic graffiti on Alcatraz?", "answer": true, "facts": [ "Native Americans occupied the island of Alcatraz in 1969.", "Wall writings from the Native American occupation has been preserved and in some cases, restored completely." ], "decomposition": [ "Who were the occupants of the island of Alcatraz in 1969?", "Did #1 make wall writings?" ], "evidence": [ [ [ [ "Alcatraz Island-1" ] ], [ [ "Alcatraz Island-23" ], "operation" ] ], [ [ [ "Alcatraz Island-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Alcatraz Island-1" ] ], [ [ "Alcatraz Island-23" ], "operation" ] ] ] }, { "qid": "46d5eda453734f1d4f98", "term": "Jean-Paul Sartre", "description": "French existentialist philosopher, playwright, novelist, screenwriter, political activist, biographer, and literary critic", "question": "Did Queen Elizabeth I read the works of Jean-Paul Sartre?", "answer": false, "facts": [ "Jean-Paul Sartre was born in 1905.", "Queen Elizabeth I died in 1603." ], "decomposition": [ "When did Queen Elizabeth I die?", "When was Jean-Paul Sartre bron?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Elizabeth I of England-1" ] ], [ [ "Jean-Paul Sartre-1" ] ], [ "operation" ] ], [ [ [ "Elizabeth I of England-1" ] ], [ [ "Jean-Paul Sartre-4" ] ], [ "operation" ] ], [ [ [ "Elizabeth I of England-1" ] ], [ [ "Jean-Paul Sartre-1" ] ], [ "operation" ] ] ] }, { "qid": "cd1e50e79d07a0320120", "term": "Ancient Greece", "description": "Civilization belonging to an early period of Greek history", "question": "Did Polar Bears roam around in Ancient Greece?", "answer": false, "facts": [ "Polar Bears live in the Arctic, with temperatures that can get as low as -35 degrees celsius.", "Ancient Greece had an average temperature of 24 degrees celsius." ], "decomposition": [ "Where do polar bears live?", "What is the average temperature of #1?", "What was the average temperature of Ancient Greece?", "Is #3 the same as #2?" 
], "evidence": [ [ [ [ "Polar bear-9" ] ], [ [ "Arctic Circle-12" ] ], [ [ "Ancient Greece-42" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Polar bear-1" ] ], [ [ "Climate of the Arctic-40" ] ], [ [ "Climate of Greece-7" ] ], [ [ "Climate of Greece-7" ], "operation" ] ], [ [ [ "Polar bear-1" ] ], [ [ "Arctic Circle-12" ] ], [ [ "Greece-60" ] ], [ "operation" ] ] ] }, { "qid": "be5ba924348f83b5df96", "term": "United Airlines", "description": "Airline in the United States", "question": "Was United Airlines blameless in worst crash in history?", "answer": true, "facts": [ "The Tenerife Airport disaster is the deadliest crash in aviation history.", "The Tenerife Airport disaster involved a Boeing plane and a Pan Am plane.", "Pan Am airlines competed with United Airlines and other US companies.", "Boeing is an American multinational corporation that designs and sells airplanes, rockets, satellites,and missiles." ], "decomposition": [ "Which aviation accident is considered the worst in aviation history?", "Is United Airlines excluded from #1?" ], "evidence": [ [ [ [ "Tenerife airport disaster-1" ] ], [ "operation" ] ], [ [ [ "Tenerife airport disaster-1" ] ], [ [ "United Airlines-1" ], "operation" ] ], [ [ [ "Tenerife-36" ] ], [ [ "Tenerife airport disaster-3" ] ] ] ] }, { "qid": "d38d93ba50044cce053c", "term": "New York Harbor", "description": "harbor in the New York City, U.S.A. metropolitan area", "question": "Does New York Harbor sit on a craton without volcanic activity?", "answer": false, "facts": [ "New York Harbor is located on Laurentia craton. ", "The southwestern portion of Laurentia contains numerous large volcanic eruptions." ], "decomposition": [ "What craton is New York Harbor on?", "Is #1 devoid of volcanic activity?" ], "evidence": [ [ [ [ "Laurentia-3" ] ], [ [ "Laurentia-6" ], "operation" ] ], [ [ [ "Staten Island-42" ], "no_evidence" ], [ [ "The Palisades (Hudson River)-5" ], "no_evidence" ] ], [ [ [ "New York Harbor-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "625080d6e74261c523f6", "term": "Bohai Sea", "description": "The innermost gulf of the Yellow Sea and Korea Bay on the coast of Northeastern and North China", "question": "Would Statue of Liberty be visible if submerged in Bohai Sea?", "answer": true, "facts": [ "The Bohai Sea is 230 feet deep.", "The Statue of Liberty is 305 feet tall." ], "decomposition": [ "How deep is the Bohai Sea?", "How tall is the Statue of Liberty?", "Is #2 greater than #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Statue of Liberty-18" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Statue of Liberty-18" ] ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ [ "Statue of Liberty-18" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f56ebcf7b0a06bcb7d97", "term": "Fake news", "description": "Hoax or deliberate spread of misinformation", "question": "Have Jamie Lee Curtis been the subject of fake news?", "answer": true, "facts": [ "Fake news is a hoax that is circulated and spreads to get people to believe a falsehood.", "Jamie Lee Curtis is an American actress known for the Halloween series.", "Rumors have been spread about Jamie Lee Curtis that she was born a hermaphrodite." ], "decomposition": [ "What is fake news?", "Has Jamie Lee Curtis ever been the victim of #1?" 
], "evidence": [ [ [ [ "Fake news in India-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Fake news-7" ] ], [ [ "Jamie Lee Curtis-25" ], "no_evidence" ] ], [ [ [ "Fake news-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "0fde234771ccc0bf0f6a", "term": "Golden Gate Bridge", "description": "suspension bridge on the San Francisco Bay", "question": "Will the Stanford Linear Accelerator fit on the Golden Gate Bridge?", "answer": false, "facts": [ "The Golden Gate Bridge is a famous suspension bridge in the San Francisco bay area.", "The Golden Gate Bridge is 1.7miles long.", "The Stanford Linear Accelerator is part of a particle physics lab in Menlo Park, California. ", "The Sanford Linear Accelerator is 2miles long." ], "decomposition": [ "What is the length of the Golden Gate Bridge?", "How long is the Stanford Linear Accelerator?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Golden Gate Bridge-1" ] ], [ [ "SLAC National Accelerator Laboratory-1" ] ], [ "operation" ] ], [ [ [ "Golden Gate Bridge-25" ] ], [ [ "Mile-1", "SLAC National Accelerator Laboratory-1" ] ], [ "operation" ] ], [ [ [ "SLAC National Accelerator Laboratory-1" ] ], [ [ "Golden Gate Bridge-1" ] ], [ "operation" ] ] ] }, { "qid": "4ad7adb3122739af2458", "term": "JPMorgan Chase", "description": "American multinational banking and financial services holding company", "question": "Could JPMorgan Chase give every American $10?", "answer": true, "facts": [ "JPMorgan Chase has total assets of US$2.687 trillion.", "As of November 8, 2018, the United States is estimated to have a population of 328,953,020.", "One trillion is equal to 1,000 billions.", "One billion is equal to 1,000 millions." ], "decomposition": [ "How much are the total assets of JPMorgan Chase?", "What is the population of the United States?", "Is #2 times $10 less than #1?" ], "evidence": [ [ [ [ "JPMorgan Chase-1" ] ], [ [ "Demographics of the United States-1" ] ], [ "operation" ] ], [ [ [ "JPMorgan Chase-1" ] ], [ [ "United States-1" ] ], [ "operation" ] ], [ [ [ "JPMorgan Chase-1" ] ], [ [ "United States-1" ] ], [ "operation" ] ] ] }, { "qid": "ba04b5ebc2edd682c4dd", "term": "The Matrix", "description": "1999 science fiction action film directed by the Wachowskis", "question": "Is the Matrix a standalone movie?", "answer": false, "facts": [ "The Matrix ends in a cliffhanger.", "The story is then resolved in two sequels, making a trilogy.", "There are also supplemental works adding to the story, such as a video game and the Animatrix." ], "decomposition": [ "How many movies are in The Matrix franchise?", "Is #1 equal to one?" ], "evidence": [ [ [ [ "The Matrix (franchise)-9" ] ], [ [ "The Matrix (franchise)-9" ], "operation" ] ], [ [ [ "The Matrix (franchise)-1" ] ], [ "operation" ] ], [ [ [ "The Matrix-49" ] ], [ "operation" ] ] ] }, { "qid": "a50ae7ec93a5ac585694", "term": "Noah's Ark", "description": "the vessel in the Genesis flood narrative", "question": "Is Noah's Ark an upgrade for Golden Age of Piracy pirates?", "answer": true, "facts": [ "The Golden Age of Piracy took place between 1650 and 1720 and had many famous pirates including Blackbeard.", "Noah's Ark had dimensions in feet of 450 x 75 x 45.", "Blackbeard's ship, Queen Anne's Revenge, had a length of 103 feet.", "William Kidd's ship, Adventure Galley, was 124 feet long." ], "decomposition": [ "Which pirates were famously known during the Golden Age of Piracy?", "What were the dimensions of Noah's Ark?", "Is #2 greater than the dimensions of the ships of most of #1?" 
], "evidence": [ [ [ [ "William Kidd-1" ] ], [ [ "Noah's Ark-3" ] ], [ [ "Noah's Ark-3", "William Kidd-9" ], "no_evidence" ] ], [ [ [ "Whydah Gally-6" ], "no_evidence" ], [ [ "Noah's Ark-3" ] ], [ "no_evidence" ] ], [ [ [ "Blackbeard-1", "Henry Morgan-1", "Samuel Bellamy-1" ], "no_evidence" ], [ [ "Noah's Ark-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "abd771e9a20b107e7e56", "term": "Nikola Tesla", "description": "Serbian American inventor", "question": "Has Nikola Tesla's name inspired multiple brands?", "answer": true, "facts": [ "Nikola Tesla was a famous inventor born in 1856.", "The electric car company Tesla was named after Nikola Tesla.", "The hard rock band Tesla is named after Nikola Tesla." ], "decomposition": [ "How many brands are named after Nikola Tesla?", "Is #1 much more than one?" ], "evidence": [ [ [ [ "Tesla, Inc.-4" ] ], [ "operation" ] ], [ [ [ "Nikola Motor Company-1", "Tesla, Inc.-4" ] ], [ "operation" ] ], [ [ [ "Nikola Motor Company-1", "Tesla Electric Light and Manufacturing-2", "Tesla, Inc.-1", "Tesla-2" ] ], [ "operation" ] ] ] }, { "qid": "f03fe75dde01742e5a03", "term": "Metropolitan Museum of Art", "description": "Art museum in New York City, New York", "question": "Could Bernie Sanders visit the Metropolitan Museum of Art twenty times for under two hundred dollars?", "answer": false, "facts": [ "Bernie Sanders is a senior citizen", "Senior citizens from outside NY, NJ, or CT must pay $17 per visit" ], "decomposition": [ "What age group would Bernie Sanders be classifed as?", "How much must #1 pay to enter the Metropolitan Museum of Art?", "Is seventeen times #2 less than 200? " ], "evidence": [ [ [ [ "Bernie Sanders-1" ] ], [ [ "Metropolitan Museum of Art-61" ] ], [ [ "Metropolitan Museum of Art-61" ] ] ], [ [ [ "Bernie Sanders-1", "Discounts and allowances-33" ] ], [ [ "Metropolitan Museum of Art-50" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Bernie Sanders-5", "Old age-12" ] ], [ [ "Bernie Sanders-111", "Metropolitan Museum of Art-50" ] ], [ "operation" ] ] ] }, { "qid": "2b45a624da84a288d37e", "term": "Wednesday", "description": "Day of the week", "question": "Are all Wednesdays in a year enough to read Bible 15 times?", "answer": true, "facts": [ "There are 52 Wednesdays in a year.", "There are 1,248 hours over all the Wednesdays in a year.", "The Old Testament of the Bible takes an average of 52 hours to read.", "The New Testament of the Bible takes an average of 18 hours to read." ], "decomposition": [ "How many Wednesdays are there in a year?", "What is #1 multiplied by 24?", "How long does it take to read the old testament?", "How long does it take to read the new testament?", "Is #2 greater than or equal to: #3 plus #4?" 
], "evidence": [ [ [ [ "Year-57" ] ], [ "operation" ], [ [ "Old Testament-2" ], "no_evidence" ], [ [ "New Testament-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Week-10" ] ], [ "operation" ], [ [ "New Testament-11", "Old Testament-2" ], "no_evidence" ], [ [ "New Testament-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Year-66" ], "no_evidence" ], [ "operation" ], [ [ "Protestant Bible-15" ], "no_evidence" ], [ [ "Protestant Bible-15" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c6b927da57fb0b45b830", "term": "Bicycle", "description": "Pedal-driven two-wheel vehicle", "question": "Do children's bicycles often have extra wheels?", "answer": true, "facts": [ "Training wheels are a set of two wheels to attach to bicycles of new bike riders for additional support.", "Training wheels are marketed primarily at children." ], "decomposition": [ "What types of bicycles have more than two wheels?", "Are any of #1 customarily bought for children?" ], "evidence": [ [ [ [ "Training wheels-1" ] ], [ [ "Training wheels-1" ] ] ], [ [ [ "Tricycle-3" ] ], [ "operation" ] ], [ [ [ "Tricycle-1" ] ], [ [ "Tricycle-3" ] ] ] ] }, { "qid": "5fa29655003d347cb493", "term": "Groundhog Day", "description": "Traditional method of weather prediction", "question": "Would most school children in New York be wearing jackets on groundhog day?", "answer": true, "facts": [ "Groundhog day takes place on February second.", "New York is typically very cold in February." ], "decomposition": [ "What month does Groundhog day occur?", "What is the season in #1?", "Do people typically wear jackets during #2?" ], "evidence": [ [ [ [ "Groundhog Day-1" ] ], [ [ "Groundhog Day-1" ] ], [ [ "Winter clothing-2" ] ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "February-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "Groundhog Day-1" ] ], [ [ "Jacket-1" ] ] ] ] }, { "qid": "8c550764a42b5810b430", "term": "Thirty Years' War", "description": "War between 1618 and 1648; with over 8 million fatalities", "question": "Could a white cockatoo have lived through the entire Thirty Years' War?", "answer": true, "facts": [ "The Thirty Years' War lasted 30 years", "White cockatoos have been reported to live between 40-60 years in captivity" ], "decomposition": [ "How long did the Thirty Years' War last?", "How long can white cockatoos live?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Thirty Years' War-1" ] ], [ [ "White cockatoo-9" ] ], [ "operation" ] ], [ [ [ "Thirty Years' War-1" ] ], [ [ "White cockatoo-9" ] ], [ "operation" ] ], [ [ [ "Germany in the early modern period-9" ] ], [ [ "White cockatoo-9" ] ], [ "operation" ] ] ] }, { "qid": "3c354ae95bfcbd8e8664", "term": "Railroad engineer", "description": "person who operates a train on a railroad or railway", "question": "Did Jesus go to school to study railroad engineering?", "answer": false, "facts": [ "The steam locomotive to drive a train was invented in the 19th century.", "Jesus lived around 0 AD. " ], "decomposition": [ "When was the steam locomotive invented?", "When did Jesus die?", "Was #1 before #2?" 
], "evidence": [ [ [ [ "Steam locomotive-2" ] ], [ [ "Jesus-1" ] ], [ "operation" ] ], [ [ [ "Steam locomotive-2" ] ], [ [ "Jesus-1" ] ], [ "operation" ] ], [ [ [ "Steam locomotive-6" ] ], [ [ "Crucifixion of Jesus-1" ] ], [ "operation" ] ] ] }, { "qid": "3c95fe5ad91a9c38bc15", "term": "Arnold Schwarzenegger", "description": "Austrian-American actor, businessman, bodybuilder and politician", "question": "Can Arnold Schwarzenegger deadlift an adult Black rhinoceros?", "answer": false, "facts": [ "Arnold Schwarzenegger deadlifted 710 pounds in a competition.", "The world deadlift record is 1,104 pounds, set by Game of Thrones actor Hafthor Bjornsson.", "The weight of an adult Black rhinoceros is between 1,800 – 3,100 pounds." ], "decomposition": [ "How much can Arnold Schwarzenegger deadlift?", "How much does an adult Black rhino weigh?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Arnold Schwarzenegger-24" ] ], [ [ "Black rhinoceros-8" ] ], [ "operation" ] ], [ [ [ "Arnold Schwarzenegger-24" ] ], [ [ "Black rhinoceros-8" ] ], [ "operation" ] ], [ [ [ "Arnold Schwarzenegger-24" ] ], [ [ "Black rhinoceros-8" ] ], [ "operation" ] ] ] }, { "qid": "8de3fccde94a5f8448bb", "term": "Palace of Westminster", "description": "Meeting place of the Parliament of the United Kingdom,", "question": "Can the Palace of Westminster tell time in the dark?", "answer": true, "facts": [ "The Palace of Westminster has Big Ben, a striking clock tower", "Big Ben communicates the time via bells" ], "decomposition": [ "What is the clock tower of the Palace of Westminster?", "What does #1 use to communicate time?", "Can #2 work without light?" ], "evidence": [ [ [ [ "Palace of Westminster-29" ] ], [ [ "Palace of Westminster-30" ] ], [ "operation" ] ], [ [ [ "Big Ben-1" ] ], [ [ "Big Ben-50" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Palace of Westminster-29" ] ], [ [ "Palace of Westminster-30" ] ], [ [ "Palace of Westminster-30" ] ] ] ] }, { "qid": "1d95c8b85665773de38f", "term": "Herpes simplex virus", "description": "Species of virus", "question": "Can Planned Parenthood tell your University that you have Herpes simplex virus?", "answer": false, "facts": [ "Planned Parenthood specializes in reproductive healthcare.", "Planned Parenthood practitioners are bound by HIPAA to not disclose any patient information. " ], "decomposition": [ "Who works at Planned Parenthood?", "Are #1 bound by any laws in regards to patient information?", "Does #2 allow for patient information to be disclosed?" ], "evidence": [ [ [ [ "Clinical Research Bureau-1" ] ], [ [ "Confidentiality-16", "Medical privacy-52" ] ], [ "operation" ] ], [ [ [ "Planned Parenthood-2" ] ], [ "no_evidence" ], [ [ "Planned Parenthood-36" ], "no_evidence", "operation" ] ], [ [ [ "Planned Parenthood-2" ] ], [ [ "Health Insurance Portability and Accountability Act-11", "Health Insurance Portability and Accountability Act-13" ] ], [ "operation" ] ] ] }, { "qid": "300927fd135e8d94af16", "term": "Prime number", "description": "Integer greater than 1 that has no positive integer divisors other than itself and 1", "question": "Can a prime number be represented by the number of days in a week?", "answer": true, "facts": [ "There are seven days in a week.", "Seven is a prime number." ], "decomposition": [ "How many days are there in a week?", "Is #1 a prime number?" 
], "evidence": [ [ [ [ "Week-1" ] ], [ [ "Prime number-1", "Prime number-13" ], "operation" ] ], [ [ [ "Week-1" ] ], [ [ "7-1" ] ] ], [ [ [ "Week-8" ] ], [ [ "Prime number-7" ] ] ] ] }, { "qid": "9c0111d03557271a54fc", "term": "Reformation", "description": "Schism within the Christian Church in the 16th century", "question": "Would a tool used for Martin Luther's Reformation opening salvo aid in a crucifixion?", "answer": true, "facts": [ "Martin Luther began the Reformation with the defiant act of nailing 95 grievances to the door of the Wittenberg church.", "Roman crucifixions required several tools including nails and wooden beams." ], "decomposition": [ "What did Martin Luther begin his Reformation with?", "What tools were used in #1?", "What are the tools required to preform Roman crucifixions?", "Is there any overlap between #2 and #3?" ], "evidence": [ [ [ [ "Ninety-five Theses-1" ] ], [ [ "Wittenberg-7" ] ], [ [ "Crucifixion of Jesus-44" ] ], [ "operation" ] ], [ [ [ "Martin Luther-19" ] ], [ [ "Nail (fastener)-1", "Nail (fastener)-2" ] ], [ [ "Crucifixion-1" ] ], [ "operation" ] ], [ [ [ "Martin Luther-19" ] ], [ [ "Nail (fastener)-2" ] ], [ [ "Crucifixion-1", "Nail (fastener)-2" ] ], [ "operation" ] ] ] }, { "qid": "f24fefb2c5328aa6daf8", "term": "Achilles", "description": "Greek mythological hero", "question": "Would Achilles dominate Legolas in a hypothetical fight?", "answer": false, "facts": [ "Achilles was a Greek hero that was killed by an arrow to the heel.", "Legolas is an elf archer from Lord of the Rings that can shoot arrows incredibly fast.", "Achilles's companions included human warriors such as Ajax and Odysseus.", "Legolas's companions include Gandalf who is a high level wizard, capable of casting many deadly spells." ], "decomposition": [ "What is Achilles weakspot?", "What is Legolas strength?", "Will #1 easily dominate #2" ], "evidence": [ [ [ [ "Achilles' heel-2" ] ], [ [ "Legolas-2" ] ], [ [ "Legolas-1" ] ] ], [ [ [ "Achilles-2" ] ], [ [ "Legolas-4" ] ], [ "operation" ] ], [ [ [ "Achilles-2" ] ], [ [ "Legolas-18" ] ], [ "operation" ] ] ] }, { "qid": "4d880ab760e6a3094f01", "term": "Horror fiction", "description": "genre of fiction", "question": "Is Edgar Allan Poe obscure in the world of horror fiction?", "answer": false, "facts": [ "Edgar Allan Poe's writing has endured for over 150 years. ", "Edgar Allan Poe's horror writing has been included in classroom curriculum for decades. " ], "decomposition": [ "How long have Edgar Allan Poe's writings remained in common use?", "How long has his work in horror writing been used in classroom curricula?", "Is #1 or #2 less than a decade?" ], "evidence": [ [ [ [ "Edgar Allan Poe-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Edgar Allan Poe-1", "Edgar Allan Poe-35" ], "no_evidence" ], [ [ "The Masque of the Red Death (1964 film)-1", "The Pit and the Pendulum (1991 film)-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Edgar Allan Poe-3" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2a62cf7622c2a406fa77", "term": "Shooting sports", "description": "sports involving firearms used to hit targets", "question": "Do all shooting sports involve bullets?", "answer": false, "facts": [ "Paintball is a shooting sport that uses paint pellets in lieu of bullets. ", "Crossbow is a shooting sport that uses 'bolts' or arrows instead of bullets." ], "decomposition": [ "What are some common projectiles used in shooting sports?", "Are all of #1 bullets?" 
], "evidence": [ [ [ [ "Bow and arrow-1", "Bullet-1", "Shooting sports-1" ] ], [ "operation" ] ], [ [ [ "Archery-1" ] ], [ [ "Arrow-1", "Bullet-1" ], "operation" ] ], [ [ [ "Shooting sports-1" ] ], [ [ "Crossbow bolt-1", "Shooting sports-1" ] ] ] ] }, { "qid": "7b15ca4cc2a7bf3fe1b9", "term": "Ahura Mazda", "description": "highest deity of Zoroastrianism", "question": "Will Ahura Mazda have to look down to see Abaddon's dwelling??", "answer": true, "facts": [ "Abaddon is a demon that is said to dwell in a bottomless pit below the earth that is a realm of the dead.", "Ahura Mazda is the chief deity of Zoroastrianism.", "Ahura Mazda is the Supreme Being located in heaven, high above the sky." ], "decomposition": [ "Where does Abaddon dwell?", "Where is Ahura Mazda known to reside?", "Is #2 located physically above #1?" ], "evidence": [ [ [ [ "Abaddon-1" ] ], [ [ "Ahura Mazda-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Abaddon-1" ] ], [ [ "Ahura Mazda-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Abaddon in popular culture-1" ] ], [ [ "Ahura Mazda-1" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "f79d13cdc96dd01af216", "term": "Alaska Purchase", "description": "1867 sale of Alaska to the USA by Russia", "question": "Was Alaska part of the Northern Army during the Civil War?", "answer": false, "facts": [ "The Civil War was from 1861 - 1865.", "The Northern Army consisted of soldiers from states north of the Mason-Dixon line.", "Alaska did not become part of the United States until 1867." ], "decomposition": [ "When did the American Civil War take place?", "When did Alaska become part of the United States?", "Is #2 before #1?" ], "evidence": [ [ [ [ "American Civil War-1" ] ], [ [ "Alaska-42" ] ], [ "operation" ] ], [ [ [ "American Civil War-1" ] ], [ [ "Alaska Purchase-1" ] ], [ "operation" ] ], [ [ [ "American Civil War-1" ] ], [ [ "Alaska Statehood Act-3" ] ], [ "operation" ] ] ] }, { "qid": "18586794a7000980c6a8", "term": "Bing (search engine)", "description": "Web search engine from Microsoft", "question": "Do Bing (search engine) searches earn the searcher more than competitors do?", "answer": true, "facts": [ "Bing (search engine) has a search rewards program that gives the user points, from conducting searches, to redeem for prizes.", "Bing (search engine) has several competitors such as Google, and DuckDuckGo.", "Google and DuckDuckGo do not have search rewards programs." ], "decomposition": [ "What does Bing give to people who use the search engine?", "Who are Bing's major competitors?", "What do the companies in #2 give people for using their service?", "Is #1 of greater value than #3?" ], "evidence": [ [ [ [ "Bing (search engine)-10", "Bing (search engine)-26" ] ], [ [ "Bing (search engine)-52" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Bing (search engine)-57" ] ], [ [ "Bing (search engine)-67" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bing (search engine)-57" ] ], [ [ "Bing (search engine)-54" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "5a5ccd78ba4d4af27bc6", "term": "Bucharest", "description": "Capital of Romania", "question": "Is Bucharest located south of Egypt?", "answer": false, "facts": [ "Bucharest, Romania is located in Eastern Europe.", "Egypt is located in Africa.", "Most of Africa is south of Europe." ], "decomposition": [ "What country is Bucharest located in?", "Is #1 south of Egypt?" 
], "evidence": [ [ [ [ "Bucharest-5" ] ], [ [ "Romania-1" ] ] ], [ [ [ "Bucharest-1" ] ], [ [ "Egypt-1" ], "operation" ] ], [ [ [ "Bucharest-1" ] ], [ "operation" ] ] ] }, { "qid": "4e1b50a3da3dca8fc255", "term": "Loudspeaker", "description": "a microphone which contains the Bluetooth as at the moment as the microphone is contained the energy; expected energy cooling expect that early in the alphabet (early out of it contains one of these earlier)", "question": "Would a loudspeaker be useful for most Gallaudet students?", "answer": false, "facts": [ "Gallaudet is a school for the deaf in the USA.", "Most deaf students would not be able to accurately use or rely on information conveyed via loudspeaker." ], "decomposition": [ "What disability do Gallaudet students suffer from?", "What does a loudspeaker do?", "Would #2 be beneficial for people who have #1?" ], "evidence": [ [ [ [ "Gallaudet University-1" ] ], [ [ "Loudspeaker-1" ] ], [ "operation" ] ], [ [ [ "Gallaudet University-1" ] ], [ [ "Loudspeaker-1" ] ], [ "operation" ] ], [ [ [ "Gallaudet University-1" ] ], [ [ "Loudspeaker-1" ] ], [ "operation" ] ] ] }, { "qid": "653e7f9907cc581803b6", "term": "Amtrak", "description": "Intercity rail operator in the United States", "question": "Would three newborn kittens fit on a standard Amtrak coach seat?", "answer": true, "facts": [ "Newborn kittens are small enough to fit in an average human hand.", "The average human hand is 7 inches.", "An Amtrak coach seat is 39\" x 23\"." ], "decomposition": [ "What is the size of a newborn kitten?", "How big would #1 times three kittens be?", "How large is an Amtrak coach seat?", "Is #2 smaller than #3?" ], "evidence": [ [ [ [ "Cat-28" ], "no_evidence" ], [ "operation" ], [ [ "Airline seat-29" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Kitten-4" ], "no_evidence" ], [ "no_evidence" ], [ [ "Amtrak-43" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Cat-28" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Amtrak-48" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a54b16376f82a6e14bf9", "term": "Atmosphere of Earth", "description": "Layer of gases surrounding the planet Earth", "question": "Will a rock float in the atmosphere of Earth?", "answer": false, "facts": [ "Things only float if they are less dense than the surrounding material.", "Rocks are denser than air.", "The atmosphere of Earth is made up of air." ], "decomposition": [ "Which substance does the earth's atmosphere contain?", "What is the average density of #1?", "What is the average density of rocks?", "Is #3 less than #2?" ], "evidence": [ [ [ [ "Atmosphere-14" ] ], [ [ "Nitrogen-26" ] ], [ [ "Sandstone-10" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Atmosphere of Earth-1", "Atmosphere-14" ] ], [ [ "Atmosphere of Earth-9" ], "no_evidence" ], [ [ "Rock (geology)-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Troposphere-5" ] ], [ [ "Nitrogen-26" ] ], [ [ "Granite-3" ] ], [ "operation" ] ] ] }, { "qid": "774fbe2b25beb3db5456", "term": "Horseradish", "description": "species of plant", "question": "Could a newborn look over the top of a fully grown horseradish plant?", "answer": false, "facts": [ "A fully grown horseradish plant can reach a height of 4.9 feet.", "Newborn children are typically between 14-20 inches tall in first world countries." ], "decomposition": [ "How tall are newborn babies on average?", "How tall is the average horseradish plant?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Infant-5" ] ], [ [ "Horseradish-2" ] ], [ "operation" ] ], [ [ [ "Infant-5" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Infant-9" ] ], [ [ "Horseradish-2" ] ], [ "operation" ] ] ] }, { "qid": "29d785535d6462d9e711", "term": "Red Sea", "description": "Arm of the Indian Ocean between Arabia and Africa", "question": "Does the Red Sea have biblical significance? ", "answer": true, "facts": [ "During the biblical Exodus, the Israelite had to cross the Red Sea.", "Moses parted the Red Sea to allow the Israelite group to escape from the Egyptians. " ], "decomposition": [ "What bodies of water are important to Biblical stories?", "Is the Red Sea among #1?" ], "evidence": [ [ [ [ "Red Sea-8" ] ], [ [ "Red Sea-8" ], "operation" ] ], [ [ [ "Jordan River-2", "Mediterranean Sea-11", "Red Sea-8", "Sea of Galilee-4" ] ], [ "operation" ] ], [ [ [ "Crossing the Red Sea-1" ] ], [ "operation" ] ] ] }, { "qid": "c8bf91bdcde3eb7501e2", "term": "Chinatown, Manhattan", "description": "Neighborhood of Manhattan in New York City", "question": "Would moon cakes be easy to find in Chinatown, Manhattan?", "answer": true, "facts": [ "Moon cakes are a Chinese traditional desert.", "Chinatown offers many 'tastes of home' to Chinese locals and travelers. " ], "decomposition": [ "What cuisine are moon cakes from?", "Does Chinatown, Manhattan sell food products of #1?" ], "evidence": [ [ [ [ "Mooncake-1" ] ], [ [ "Chinatown-14" ], "no_evidence", "operation" ] ], [ [ [ "Moon shrimp cake-1" ] ], [ [ "Moon shrimp cake-1" ] ] ], [ [ [ "Mooncake-1" ] ], [ [ "Chinatown-1" ], "operation" ] ] ] }, { "qid": "f046ac1a972a64c13501", "term": "Bugs Bunny", "description": "Warner Bros. cartoon character", "question": "Is Bugs Bunny known for carrying a root vegetable around with him?", "answer": true, "facts": [ "Bugs Bunny carries a carrot around with him.", "Carrots are considered root vegetables." ], "decomposition": [ "What does Bugs Bunny carry around with him?", "Is #1 a root vegetable?" ], "evidence": [ [ [ [ "Bugs Bunny-20" ] ], [ [ "Carrot-31" ] ] ], [ [ [ "Bugs Bunny-20" ] ], [ [ "Carrot-1" ] ] ], [ [ [ "Bugs Bunny-37" ] ], [ [ "Carrot-1" ] ] ] ] }, { "qid": "e9e2b22b193fcca2a976", "term": "Cricket (insect)", "description": "small insects of the family Gryllidae", "question": "Would someone buying crickets be likely to own pets?", "answer": true, "facts": [ "Reptiles are a popular pet for people.", "Reptiles enjoy eating crickets. ", "Crickets are sold at many pet stores." ], "decomposition": [ "What are some common animal classes that people keep as pets?", "Do any of #1 usually eat crickets?" ], "evidence": [ [ [ [ "Pet-22" ] ], [ [ "Crickets as pets-4" ] ] ], [ [ [ "Crickets as pets-26", "Pet-2" ], "no_evidence" ], [ [ "Cricket (insect)-3" ], "no_evidence", "operation" ] ], [ [ [ "Pet-2" ] ], [ [ "Lizard-27" ] ] ] ] }, { "qid": "2f49d601ea00411ef3d5", "term": "Christianity in China", "description": "Religious community", "question": "Are some adherents to Christianity in China historic enemies of Catholic Church?", "answer": true, "facts": [ "Christianity in China is comprised of several different groups including: Protestants, Catholics, Evangelicals, and Orthodox Christians.", "Catholics have been at war with Protestants throughout history.", "The 1572 St. Bartholomew's Day Massacre saw thousands of Protestants killed by Catholic mobs.", "English Protestant rulers killed many Irish Catholics during the Reformation." 
], "decomposition": [ "Which Christian denominations are historic enemies of the Catholic Church?", "Do any of the denominations in #1 currently have adherents in China?" ], "evidence": [ [ [ [ "Catholic Church-119" ] ], [ [ "Protestantism by country-3" ] ] ], [ [ [ "St. Bartholomew's Day massacre-1" ] ], [ [ "Christianity in China-35", "Robert Morrison (missionary)-50" ], "operation" ] ], [ [ [ "Protestantism-1" ] ], [ [ "Protestantism by country-3" ] ] ] ] }, { "qid": "e681e7f094a45ab8f72d", "term": "Oval Office", "description": "office of the U.S. President", "question": "Can a Kia Rio fit inside the Oval Office?", "answer": true, "facts": [ "The Oval Office is 35' long and 29' wide.", "A Kia Rio is 14.3' long and 5.6' wide. " ], "decomposition": [ "How large is the Oval Office?", "How large is a Kia Rio?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Oval Office-2" ], "no_evidence" ], [ [ "Kia Rio-28" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Eric Gugler-9" ], "no_evidence" ], [ [ "Kia Rio-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "665445900ef590e47ed0", "term": "War in Vietnam (1945–46)", "description": "Prelude to the Indochina Wars", "question": "Were veterans of the War in Vietnam (1945–46) given free education by the Soviet Union?", "answer": false, "facts": [ "The Soviet Union provided free education to children of those who died in the Vietnam War with America.", "The War in Vietnam (1945–46) was twenty years before the Vietnam War with America." ], "decomposition": [ "The Soviet Union gave free education to children of people who died in which war?", "When did #1 end?", "When did the War in Vietnam (1945-46) end?", "Was #3 before #2?" ], "evidence": [ [ [ [ "Orphans in the Soviet Union-14" ], "no_evidence" ], [ [ "World War II-1" ] ], [ [ "Vietnam War-1" ] ], [ "operation" ] ], [ [ [ "Orphans in the Soviet Union-2", "Orphans in the Soviet Union-6" ], "no_evidence" ], [ [ "Russian Civil War-2", "World War I-1" ], "no_evidence" ], [ [ "War in Vietnam (1945–1946)-1", "War in Vietnam (1945–1946)-2" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Vietnam War-1" ] ], [ "operation" ] ] ] }, { "qid": "b64be915de0e671e8548", "term": "Easter Bunny", "description": "Folkloric figure and symbol of Easter", "question": "Is the Easter Bunny popular in September?", "answer": false, "facts": [ "The Easter Bunny is a symbol of the Christian holiday of Easter", "Easter occurs in March or April each year" ], "decomposition": [ "What holiday does the Easter Bunny symbolize?", "Is #1 celebrated in September?" ], "evidence": [ [ [ [ "Easter Bunny-1" ] ], [ [ "Easter-14", "Easter-15" ] ] ], [ [ [ "Easter Bunny-1" ] ], [ [ "Easter-14", "Easter-15" ], "operation" ] ], [ [ [ "Easter Bunny-1" ] ], [ [ "Easter-17" ] ] ] ] }, { "qid": "eb7c254ac7ae82656aee", "term": "Rhinoceros", "description": "family of mammals", "question": "Have rhinoceroses been killed to improve human sex lives?", "answer": true, "facts": [ "Rhinoceros horns are used for folk treatment of sexual impotency.", "Rhinoceroses are killed to remove their horns." ], "decomposition": [ "Which part of the Rhinoceros do most poachers hunt and kill it for?", "What are some common traditional uses of #1?", "Is treatment of sexual impotency included in #2?" 
], "evidence": [ [ [ [ "Rhinoceros-31" ] ], [ [ "Rhinoceros-32" ] ], [ "operation" ] ], [ [ [ "Rhinoceros-3" ] ], [ [ "Rhinoceros-32", "Rhinoceros-34" ] ], [ "operation" ] ], [ [ [ "Rhinoceros-3" ] ], [ [ "Rhinoceros-32" ] ], [ [ "Aphrodisiac-1" ], "operation" ] ] ] }, { "qid": "32b82c383b3a92966d07", "term": "United States Army Rangers", "description": "Elite military formation of the United States Army", "question": "Is Mozambique Drill an easy shot for United States Army Ranger?", "answer": true, "facts": [ "The Mozambique Drill is a close quarters combat technique involving firing two shots to the body and one to the head.", "United States Army Rangers are equipped with M4A1 guns.", "M4A1 guns can fire up to 600 meters." ], "decomposition": [ "What is the The Mozambique Drill?", "What guns are United States Army Rangers equipped with?", "What is the shooting range of #2?", "Would a gun with the range of #3 be helpful in #1?" ], "evidence": [ [ [ [ "Mozambique Drill-1" ] ], [ [ "75th Ranger Regiment-2", "United States Army-68" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Mozambique Drill-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Mozambique Drill-1" ] ], [ [ "M4 carbine-31" ] ], [ [ "M4 carbine-31" ] ], [ [ "Close-quarters combat-1" ], "operation" ] ] ] }, { "qid": "076f905ae36408b5ee69", "term": "Rainbow", "description": "meteorological phenomenon", "question": "Is lunch on the beach a good activity to spot the full circle of a rainbow?", "answer": false, "facts": [ "The full circle of a rainbow cannot usually be seen from ground level", "Sometimes the full circle of a rainbow can be seen from a high building or aircraft", "You can see more of a rainbow the closer to the horizon the sun is", "Lunch occurs at midday when the sun is likely high in the sky" ], "decomposition": [ "At what point in the sky is the sun most likely to create a full circle rainbow?", "At what altitudes are full rainbows more likely to be seen?", "Is lunchtime at the beach relatively close to conditions #1 and #2?" ], "evidence": [ [ [ [ "Halo (optical phenomenon)-1" ] ], [ [ "Halo (optical phenomenon)-2" ] ], [ [ "Atmospheric optics-17", "Beach-16" ] ] ], [ [ [ "Rainbow-2" ], "no_evidence" ], [ [ "Rainbow-11" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Rainbow-11" ], "no_evidence" ], [ [ "Sea level-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "3428fcbe9ffdcd50d234", "term": "ABBA", "description": "Swedish pop group", "question": "Does ABBA have similar gender configuration to The Mamas & The Papas?", "answer": true, "facts": [ "ABBA was a Swedish pop group composed of four members.", "The Mamas & The Papas was an American band composed of four members.", "The members of ABBA are Björn Ulvaeus (male), Benny Andersson (male), Agnetha Fältskog (female), and Anni-Frid Lyngstad (female)..", "The members of The Mamas & The Papas are John Phillips (male), Denny Doherty (male), Cass Elliot (female), and Michelle Phillips (female)." ], "decomposition": [ "How many men and women each make up the Mamas and the Papas?", "How many men and women each make up ABBA?", "Is #1 equal to #2?" 
], "evidence": [ [ [ [ "The Mamas and the Papas-1" ] ], [ [ "ABBA-1" ] ], [ "operation" ] ], [ [ [ "The Mamas and the Papas-1" ] ], [ [ "ABBA-1" ] ], [ "operation" ] ], [ [ [ "The Mamas and the Papas-1" ] ], [ [ "ABBA-1" ] ], [ "operation" ] ] ] }, { "qid": "ac543a4e99396d4f8132", "term": "Mona Lisa", "description": "Painting by Leonardo da Vinci", "question": "Is the Mona Lisa based on a real person?", "answer": true, "facts": [ "There are two main theories about the origin of the Mona Lisa.", "The first is that a wealthy noblewoman, the wife of one of Leonardo's friends, sat as the model.", "Another popular theory is that Leonardo painted her as a cross-gendered self portrait." ], "decomposition": [ "Who was the Mona Lisa painting based on?", "Is #1 a real person?" ], "evidence": [ [ [ [ "Mona Lisa-2" ] ], [ [ "Lisa del Giocondo-1" ] ] ], [ [ [ "Mona Lisa-2" ] ], [ [ "Lisa del Giocondo-1" ], "operation" ] ], [ [ [ "Mona Lisa-12" ] ], [ [ "Isabella of Aragon, Queen of Germany-1" ] ] ] ] }, { "qid": "47900c93aec6ff71b73b", "term": "Johann Sebastian Bach", "description": "German composer", "question": "Did Johann Sebastian Bach leave his first wife for his second wife?", "answer": false, "facts": [ "Johann Sebastian Bach was married to Maria Barbara Bach from 1707–1720.", "Johann Sebastian Bach was married to Anna Magdalena Bach from 1721–1750.", "Maria Barbara Bach died suddenly in 1720." ], "decomposition": [ "What dates was Johann Sebastian Bach married to Anna Magdalena Bach?", "When did Maria Barbara Bach die?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Anna Magdalena Bach-10", "Anna Magdalena Bach-5" ] ], [ [ "Maria Barbara Bach-1" ] ], [ "operation" ] ], [ [ [ "Johann Sebastian Bach-20" ] ], [ [ "Johann Sebastian Bach-12", "Maria Barbara Bach-1" ] ], [ "operation" ] ], [ [ [ "Anna Magdalena Bach-5" ] ], [ [ "Maria Barbara Bach-4" ] ], [ "operation" ] ] ] }, { "qid": "033147a022dd41b58cd9", "term": "Cheeseburger", "description": "hamburger topped with cheese", "question": "Could Eddie Hall hypothetically deadlift the world's largest cheeseburger?", "answer": false, "facts": [ "The largest cheeseburger ever made weighed 2,014 pounds.", "Eddie Hall is the former world record deadlift holder, lifting 1,102 pounds under strongman rules." ], "decomposition": [ "What is Eddie Hall's record deadlist?", "What is the weight of the world largest cheeseburger?", "Is #1 larger than #2?" ], "evidence": [ [ [ [ "Eddie Hall-1" ] ], [ [ "Cheeseburger-8" ] ], [ "operation" ] ], [ [ [ "Eddie Hall-1" ] ], [ [ "Cheeseburger-8" ] ], [ "operation" ] ], [ [ [ "Eddie Hall-1" ] ], [ [ "Cheeseburger-8" ] ], [ "operation" ] ] ] }, { "qid": "c234a28480510591980d", "term": "Justin Timberlake", "description": "American singer, record producer, and actor", "question": "Has Justin Timberlake ever written a song about Britney Spears?", "answer": true, "facts": [ "Justin Timberlake and Britney Spears dated in 1999.", "In 2002, Justin Timberlake released a music video for his breakup song 'Cry Me A River' and featured an actress who resembled his then ex Britney Spears." ], "decomposition": [ "Who did Justin Timberlake date in 1999?", "Who was the song 'Cry Me A River' by Justin timberlake about?", "Is #1 and #2 the same?" 
], "evidence": [ [ [ [ "Justin Timberlake-32" ] ], [ [ "Cry Me a River (Justin Timberlake song)-1" ] ], [ "operation" ] ], [ [ [ "Justin Timberlake-32" ] ], [ [ "Cry Me a River (Justin Timberlake song)-1" ] ], [ "operation" ] ], [ [ [ "Justin Timberlake-32" ] ], [ [ "Cry Me a River (Justin Timberlake song)-1" ] ], [ "operation" ] ] ] }, { "qid": "71ca1a8a75c5c582ab3f", "term": "Head coach", "description": "Senior coach or manager of a sports team", "question": "Do most high school head coaches make as much as the Head Coach at NCSU?", "answer": false, "facts": [ "The average high school makes about $41,000.", "The head coach for NCSU makes about $1.8 million dollars." ], "decomposition": [ "What is the average salary for a high school head coach?", "What is the salary of the head football coach at NCSU?", "Is #1 within 5% of #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Head coach-4" ], "no_evidence" ], [ [ "NC State Wolfpack-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "NC State Wolfpack football-34" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2915baf6fba0c1b5100f", "term": "Christopher Columbus", "description": "Italian explorer, navigator, and colonizer", "question": "Did Christopher Columbus break the fifth commandment in Christianity?", "answer": true, "facts": [ "The fifth commandment states that Thou Shalt Not Kill.", "Christopher Columbus ordered a brutal crackdown in which many natives were killed, and then paraded their dismembered bodies through the streets." ], "decomposition": [ "What is the fifth commandment in Christianity?", "What activities did Christopher Columbus subject Native Americans too?", "Is the activity in #1 also in #2?" ], "evidence": [ [ [ [ "Thou shalt not kill-29" ] ], [ [ "Christopher Columbus-48" ] ], [ [ "Christopher Columbus-48" ], "operation" ] ], [ [ [ "Ten Commandments-16" ] ], [ [ "Christopher Columbus-93" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Thou shalt not kill-36" ], "no_evidence" ], [ [ "Christopher Columbus-48" ] ], [ "operation" ] ] ] }, { "qid": "8c0614f44911693572b5", "term": "Transport", "description": "Human-directed movement of things or people between locations", "question": "Can you transport a primate in a backpack?", "answer": true, "facts": [ "Primates include lemurs, monkeys, apes, and humans.", "A Capuchin is a type of monkey that are an average height of twelve inches and weighs nine pounds.", "A school ruler is twelve inches.", "One school textbook can weigh up to six pounds." ], "decomposition": [ "What is the average size and weight capacity of a backpack?", "What are some common primates?", "Is any of #2 such that its size and weight is less than or equal to #1?" ], "evidence": [ [ [ [ "Backpack-3" ] ], [ [ "Primate-1" ] ], [ [ "Madame Berthe's mouse lemur-1" ] ] ], [ [ [ "Backpack-3" ] ], [ [ "Primate-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Backpack-18" ], "no_evidence" ], [ [ "Primates (journal)-1" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "3ae8644164c547560eda", "term": "King Arthur", "description": "legendary British leader of the late 5th and early 6th centuries", "question": "Was King Arthur at the beheading of Anne Boleyn?", "answer": false, "facts": [ "King Arthur was a legendary British leader who, according to medieval histories and romances, led the defence of Britain against Saxon invaders in the late 5th and early 6th centuries.", "Anne Boleyn was beheaded May 19, 1536." 
], "decomposition": [ "When is King Arthur thought to have died?", "When was Anne Boleyn born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "King Arthur-1" ], "no_evidence" ], [ [ "Anne Boleyn-1" ] ], [ "operation" ] ], [ [ [ "Battle of Camlann-1" ] ], [ [ "Anne Boleyn-1" ] ], [ "operation" ] ], [ [ [ "King Arthur-6" ] ], [ [ "Anne Boleyn-6" ] ], [ "operation" ] ] ] }, { "qid": "11714d45205c86d31910", "term": "Uppsala", "description": "Place in Uppland, Sweden", "question": "Can the city of Miami fit inside Uppsala?", "answer": false, "facts": [ "Miami measures 55.25 mi².", "Uppsala has an area of 18.83 mi². " ], "decomposition": [ "What is the area of Miami?", "What is the area of Uppsala?", "Is #1 less than or equal to #2?" ], "evidence": [ [ [ [ "Miami-15" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Miami-1" ] ], [ [ "Uppsala-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Miami-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cc072feedd07aee23d2a", "term": "Samsung Galaxy", "description": "series of Android mobile computing devices", "question": "Does Iphone have more iterations than Samsung Galaxy?", "answer": false, "facts": [ "As of 2020 the latest Iphone is Iphone 11.", "As of 2020 the latest Samsung Galaxy phone is the Samsung Galaxy S20." ], "decomposition": [ "How many models of the iPhone have been released as of 2020?", "How many models of the Samsung Galaxy have been released as of 2020?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "IPhone-178" ] ], [ [ "Samsung Galaxy-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "IPhone-178" ] ], [ [ "Samsung Galaxy-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "IPhone SE (2nd generation)-1" ] ], [ [ "Samsung Galaxy S20-1" ] ], [ "operation" ] ] ] }, { "qid": "2fe218415da196ecab68", "term": "Hanuman", "description": "The divine monkey companion of Rama in Hindu mythology", "question": "Is Hanuman associated with a Norse god?", "answer": false, "facts": [ "Hanuman is a companion of the god Rama.", "Rama is an avatar of the god Vishnu in Hindu mythology." ], "decomposition": [ "Which god is Hanuman associated with?", "Is #1 in Norse mythology?" ], "evidence": [ [ [ [ "Hanuman-1" ] ], [ [ "Norse mythology-2" ], "no_evidence", "operation" ] ], [ [ [ "Hanuman-1" ] ], [ "operation" ] ], [ [ [ "Hanuman-1" ] ], [ [ "Norse mythology-1", "Rama-1" ] ] ] ] }, { "qid": "6f0b33d71d2e65d3b376", "term": "Sonnet", "description": "form of poetry with fourteen lines; by the thirteenth century it signified a poem of fourteen lines that follows a strict rhyme scheme and specific structure", "question": "Did a Polish poet write sonnets about Islamic religion?", "answer": true, "facts": [ "Adam Mickiewicz was a Polish poet. ", "Adam Mickiewicz 's sonnet sequence focuses heavily on the culture and Islamic religion of the Crimean Tatars." ], "decomposition": [ "What were the major focus of Adam Mickiewicz's sonnets?", "Is #1 about Islamic religion?", "Was Adam Mickiewicz a Polish poet?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "The Crimean Sonnets-1" ] ], [ [ "Orientalism-1", "The Crimean Sonnets-2" ] ], [ [ "Adam Mickiewicz-1" ] ], [ "operation" ] ], [ [ [ "The Crimean Sonnets-2" ] ], [ "no_evidence" ], [ [ "Adam Mickiewicz-1" ] ], [ "operation" ] ], [ [ [ "The Crimean Sonnets-1" ], "no_evidence" ], [ [ "Adam Mickiewicz-23" ], "no_evidence", "operation" ], [ [ "Adam Mickiewicz-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "df0429c062ac5aaa9b99", "term": "Bugs Bunny", "description": "Warner Bros. cartoon character", "question": "Can you find Bugs Bunny at Space Mountain?", "answer": false, "facts": [ "Space Mountain is an attraction at Disney theme parks", "Bugs Bunny is a Warner Bros. character", "Warner Bros. characters appear at Six Flags theme parks" ], "decomposition": [ "Where is Space Mountain located?", "Which animation studio created Bugs Bunny?", "Which entertainment company is #1 related to?", "Is #2 part of #3?" ], "evidence": [ [ [ [ "Space Mountain-1" ] ], [ [ "Bugs Bunny-1" ] ], [ [ "Space Mountain (Disneyland)-1" ] ], [ "operation" ] ], [ [ [ "Space Mountain-1" ] ], [ [ "Bugs Bunny-1" ] ], [ [ "Disney Parks, Experiences and Products-1" ] ], [ "operation" ] ], [ [ [ "Space Mountain (Disneyland)-1" ] ], [ [ "Bugs Bunny-1" ] ], [ [ "Disneyland-1" ] ], [ "operation" ] ] ] }, { "qid": "c4a9e56df5b83e483769", "term": "Brazilian Navy", "description": "Naval warfare branch of Brazil's military forces", "question": "Could modern Brazilian Navy have hypothetically turned the tide in Battle of Actium?", "answer": true, "facts": [ "The Battle of Actium saw Mark Antony's army lose to Octavian.", "Octavian's army had 400 ships, 16000 infantry, and 3,000 archers.", "The Brazilian Navy has over 80,000 personnel, including 16,000 marines.", "Several Brazilian Navy ships are armed with explosive torpedoes. " ], "decomposition": [ "What was the result of the Battle of Actium?", "In #1, how many resources did the Octavian's army have?", " How many resources does the Brazilian Navy have? ", "Is #3 significantly more than #2?" ], "evidence": [ [ [ [ "Battle of Actium-26" ] ], [ [ "Battle of Actium-14" ] ], [ [ "Brazilian Navy-55", "Brazilian Navy-56" ] ], [ "operation" ] ], [ [ [ "Battle of Actium-2" ] ], [ [ "Battle of Actium-12" ] ], [ [ "Brazilian Navy-55" ] ], [ "operation" ] ], [ [ [ "Battle of Actium-2" ] ], [ [ "Battle of Actium-12" ] ], [ [ "Brazilian Navy-56" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "347eea662abc9467ed18", "term": "Jason", "description": "Greek mythological hero", "question": "Could the children of Greek hero Jason hypothetically fill a polo team?", "answer": true, "facts": [ "The Greek mythological hero is known for his quest to obtain the Golden Fleece.", "The Greek mythological hero had four children: Euneus, Nebrophonus, Mermerus, and Pheres.", "Polo is a sport played between two teams of 4 players." ], "decomposition": [ "How many children did Greek mythological hero Jason have?", "How many people are needed to make a polo team?", "Is #1 equal to or more than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Polo-51" ] ], [ "operation" ] ], [ [ [ "Medea-10" ] ], [ [ "Polo-4" ] ], [ "operation" ] ], [ [ [ "Medea-10" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ] ] }, { "qid": "4bbf4b169b4981ad5a34", "term": "Miami", "description": "", "question": "Can you swim to Miami from New York?", "answer": false, "facts": [ "The longest distance swam by a person is 139.8 miles.", "It is over 1,000 miles from New York to Miami." 
], "decomposition": [ "What is the longest distance that a human has ever swum?", "How far does one need to swim to get from New York to Miami?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Veljko Rogošić-1" ] ], [ [ "Silver Meteor-19" ] ], [ "operation" ] ], [ [ [ "Veljko Rogošić-1" ] ], [ [ "Miami River (New York)-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Long-distance swimming-1" ] ], [ [ "Miami-1", "New York City-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0a32d7cfde6cec332fd6", "term": "ABBA", "description": "Swedish pop group", "question": "Could ABBA play a mixed doubles tennis game against each other?", "answer": true, "facts": [ "ABBA contained two male and two female members.", "Mixed doubles tennis games consist of two teams of one man and one woman on each." ], "decomposition": [ "How many men and women are required to participate in a mixed doubles tennis game?", "How many men and women are members of the ABBA group?", "Is #2 at least equal to #1?" ], "evidence": [ [ [ [ "Mixed doubles-1", "Mixed-sex sports-12" ] ], [ [ "ABBA-1", "ABBA-2" ] ], [ "operation" ] ], [ [ [ "Types of tennis match-4" ] ], [ [ "ABBA-2" ] ], [ "operation" ] ], [ [ [ "Mixed doubles-1" ] ], [ [ "ABBA-1", "Agnetha Fältskog-11" ] ], [ "operation" ] ] ] }, { "qid": "65247fed8b1775f3b91c", "term": "Aloe", "description": "genus of plants", "question": "Is material from an aloe plant sometimes enclosed in petroleum-derived products?", "answer": true, "facts": [ "Aloe vera gel is sometimes kept in plastic packaging.", "Plastic packaging is derived from petroleum." ], "decomposition": [ "What kind of products are derived from petroleum?", "What products are made from aloe plants?", "Is #2 ever be packaged inside #1?" ], "evidence": [ [ [ [ "Petroleum product-4" ] ], [ [ "Aloe vera-19" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Plastic-3" ], "no_evidence" ], [ [ "Aloe vera-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Petroleum-2" ] ], [ [ "Petroleum-2" ] ], [ [ "Petroleum-2" ], "operation" ] ] ] }, { "qid": "a2c4d25423867e3fc195", "term": "Hurricane Harvey", "description": "Category 4 Atlantic hurricane in 2017", "question": "Could Hurricane Harvey catch a Peregrine falcon?", "answer": false, "facts": [ "Hurricane Harvey had maximum winds of 130 MPH.", "The Peregrine falcon is the fastest animal on Earth.", "A Peregrine falcon can reach a maximum speed of 240 MPH." ], "decomposition": [ "What was the top speed of Hurricane Harvey?", "What is the top speed of a Peregrine falcon?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Hurricane Harvey-24" ], "no_evidence" ], [ [ "Peregrine falcon-1" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Hurricane Harvey-5" ] ], [ [ "Peregrine falcon-15" ] ], [ "operation" ] ], [ [ [ "Hurricane Harvey-6" ] ], [ [ "Peregrine falcon-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e183f97699de5a056823", "term": "Hanging", "description": "execution or suicide method involving suspension of a person by a ligature", "question": "Do bodies movie during hanging?", "answer": true, "facts": [ "Electrochemical nerve signals are fired after death that can cause a body to twitch.", "If death by hanging is accomplished due to asphyxia, the victim may attempt to free themselves or may appear to struggle." ], "decomposition": [ "What does death by hanging usually induce in victims?", "What processes could occur in the nervous system immediately after death?", "Do #1 or #2 result in body movement?" 
], "evidence": [ [ [ [ "Hanging-25" ] ], [ [ "Hanging-26" ] ], [ "operation" ] ], [ [ [ "Hanging-19" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Hanging-1" ] ], [ [ "Hanging-25" ] ], [ "operation" ] ] ] }, { "qid": "61dd8e3c0e1e6aa4422f", "term": "Little Women", "description": "1860s novel by Louisa May Alcott", "question": "Would the author of Little Women have remembered the ratification of the 13th Amendment?", "answer": true, "facts": [ "The 13th Amendment was ratified in 1865.", "Louisa May Alcott died in 1888." ], "decomposition": [ "When was the 13th Amendment ratified?", "Who wrote Little Women?", "What years was #2 alive?", "Did #1 occur sometime during #3?" ], "evidence": [ [ [ [ "Thirteenth Amendment to the United States Constitution-1" ] ], [ [ "Little Women-1" ] ], [ [ "Louisa May Alcott-1" ] ], [ "operation" ] ], [ [ [ "Thirteenth Amendment to the United States Constitution-1" ] ], [ [ "Little Women-1" ] ], [ [ "Little Women-1" ] ], [ "operation" ] ], [ [ [ "Thirteenth Amendment to the United States Constitution-1" ] ], [ [ "Little Women-1" ] ], [ [ "Little Women-1" ] ], [ "operation" ] ] ] }, { "qid": "efffc81b286925c40b89", "term": "Benito Mussolini", "description": "Fascist leader of Italy", "question": "Would Benito Mussolini hypothetically play well in the NBA?", "answer": false, "facts": [ "Height is an important factor in playing basketball at a high level.", "The average NBA player is 6 feet 7 inches tall.", "Benito Mussolini was 5 feet 6.5 inches tall." ], "decomposition": [ "What is the height of Benito Mussolini?", "On average, what is the height of an NBA player?", "Is #1 comparable to #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Basketball-85" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Basketball-85" ] ], [ "no_evidence" ] ], [ [ [ "Benito Mussolini-1" ], "no_evidence" ], [ [ "Basketball-85" ] ], [ "operation" ] ] ] }, { "qid": "b53d6f0058183ba7cefe", "term": "Western honey bee", "description": "Species of insect", "question": "Does US brand Nice depend on Western honey bee?", "answer": true, "facts": [ "US brand Nice creates a number of products including honey.", "The Western honey bee can be found on every continent except Antarctica.", "The Western honey bee is the most common pollinator in US, making it the most important bee to domestic agriculture." ], "decomposition": [ "Which insect does US brand Nice need to obtain the honey they sell from?", "Is the Western honey bee a common example of #1 that can be found in the US?" ], "evidence": [ [ [ [ "Honey-1" ], "no_evidence" ], [ [ "Honey bee-20", "Western honey bee-1" ], "operation" ] ], [ [ [ "Honey-5" ] ], [ [ "Western honey bee-5" ], "operation" ] ], [ [ [ "Walgreens-13" ], "no_evidence" ], [ [ "Western honey bee-5" ] ] ] ] }, { "qid": "e91ba24305ae9f8850ed", "term": "Hulk", "description": "Superhero appearing in Marvel Comics publications and related media", "question": "Can Hulk's alter ego explain atomic events?", "answer": true, "facts": [ "Hulk's alter ego is Dr. Robert Bruce Banner", "Dr. Robert Bruce Banner is a nuclear physicist. ", "Nuclear physics is the field of physics that studies atomic nuclei and their constituents and interactions. " ], "decomposition": [ "Who is the Hulk's alter ego?", "What is the profession of #1?", "What do people in #2 have a knowledge of?", "Is atomic events included in #3?" 
], "evidence": [ [ [ [ "Hulk-1" ] ], [ [ "Hulk-45" ] ], [ [ "Physicist-1" ] ], [ [ "Elementary event-1" ] ] ], [ [ [ "Hulk-9" ] ], [ [ "Hulk-1" ] ], [ [ "Scientist-1" ] ], [ [ "Atomic Age (design)-1" ], "operation" ] ], [ [ [ "Hulk-1" ] ], [ [ "Hulk-57" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "8cc7153ec5a527748adc", "term": "Firewall (computing)", "description": "Software or hardware-based network security system", "question": "Can a firewall protect against a short circuit?", "answer": false, "facts": [ "A firewall is a computer program that protects unwanted attacks from penetrating a computer.", "Firewalls are installed on computers and conduct routine background maintenance.", "A short circuit is an electrical failure resulting from wires unable to conduct currents.", "Short circuits, especially during updates can lead to the dreaded Windows Blue Screen of Death in which a computer is unable to restart." ], "decomposition": [ "What kind of threats does a firewall protect a computer system against?", "What are the possible causes and results of a short circuit as concerning computers?", "Is any of #2 included in #1?" ], "evidence": [ [ [ [ "Firewall (computing)-13" ] ], [ [ "Short circuit-7", "Short circuit-9" ] ], [ "operation" ] ], [ [ [ "Windows Firewall-2" ] ], [ [ "Short circuit-7" ] ], [ [ "Short circuit-7" ], "operation" ] ], [ [ [ "Firewall (computing)-1" ] ], [ [ "Short circuit-1", "Short circuit-10", "Short circuit-7" ] ], [ "operation" ] ] ] }, { "qid": "34d6c4937d1aa9a70ef5", "term": "Swiss Guard", "description": "Military of Vatican City", "question": "Would Swiss Guard defeat the Marines?", "answer": false, "facts": [ "The Swiss Guard is the military of Vatican City and consists of 135 members.", "There are 186,000 active duty Marines as of 2017." ], "decomposition": [ "How many people are in the Swiss Guard?", "How many people are in the US Marine Corp?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Swiss Guard-31" ] ], [ [ "United States Marine Corps-3" ] ], [ "operation" ] ], [ [ [ "Military in Vatican City-14" ] ], [ [ "United States Marine Corps-3" ] ], [ "operation" ] ], [ [ [ "Swiss Guards-18", "Swiss Guards-3" ], "no_evidence" ], [ [ "United States Marine Corps-3" ] ], [ "operation" ] ] ] }, { "qid": "9fee16a5e703c2fdcd33", "term": "Conservatism", "description": "Political philosophy focused on retaining traditional social institutions", "question": "Does conservatism repulse Blaire White?", "answer": false, "facts": [ "Blaire White is a Youtuber.", "Blaire White posts content that leans towards conservative politics." ], "decomposition": [ "What is Blaire White's political orientation as evidenced by her expressions?", "Is #1 completely devoid of conservatism?" ], "evidence": [ [ [ [ "Blaire White-1" ] ], [ [ "Liberal conservatism-9" ], "operation" ] ], [ [ [ "Blaire White-1" ] ], [ "operation" ] ], [ [ [ "Blaire White-1" ] ], [ [ "Liberal conservatism-9" ] ] ] ] }, { "qid": "f4256dffd78da0d7fdf8", "term": "Hair", "description": "protein filament that grows from follicles found in the dermis, or skin", "question": "Do skeletons have hair?", "answer": false, "facts": [ "Hair grows from the skin.", "Skeletons are a structure of multiple bones.", "Bones do not grow hair. " ], "decomposition": [ "Where does hair grow from?", "What are skeletons made out of?", "Does #2 have #1?" 
], "evidence": [ [ [ [ "Dermis-1", "Hair-1" ] ], [ [ "Skeleton-1", "Skeleton-19" ] ], [ "operation" ] ], [ [ [ "Hair-6" ] ], [ [ "Skeleton-19" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Hair-1" ] ], [ [ "Skeleton-14" ] ], [ "operation" ] ] ] }, { "qid": "56aec97a66ab66236aba", "term": "Cosmic ray", "description": "High-energy particle, mainly originating outside the Solar system", "question": "Did H.G. Wells' \"War of the Worlds\" include cosmic rays?", "answer": false, "facts": [ "The book \"War of the Worlds\" was published in 1898.", "Cosmic rays were not discovered until 1912." ], "decomposition": [ "When was the War of the Worlds published?", "When were cosmic rays discovered?", "Did #2 come before #1?" ], "evidence": [ [ [ [ "The War of the Worlds-1" ] ], [ [ "Cosmic ray-7" ] ], [ "operation" ] ], [ [ [ "The War of the Worlds-1" ] ], [ [ "Cosmic ray-7" ] ], [ "operation" ] ], [ [ [ "The War of the Worlds-1" ] ], [ [ "Cosmic ray-7" ] ], [ "operation" ] ] ] }, { "qid": "f41b809dcfb764234ce8", "term": "Reddit", "description": "Online news aggregator", "question": "Are the majority of Reddit users familiar with the Pledge of Allegiance?", "answer": true, "facts": [ "55% of the Reddit user base comes from the United States.", "Congressional sessions open with the recital of the Pledge, as do many government meetings at local levels, and meetings held by many private organizations.", "All states except California, Hawaii, Iowa, Vermont, and Wyoming require a regularly scheduled recitation of the pledge in public schools." ], "decomposition": [ "What country do most Reddit users come from?", "What country is the Pledge of Allegiance associated with?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Reddit-2" ] ], [ [ "Pledge of Allegiance-1" ] ], [ "operation" ] ], [ [ [ "Reddit-2" ] ], [ [ "Pledge of Allegiance-1" ] ], [ "operation" ] ], [ [ [ "Reddit-2" ] ], [ [ "Pledge of Allegiance-1" ] ], [ "operation" ] ] ] }, { "qid": "453f198e758c82ae85d9", "term": "50 Cent", "description": "American rapper, singer, songwriter, actor, television producer, entrepreneur and investor", "question": "Does 50 Cent get along with Jeffrey Atkins?", "answer": false, "facts": [ "Jeffrey Atkins is a rapper better known as Ja Rule.", "Ja Rule released the diss track \"Loose Change\" in April 2003, where he attacks 50 Cent.", "In 2018 50 cent responded to a Ja Rule diss by purchasing and vacating the first 4 rows of Ja Rule's concert." ], "decomposition": [ "What artists was Jeffrey Atkins critical of in his song Loose Change?", "Is 50 Cent outside the group of #1?" ], "evidence": [ [ [ [ "Ja Rule-1", "Ja Rule-12" ] ], [ "operation" ] ], [ [ [ "Ja Rule-12" ] ], [ "operation" ] ], [ [ [ "Ja Rule-12" ] ], [ "operation" ] ] ] }, { "qid": "353bf34cd9457f69b870", "term": "YMCA", "description": "Worldwide organization founded in 1844 on principles of muscular Christianity", "question": "Is it normal for people to sing when the YMCA is mentioned?", "answer": true, "facts": [ "The YMCA was written about in a widely popular song by \"The Village People\"", "The Village People's song \"YMCA\" had an easy to do and very popular dance routine that went with it. ", "The song \"YMCA\" is extremely well known." ], "decomposition": [ "In what song was the YMCA written about?", "Is #1 a very popular song?" ], "evidence": [ [ [ [ "Y.M.C.A. (song)-1" ] ], [ [ "Y.M.C.A. (song)-2" ] ] ], [ [ [ "Y.M.C.A. (song)-1" ] ], [ [ "Y.M.C.A. (song)-4" ], "no_evidence" ] ], [ [ [ "Y.M.C.A. (song)-1" ] ], [ [ "Y.M.C.A. (song)-1", "Y.M.C.A. 
(song)-2" ] ] ] ] }, { "qid": "5232dce14487fa43d416", "term": "Infantry", "description": "military personnel who travel and fight on foot", "question": "Do members of NFL teams receive infantry training?", "answer": false, "facts": [ "Members of NFL teams play football", "Infantry training is provided to members of the US armed forces" ], "decomposition": [ "Which group(s) are entitled to infantry training?", "Are members of the NFL team one of #1?" ], "evidence": [ [ [ [ "Infantry-47" ] ], [ [ "Infantry-47" ] ] ], [ [ [ "Infantry-47" ] ], [ [ "National Football League-1" ], "operation" ] ], [ [ [ "United States Marine Corps School of Infantry-1" ] ], [ [ "National Football League-1" ] ] ] ] }, { "qid": "26a27cc7a77f192f0077", "term": "Quran", "description": "The central religious text of Islam", "question": "Do most religious people in Quebec refer to the Quran?", "answer": false, "facts": [ "Christianity is the major religion in Quebec.", "Christians refer to the Bible as their book of reference." ], "decomposition": [ "What is the dominant religion in Quebec?", "Do adherents of #1 commonly refer to the Quran?" ], "evidence": [ [ [ [ "Demographics of Quebec-21" ] ], [ [ "Catholic Bible-4", "Quran-1" ], "operation" ] ], [ [ [ "Quebec-76" ] ], [ [ "Bible-1", "Quran-1" ] ] ], [ [ [ "Demographics of Quebec-21" ] ], [ [ "Catholic Bible-25" ] ] ] ] }, { "qid": "b72204fd58a0bfaa5283", "term": "Gulf of Mexico", "description": "An Atlantic Ocean basin extending into southern North America", "question": "Is a Halloween cruise in the Gulf of Mexico likely to be safe from storms?", "answer": false, "facts": [ "Hurricanes often strike the Gulf of Mexico", "Hurricane season in the gulf lasts until the end of November", "Halloween is October 31" ], "decomposition": [ "Which storms are a common occurrence in the Gulf of Mexico?", "What time of the year is Halloween celebrated?", "According to known patterns, are any of #1 likely to happen during #2?" ], "evidence": [ [ [ [ "Atlantic hurricane-35" ] ], [ [ "Halloween-1" ] ], [ [ "Tropical cyclone-46" ], "operation" ] ], [ [ [ "Gulf of Mexico-26" ] ], [ [ "Halloween-1" ] ], [ [ "Atlantic hurricane season-2" ], "operation" ] ], [ [ [ "Tropical cyclone-46", "Tropical cyclone-60", "Tropical cyclone-86" ] ], [ [ "All Hallows' Eve (disambiguation)-1" ] ], [ "operation" ] ] ] }, { "qid": "0c6e530c3581d6cf0193", "term": "Porsche", "description": "automotive brand manufacturing subsidiary of Volkswagen", "question": "Can Billie Eilish afford a Porsche?", "answer": true, "facts": [ "Billie Eilish is a famous female singer.", "Billie Eilish is 18 years old and has a net worth of $25 Million. ", "A Porsche Boxster is a car that starts at $59,000.", "$25,000,000 is greater than $59,000." ], "decomposition": [ "What is Billie Eilish's net worth?", "How much does a Porsche cost?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Billie Eilish-3" ], "no_evidence" ], [ [ "Porsche Carrera GT-5", "Porsche-16" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Porsche-16" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Billie Eilish-23" ], "no_evidence" ], [ [ "Porsche Panamera-19" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "081689c549d238e5f98a", "term": "Formula One", "description": "Auto racing championship held worldwide", "question": "Did Secretariat win a Formula One championship?", "answer": false, "facts": [ "Secretariat is a famous race horse", "Formula One is an auto racing championship" ], "decomposition": [ "What is Secretariat?", "What is #1's top speed?", "What is the top speed for a Formula One car?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Secretariat (horse)-1" ] ], [ [ "Secretariat (horse)-3" ], "no_evidence" ], [ [ "Formula One car-60" ] ], [ "operation" ] ], [ [ [ "Secretariat (horse)-1" ] ], [ [ "Secretariat (horse)-3" ], "no_evidence" ], [ [ "Formula One-3" ] ], [ "operation" ] ], [ [ [ "Secretariat (horse)-1" ] ], [ [ "Secretariat (horse)-27" ] ], [ [ "Formula One-3" ] ], [ "operation" ] ] ] }, { "qid": "53ddbc5daaa0bb43606e", "term": "Robert Downey Jr.", "description": "American actor", "question": "Does Robert Downey Jr's Marvel Cinematic Universe character survive the Infinity War?", "answer": false, "facts": [ "Robert Downey Jr. plays Iron Man in the MCU.", "At the end of Avengers: Endgame, Iron Man uses the Infinity Stones against Thanos.", "The stones are too powerful for his human body to handle, so he dies after using them." ], "decomposition": [ "What is Robert Downey Jr's character in Infinity War?", "Does #1 survive to the end of Infinity War?" ], "evidence": [ [ [ [ "Robert Downey Jr.-38" ] ], [ [ "Avengers: Infinity War-10" ], "no_evidence" ] ], [ [ [ "Tony Stark (Marvel Cinematic Universe)-36" ] ], [ [ "Avengers: Infinity War-9", "Tony Stark (Marvel Cinematic Universe)-26" ], "operation" ] ], [ [ [ "Avengers: Infinity War-5", "Tony Stark (Marvel Cinematic Universe)-1" ] ], [ [ "Tony Stark (Marvel Cinematic Universe)-26" ] ] ] ] }, { "qid": "0b7782218af2a65a6094", "term": "Astronomer", "description": "Scientist who studies celestial bodies", "question": "Does James Webb Space Telescope fail astronomer in locating planet Krypton?", "answer": true, "facts": [ "The James Webb Space Telescope is the most powerful telescope created.", "Krypton is a planet in the fictional Superman comic book series." ], "decomposition": [ "Which universe does the planet Krypton exist in?", "Does the James Webb Space Telescope as we know it exist in a universe different from #1?" ], "evidence": [ [ [ [ "Krypton (comics)-1" ] ], [ [ "James Webb Space Telescope-1" ], "no_evidence" ] ], [ [ [ "Krypton (comics)-1" ] ], [ [ "James Webb Space Telescope-1" ] ] ], [ [ [ "Krypton (comics)-1" ] ], [ [ "James Webb Space Telescope-1" ], "operation" ] ] ] }, { "qid": "7be51fdef30345de666e", "term": "Crustacean", "description": "subphylum of arthropods", "question": "Do all crustaceans live in the ocean?", "answer": false, "facts": [ "The woodlice family of crustaceans is terrestrial.", "There are also many crustacean species living in fresh water rivers and lakes." ], "decomposition": [ "What are some common families of crustaceans?", "Is there any of #1 that lives in a terrestrial habitat?", "Do any of #1 that are aquatic also live in freshwater?", "Are #2 and #3 negative?" 
], "evidence": [ [ [ [ "Crab-1", "Crustacean-4" ] ], [ [ "Woodlouse-2" ] ], [ [ "Potamon fluviatile-1" ] ], [ "operation" ] ], [ [ [ "Crustacean-2" ] ], [ [ "Crustacean-4" ] ], [ [ "Crustacean-10" ] ], [ "no_evidence" ] ], [ [ [ "Crustacean-1" ] ], [ [ "Crustacean-10", "Woodlouse-2" ], "no_evidence" ], [ [ "Shrimp-11", "Shrimp-2" ] ], [ "operation" ] ] ] }, { "qid": "91da868cfe707bfa9d4e", "term": "South Pole", "description": "Southern point where the Earth's axis of rotation intersects its surface", "question": "Do children send their Christmas letters to the South Pole?", "answer": false, "facts": [ "Children send Christmas letters to Santa Claus.", "Santa Claus is fabled to live in the North Pole." ], "decomposition": [ "Who do children send their Christmas letters to?", "Does #1 supposedly live in the South Pole?" ], "evidence": [ [ [ [ "North Pole-61", "Santa's workshop-9" ] ], [ "operation" ] ], [ [ [ "Santa Claus-55" ] ], [ [ "North Pole-61" ], "operation" ] ], [ [ [ "Santa Claus-55" ] ], [ [ "Santa Claus-3" ] ] ] ] }, { "qid": "bcf47774877e8bdd07a4", "term": "Warsaw Ghetto", "description": "Ghetto in Nazi occupied Poland", "question": "Did the population of the Warsaw Ghetto record secret police on cell phones?", "answer": false, "facts": [ "The Warsaw Ghetto existed during the second world war.", "Cell phones with video recording capability did not exist until the 2000s." ], "decomposition": [ "When was the Warsaw Ghetto in existence?", "When was the first cell phone capable of recording developed?", "Is #2 before the end of #1?" ], "evidence": [ [ [ [ "Warsaw Ghetto-6" ] ], [ [ "Digital electronics-11" ] ], [ [ "Digital electronics-11", "Warsaw Ghetto-6" ], "operation" ] ], [ [ [ "Warsaw Ghetto-1" ] ], [ [ "Camera phone-22" ] ], [ "operation" ] ], [ [ [ "Warsaw Ghetto-3" ] ], [ [ "Mobile phone-4" ] ], [ [ "Mobile phone-4" ], "operation" ] ] ] }, { "qid": "c20289cc17128f27feab", "term": "John Lennon", "description": "English singer and songwriter, founding member of the Beatles", "question": "Was John Lennon known to be a good friend to Sasha Obama?", "answer": false, "facts": [ "John Lennon died in 1980.", "Sasha Obama was born in 2001." ], "decomposition": [ "When was Sasha Obama born?", "When did John Lennon die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Family of Barack Obama-5" ] ], [ [ "John Lennon-1" ] ], [ "operation" ] ], [ [ [ "Family of Barack Obama-5" ] ], [ [ "John Lennon-36" ] ], [ "operation" ] ], [ [ [ "Michelle Obama-21" ] ], [ [ "John Lennon-1" ] ], [ "operation" ] ] ] }, { "qid": "a8088fd4d533c2eaa365", "term": "Eggplant", "description": "plant species Solanum melongena", "question": "Can spiders help eggplant farmers control parasites?", "answer": true, "facts": [ "The potato tuber moth is a parasite that targets the plant family Solanaceae, including eggplant ", "Selenops radiatus is a spider genus in South Africa that effectively controls the potato tuber moth" ], "decomposition": [ "Which major parasite insect are eggplants host plants to?", "What are the natural enemies of #1 that farmers can use to control them?", "Is any of #2 a spider?" 
], "evidence": [ [ [ [ "Eggplant-50" ] ], [ [ "Spider behavior-2" ] ], [ "operation" ] ], [ [ [ "Eggplant-46" ] ], [ [ "Spider-4" ] ], [ "operation" ] ], [ [ [ "Eggplant-46", "Eggplant-47" ] ], [ [ "Aphid-36", "Spider-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c862ea1f376d312249b8", "term": "Darth Vader", "description": "fictional character in the Star Wars franchise", "question": "Does Darth Vader's character resemble Severus Snape?", "answer": false, "facts": [ "Darth Vader is portrayed as a man who always appears in black full-body armor and a mask.", "Severus Snape is portrayed as a white man with long, greasy black hair who often wears a cloak. " ], "decomposition": [ "What type of clothing does Darth Vader wear?", "What type of clothing does Severus Snape wear?", "Are there any significant similarities between #1 and #2?" ], "evidence": [ [ [ [ "Darth Vader-16" ] ], [ [ "Severus Snape-35" ] ], [ "operation" ] ], [ [ [ "Darth Vader-15", "Darth Vader-33" ] ], [ [ "Severus Snape-35" ] ], [ "operation" ] ], [ [ [ "Darth Vader-15" ] ], [ [ "Severus Snape-35" ] ], [ "operation" ] ] ] }, { "qid": "d2af3c2b2eee3ce311ea", "term": "Organ transplantation", "description": "moving of an organ from one body or body region to another", "question": "Can a carrot receive an organ transplant?", "answer": false, "facts": [ "Organs are groups of tissues that perform a similar function.", "The whole of a carrot is a root.", "A root is a plant organ.", "You cannot transplant the entire carrot into another carrot." ], "decomposition": [ "What part of the plant is the carrot?", "Does #1 have organs?" ], "evidence": [ [ [ [ "Carrot-1" ] ], [ [ "Organ (anatomy)-1", "Taproot-1" ] ] ], [ [ [ "Carrot-1" ] ], [ "operation" ] ], [ [ [ "Carrot-1" ] ], [ "operation" ] ] ] }, { "qid": "44a7b241fe98fc12b215", "term": "Memory", "description": "information stored in the mind, or the mental processes involved in receiving, storing, and retrieving this information", "question": "Do people with DID have a good memory?", "answer": false, "facts": [ "DID is an abbreviation for 'Dissociative Identity Disorder.'", "DID is characterized by gaps in memory, as well as altered states or 'personalities' during these dissociative moments." ], "decomposition": [ "What does DID stand for?", "What is #1 characterized by?", "Would be with #2 have good memory?" ], "evidence": [ [ [ [ "Dissociative identity disorder-1" ] ], [ [ "Dissociative identity disorder-1" ] ], [ [ "Dissociative identity disorder-1" ] ] ], [ [ [ "Dissociative identity disorder-1" ] ], [ [ "Dissociative identity disorder-1" ] ], [ [ "Dissociative identity disorder-1", "Psychogenic amnesia-1" ] ] ], [ [ [ "Dissociative identity disorder-1" ] ], [ [ "Dissociative identity disorder-7" ] ], [ "operation" ] ] ] }, { "qid": "48648780e56f136fdae5", "term": "Pandora", "description": "Mythological figure", "question": "Were items released from Pandora's box at least two of the names of Four Horsemen?", "answer": true, "facts": [ "Pandora was a mythical figure that opened a box and released several ills on the world including famine, sickness, and death.", "The Four Horsemen of the Apocalypse are: Pestilence, War, Famine, and Death." ], "decomposition": [ "What items were released from Pandora's box?", "What were the names of the Four Horsemen", "Is there any overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Pandora's box-2" ] ], [ [ "Four Horsemen of the Apocalypse-3" ] ], [ [ "Four Horsemen of the Apocalypse-3", "Pandora's box-2" ] ] ], [ [ [ "Pandora's box-2", "Pandora's box-31" ] ], [ [ "Four Horsemen of the Apocalypse-5", "Horsemen of Apocalypse-7" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Pandora's box-2" ] ], [ [ "Four Horsemen of the Apocalypse-3" ] ], [ "operation" ] ] ] }, { "qid": "e1463ddc36ea8cfc9074", "term": "Scientific Revolution", "description": "Beginnings of modern science that occured in Europe towards the end of the Renaissance", "question": "Did the iPhone usher in the scientific revolution?", "answer": false, "facts": [ "The scientific revolution took place in the 16th and 17th centuries.", "The iPhone came out in the 21st century." ], "decomposition": [ "When did the Scientific Revolution begin?", "When did the iPhone come out?", "Did #2 occur before #1?" ], "evidence": [ [ [ [ "Scientific Revolution-1" ] ], [ [ "IPhone-1" ] ], [ "operation" ] ], [ [ [ "Scientific Revolution-4" ] ], [ [ "IPhone-1" ] ], [ "operation" ] ], [ [ [ "Scientific Revolution-1" ] ], [ [ "IPhone-1" ] ], [ "operation" ] ] ] }, { "qid": "3b3547ab6ae47d840f37", "term": "Nature", "description": "Hominin events for the last 10 million years", "question": "Would someone go to San Francisco for a nature escape?", "answer": false, "facts": [ "San Francisco is a major US city with over 800,000 people.", "San Francisco is known for mass transit and being a metropolitan area." ], "decomposition": [ "What kind of developed human settlement is San Francisco?", "Are #1's known for nature?" ], "evidence": [ [ [ [ "San Francisco-1" ] ], [ [ "San Francisco-1" ] ] ], [ [ [ "San Francisco-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "San Francisco-95" ] ], [ "operation" ] ] ] }, { "qid": "7e80945a93300f55f479", "term": "Parsley", "description": "species of plant, herb", "question": "Does parsley sink in milk?", "answer": false, "facts": [ "Items sink if they are denser than the surrounding material.", "Parsley has a density of 0.26 g/cm^3 when fresh.", "Milk has a density of 1.026 g/cm^3." ], "decomposition": [ "What is the density of parsley?", "What is the density of milk?", "Is #1 greater than #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Parsley-1" ], "no_evidence" ], [ [ "Milk-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "23bde1810c6afe6f916c", "term": "1912 United States presidential election", "description": "Election of 1912", "question": "Did anyone in the 1912 election take a majority of the popular vote?", "answer": false, "facts": [ "Woodrow Wilson took 41% of the vote.", "Theodore Roosevelt took 27% of the vote.", "William Howard Taft took 23% of the vote.", "Eugene Debs took 6% of the vote.", "A majority is more than 50%." ], "decomposition": [ "What percentage of votes would constitute a majority?", "What percentage of votes did the winner of the 1912 presidential election receive?", "Does #2 fall within the range of #1?" 
], "evidence": [ [ [ [ "Majority-1" ] ], [ [ "1912 United States presidential election-4" ] ], [ "operation" ] ], [ [ [ "Double majority-11" ], "no_evidence" ], [ [ "1912 United States presidential election-4" ] ], [ "operation" ] ], [ [ [ "Majority-1" ] ], [ [ "1912 United States presidential election-4" ] ], [ "operation" ] ] ] }, { "qid": "37f33f40f86886a334d8", "term": "Amoeba", "description": "polyphyletic group including different eucariot taxons", "question": "Could amoebas have played a part in the Black Death?", "answer": true, "facts": [ "The Black Death is a bacterial disease called bubonic plague.", "Yersinia pestis has been found to grow and thrive inside amoebas.", "Bubonic plague is caused by Yersinia pestis." ], "decomposition": [ "Which disease is referred to as the Black Death?", "Which specific organism is responsible for #1?", "Could #2 be hosted by amoeba?" ], "evidence": [ [ [ [ "Black Death-1" ] ], [ [ "Black Death-3" ] ], [ [ "Amoeba-1" ] ] ], [ [ [ "Black Death-1" ] ], [ [ "Yersinia pestis-1" ] ], [ [ "Amoeba-10" ], "operation" ] ], [ [ [ "Black Death-1" ] ], [ [ "Black Death-3" ] ], [ [ "Amoeba-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "da8b34432133127c4a97", "term": "P. G. Wodehouse", "description": "English author", "question": "Did P. G. Wodehouse like the internet as a child?", "answer": false, "facts": [ "P. G. Wodehouse was born in 1881.", "The internet was not conceived of until 1965. " ], "decomposition": [ "When was P. G. Wodehouse born?", "When was the internet invented?", "Did #1 come before or during #2?" ], "evidence": [ [ [ [ "P. G. Wodehouse-1" ] ], [ [ "Internet-2" ] ], [ "operation" ] ], [ [ [ "P. G. Wodehouse-5" ], "operation" ], [ [ "When Radio Was-6" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "P. G. Wodehouse-5" ], "operation" ], [ [ "Internet-13" ], "operation" ], [ "operation" ] ] ] }, { "qid": "1337be5d470d68393db7", "term": "The Invisible Man", "description": "1897 science fiction novella by H. G. Wells", "question": " Is The Invisible Man more prevalent in films than Picnic at Hanging Rock?", "answer": true, "facts": [ "H.G. Wells's book The Invisible Man has been adapted into more than 7 films.", "Joan Lindsay's book Picnic at Hanging Rock was adapted into one film and one TV series." ], "decomposition": [ "How many films have been made of \"The Invisible Man\"", "How many films have been made of Picnic at Hanging Rock?", "Is #1 larger than #2?" ], "evidence": [ [ [ [ "Griffin (The Invisible Man)-11" ] ], [ [ "Picnic at Hanging Rock (novel)-25" ] ], [ "operation" ] ], [ [ [ "The Invisible Man-2" ] ], [ [ "Picnic at Hanging Rock (film)-11" ] ], [ "operation" ] ], [ [ [ "The Invisible Man-13" ], "no_evidence" ], [ [ "Picnic at Hanging Rock (film)-1" ] ], [ "operation" ] ] ] }, { "qid": "7a1c9d5ad2a66bf90240", "term": "Disco", "description": "music genre", "question": "Is ABBA's 1970's genre still relevant today?", "answer": true, "facts": [ "ABBA was a 1970's music group that specialized in Disco music.", "Pop artist Dua Lipa's 2020 album, Future Nostalgia, was described by Rolling Stone as , \"The Disco Liberation We Need.\"", "Magnetic Magazine released an article in 2020 entitled, \"The Best Disco and Funk Tracks of 2020.\"" ], "decomposition": [ "What genre of music did music group ABBA specialize in in the 1970's?", "Are #1 still relevant today?" ], "evidence": [ [ [ [ "ABBA-1" ] ], [ [ "Mamma Mia! Here We Go Again-1", "Mamma Mia! 
Here We Go Again-21" ] ] ], [ [ [ "ABBA-1" ] ], [ [ "Popular music-1" ], "operation" ] ], [ [ [ "ABBA-43" ] ], [ [ "Disco-5" ], "operation" ] ] ] }, { "qid": "a9238d2bba6ea325fb41", "term": "Cactus", "description": "Family of mostly succulent plants, adapted to dry environments", "question": " Is cactus fruit an important menu item for a restaurant based on Cuauhtémoc?", "answer": true, "facts": [ "The Aztecs cultivated cacti for the fruit", "Tenochtitlan was the capital of the Aztec empire", "Cuauhtémoc was the last king of Tenochtitlan " ], "decomposition": [ "Which city was Cuauhtémoc the king of?", "What empire was #1 the capital of?", "Did people in #2 eat cacti?" ], "evidence": [ [ [ [ "Cuauhtémoc-1" ] ], [ [ "Tenochtitlan-1" ] ], [ [ "Aztec Empire-8" ], "no_evidence" ] ], [ [ [ "Cuauhtémoc-1" ] ], [ [ "Tenochtitlan-1" ] ], [ [ "Cactus-61" ], "operation" ] ], [ [ [ "Cuauhtémoc-1" ] ], [ [ "Cuauhtémoc-1" ] ], [ [ "Aztec cuisine-4" ] ] ] ] }, { "qid": "b24d019c3f205bafdea5", "term": "Euphoria", "description": "mental and emotional condition in which a person experiences intense feelings of well-being, elation, happiness and excitement", "question": "Did Rumi spend his time in a state of euphoria?", "answer": true, "facts": [ "Euphoria is a state in which people experience intense feelings that overwhelm their body.", "Rumi was a 13th century Persian poet who was also a dervish.", "Dervishes participated in ceremonies in which they experienced religious ecstasy.", "Religious ecstasy is an altered state of consciousness characterized by visions and emotional (and sometimes physical) euphoria." ], "decomposition": [ "What religious practices did Rumi engage in?", "What emotional experiences are associated with #1?", "Is euphoria among #2?" ], "evidence": [ [ [ [ "Rumi-1", "Sufi whirling-1" ] ], [ [ "Sufi whirling-5" ] ], [ "operation" ] ], [ [ [ "Rumi-1" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Rumi-1" ] ], [ [ "Sufism-75" ] ], [ [ "Ecstasy (emotion)-8" ], "operation" ] ] ] }, { "qid": "43c636907c6b92d251d5", "term": "Silicon", "description": "Chemical element with atomic number 14", "question": "Is silicon important in California?", "answer": true, "facts": [ "There is a region in California called the Silicon Valley.", "Silicon Valley is home to a large number of technology corporations.", "Silicon Valley was originally named after the large number of corporations there that manufactured silicon-based circuit chips." ], "decomposition": [ "Which industrial area in California is named after silicon?", "What kind of companies are prevalent in #1?", "What kind of products do #2 make?", "Is silicon an important raw material for #3?" ], "evidence": [ [ [ [ "Silicon Valley-1" ] ], [ [ "Silicon Valley-38" ] ], [ [ "Silicon Valley-29" ] ], [ [ "Silicon Valley-2" ] ] ], [ [ [ "Silicon Valley-1" ] ], [ [ "Silicon Valley-2" ] ], [ [ "Silicon Valley-2" ] ], [ [ "Integrated circuit-1", "Transistor-48" ] ] ], [ [ [ "Silicon Valley-1" ] ], [ [ "Silicon Valley-2" ] ], [ [ "Transistor-48" ] ], [ [ "Semiconductor-1" ], "operation" ] ] ] }, { "qid": "f81316ea85357f58284b", "term": "Lord Voldemort", "description": "Fictional character of Harry Potter series", "question": "Would Lord Voldemort have been barred from Hogwarts under his own rules?", "answer": true, "facts": [ "Lord Voldemort wanted to rid the wizarding world of half blood wizards.", "Lord Voldemort was born a half blood, part muggle part wizard." 
], "decomposition": [ "What kinds of people did Lord Voldemort want to prohibit from Hogwarts?", "What was Lord Voldemort born as?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Lord Voldemort-4" ] ], [ [ "Lord Voldemort-4" ] ], [ "operation" ] ], [ [ [ "Lord Voldemort-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Lord Voldemort-2" ] ], [ [ "Lord Voldemort-33" ] ], [ "operation" ] ] ] }, { "qid": "9850eb1534206facd067", "term": "Central processing unit", "description": "Central component of any computer system which executes input/output, arithmetical, and logical operations", "question": "Would a modern central processing unit circuit chip fit on a housekey?", "answer": false, "facts": [ "A CPU circuit chip is about an inch across.", "A housekey is generally less than a half-inch across." ], "decomposition": [ "What is the size of a CPU Circuit chip across?", "How long is an average house key?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Central processing unit-20" ], "no_evidence" ], [ [ "Lock and key-15" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Integrated circuit-2" ], "no_evidence" ], [ [ "Lock and key-7" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "be3458c61d6f0deee9d8", "term": "Pancake", "description": "Thin, round cake made of eggs, milk and flour", "question": "Are pancakes a bad snack for cats?", "answer": true, "facts": [ "Pancakes contain the dairy product milk as one of the main ingredients.", "After 6 months cats lose the enzyme lactase that breaks down lactose, which makes them lactose intolerant.", "Cats that drink milk can suffer from upset stomach and vomiting." ], "decomposition": [ "What are the three major ingredients of pancake?", "Which substance do cats lose the ability to break down after six months?", "Does any of #1 contain #2?" ], "evidence": [ [ [ [ "Pancake-1" ] ], [ [ "Cat-59" ] ], [ "operation" ] ], [ [ [ "Pancake-64" ] ], [ [ "Cat-59" ] ], [ [ "Cat-59" ], "operation" ] ], [ [ [ "Pancake-1" ] ], [ [ "Cat-59" ] ], [ [ "Cat-59" ], "operation" ] ] ] }, { "qid": "4b38110513ff191805c1", "term": "Maroon 5", "description": "American pop punk band", "question": "Did Maroon 5 go on tour with Nirvana?", "answer": false, "facts": [ "Maroon 5 formed in 2001.", "Nirvana's final performances were in 1994." ], "decomposition": [ "When was Maroon 5 formed?", "What span of years was the rock band Nirvana active?", "Is #1 before or within #2?" ], "evidence": [ [ [ [ "Maroon 5-1" ] ], [ [ "Nirvana (band)-1", "Nirvana (band)-3" ] ], [ "operation" ] ], [ [ [ "Maroon 5-1" ] ], [ [ "Nirvana (band)-1", "Nirvana (band)-21" ] ], [ "operation" ] ], [ [ [ "Maroon 5-1" ] ], [ [ "Nirvana (band)-1", "Nirvana (band)-3" ] ], [ "operation" ] ] ] }, { "qid": "fb0f995e63ec3ba8836d", "term": "Lunch", "description": "meal, usually served at midday", "question": "Are all students guaranteed lunch at school in the US?", "answer": false, "facts": [ "Schools across the US have been struggling with school lunch debts.", "News articles have been published about students being turned away from the cafeteria due to outstanding debts for lunches." ], "decomposition": [ "How effectively have schools across the US been managing lunch debts?", "According to news articles, how are students with lunch debts treated at the cafeteria?", "Does 'excellently' describe #1 and #2?" 
], "evidence": [ [ [ [ "National School Lunch Act-35" ], "no_evidence" ], [ [ "Poverty-56" ], "no_evidence" ], [ "operation" ] ], [ [ [ "School meal programs in the United States-36" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "School meal-72" ], "no_evidence" ], [ [ "Shooting of Philando Castile-48" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "22955a0a82aacacd37eb", "term": "Methane", "description": "Simplest organic molecule with one carbon atom and four hydrogen", "question": "Can methane be seen by the naked eye?", "answer": false, "facts": [ "Methane is a gas.", "Methane is colorless.", "Methane is odorless." ], "decomposition": [ "What is the color of methane?", "Can #1 gases be seen by the naked eye?" ], "evidence": [ [ [ [ "Methane-4" ] ], [ "operation" ] ], [ [ [ "Methane-4" ] ], [ "operation" ] ], [ [ [ "Methane-4" ] ], [ "operation" ] ] ] }, { "qid": "7ea958a52fc626ac5577", "term": "Evander Holyfield", "description": "American boxer", "question": "Would an Evander Holyfield 2020 boxing return set age record?", "answer": false, "facts": [ "Evander Holyfield will turn 58 years old at the end of 2020.", "Steve Ward holds the world's oldest boxer title at age 59." ], "decomposition": [ "How old will Evander Holyfield be at the end of 2020?", "What is the oldest age a boxer won a title bout?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Evander Holyfield-1" ] ], [ [ "Steve Ward (boxer)-1" ] ], [ "operation" ] ], [ [ [ "Evander Holyfield-1" ] ], [ [ "Bernard Hopkins-3" ] ], [ "operation" ] ], [ [ [ "Evander Holyfield-7" ] ], [ [ "Steve Ward (boxer)-1" ] ], [ "operation" ] ] ] }, { "qid": "e78288175362ef630c25", "term": "Alcatraz Island", "description": "United States historic place", "question": "Could an escapee swim nonstop from Alcatraz island to Siberia?", "answer": false, "facts": [ "Alcatraz Island was a San Francisco Bay prison.", "Siberia is over 5,217 miles away from San Francisco.", "The longest continuous swim record was 139 miles." ], "decomposition": [ "How far is Alcatraz from Siberia?", "How far is the record longest swim?", "Is #2 equal to or greater than #1?" ], "evidence": [ [ [ [ "Alcatraz Island-1", "Siberia-1" ], "no_evidence" ], [ [ "Veljko Rogošić-1" ] ], [ "operation" ] ], [ [ [ "Alcatraz Island-1", "Pacific Ocean-1", "Siberia-37" ], "no_evidence" ], [ [ "Long-distance swimming-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Veljko Rogošić-1" ] ], [ "operation" ] ] ] }, { "qid": "913c1014198f2e584dc7", "term": "Ice", "description": "water frozen into the solid state", "question": "Is there a popular Disney character made from living ice?", "answer": true, "facts": [ "Olaf is a popular character in Disney's Frozen series.", "Olaf is a snowman, accidentally enchanted to life by Elsa while she magically builds her ice tower." ], "decomposition": [ "Which popular Disney character did Elsa accidentally enchant to life while building her ice tower?", "Was #1 made of snow/ice?" 
], "evidence": [ [ [ [ "Frozen (2013 film)-7" ] ], [ [ "Snowman-4" ], "operation" ] ], [ [ [ "Olaf (Frozen)-1" ] ], [ [ "Snowman-1" ] ] ], [ [ [ "Olaf (Frozen)-5" ] ], [ [ "Olaf (Frozen)-1" ], "operation" ] ] ] }, { "qid": "bf4bc8f3c5953306874d", "term": "Onion", "description": "vegetable", "question": "Would a blooming onion be possible with a shallot?", "answer": false, "facts": [ "A blooming onion is a dish for sharing, featuring a sliced and deep fried onion made to resemble petals.", "A shallot is very small and would only make a few \"petals\"" ], "decomposition": [ "What characteristics of onions are important when making blooming onion dish?", "Do the characteristics of shallot match with all of #1?" ], "evidence": [ [ [ [ "Blooming onion-1" ] ], [ [ "Shallot-1" ], "no_evidence", "operation" ] ], [ [ [ "Blooming onion-1" ], "no_evidence" ], [ [ "Shallot-7" ], "no_evidence", "operation" ] ], [ [ [ "Blooming onion-1" ] ], [ [ "Shallot-14" ], "no_evidence", "operation" ] ] ] }, { "qid": "82e32a0627566be76a90", "term": "Friday", "description": "day of the week", "question": "Did goddess Friday is named after despise felines?", "answer": false, "facts": [ "Felines are a species of animals that include lions, tigers, and domestic cats.", "Friday is named after the Norse goddess Freya. ", "Freya is often depicted in art with cats.", "Freya had two cats that pulled her magical chariot." ], "decomposition": [ "Which goddess is Friday named after?", "Which animals pulled #1's chariots?", "Are felines excluded from #2?" ], "evidence": [ [ [ [ "Friday-3" ] ], [ [ "Frigg-27" ] ], [ "operation" ] ], [ [ [ "Friday-3" ] ], [ [ "Venus (mythology)-1" ] ], [ [ "Venus (mythology)-1" ] ] ], [ [ [ "Friday-3" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c8ff0437780c4cb6d951", "term": "Gunpowder", "description": "explosive most commonly used as propellant in firearms", "question": "Did the Gunpowder plot eliminate Mary, Queen of Scots bloodline?", "answer": false, "facts": [ "Mary, Queen of Scots was the queen of Scotland whose rivalry with Elizabeth I led to her beheading.", "Mary, Queen of Scots son became King of England as James I.", "The Gunpowder plot was a 1605 plot to blow up Parliament and King James I.", "The Gunpowder plot failed and the conspirators were executed.", "King James I was succeeded by his son, Charles I of England." ], "decomposition": [ "In what year did the Gunpowder Plot happen?", "Of the descendants of Mary, Queen of Scots, what is the birth year of the person who died most recently?", "Is #1 within the range of #2?" ], "evidence": [ [ [ [ "Gunpowder Plot-1" ] ], [ [ "Arthur Stuart, 7th Earl Castle Stewart-1", "House of Stuart-1", "House of Stuart-4" ] ], [ "operation" ] ], [ [ [ "Gunpowder Plot-1" ] ], [ [ "James VI and I-1", "James VI and I-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Gunpowder Plot-2" ] ], [ [ "Mary, Queen of Scots-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "9205ac05975118d93717", "term": "Onion", "description": "vegetable", "question": "Does Sockington enjoy onions?", "answer": false, "facts": [ "Sockington is a domestic cat", "Onions can cause toxicity in cats by breaking down their red blood cells" ], "decomposition": [ "What kind of creature is Sockington?", "Are onions harmless to #1?" 
], "evidence": [ [ [ [ "Sockington-1" ] ], [ [ "Hemolytic anemia-23" ], "operation" ] ], [ [ [ "Sockington-1" ] ], [ [ "Onion-25" ], "operation" ] ], [ [ [ "Sockington-1" ] ], [ [ "Onion-25" ] ] ] ] }, { "qid": "674c5dac91798a609ec8", "term": "Monarch", "description": "Person at the head of a monarchy", "question": "Would Hapshetsut be considered a monarch?", "answer": true, "facts": [ "A monarch is the head of a monarchy and was appointed for life.", "Hapshetsut was the second known female pharaoh.", "Pharaohs ruled for life and their wealth was even buried with them to take into the afterlife." ], "decomposition": [ "What kind of leader was Hatshepsut?", "What was the nature of the leadership of a #1?", "What is the nature of a monarch's rule?", "Is #2 very similar to #3?" ], "evidence": [ [ [ [ "Hatshepsut-1" ] ], [ [ "Pharaoh-1" ] ], [ [ "Monarch-1" ] ], [ [ "Monarch-1", "Pharaoh-1" ] ] ], [ [ [ "Hatshepsut-1" ] ], [ [ "Pharaoh-1", "Pharaoh-2" ] ], [ [ "Monarch-1" ] ], [ "operation" ] ], [ [ [ "Hatshepsut-1" ] ], [ [ "Pharaoh-5" ] ], [ [ "Monarch-1" ] ], [ "operation" ] ] ] }, { "qid": "55ce9ac27a4bd4e627dc", "term": "Rand Paul", "description": "American politician, ophthalmologist, and United States Senator from Kentucky", "question": "Can a New Yorker get their eyes checked by Rand Paul legally?", "answer": false, "facts": [ "Rand Paul is a senator from Kentucky.", "Rand Paul was an ophthalmologist in Kentucky with ABO certification.", "The National Board of Ophthalmology does not recognize ABO certification.", "Kentucky does not require ophthalmologists to be certified.", "NY ophthalmologists must have approved application for licensure certifications." ], "decomposition": [ "What certifications does NY require of ophthalmologists?", "Does Rand Paul have #1?" ], "evidence": [ [ [ [ "Ophthalmology-42" ], "no_evidence" ], [ [ "Rand Paul-12", "Rand Paul-13" ], "operation" ] ], [ [ [ "Ophthalmology-1" ] ], [ [ "Rand Paul-10" ] ] ], [ [ [ "Ophthalmology-41" ] ], [ "operation" ] ] ] }, { "qid": "08233b4241a011e86cea", "term": "Hulk", "description": "Superhero appearing in Marvel Comics publications and related media", "question": "Hypothetically, will an African elephant be crushed by Hulk on its back?", "answer": false, "facts": [ "The Hulk is a Marvel comics character.", "The Hulk is said to be around 8 feet tall and weigh around 1400 pounds.", "An African elephant can carry up to 9,000 kg, or 19,841 pounds." ], "decomposition": [ "How big is the Hulk?", "How much can an African elephant carry?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Hulk-1" ], "no_evidence" ], [ [ "African elephant-16" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Hulk-44" ] ], [ [ "African elephant-18" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Hulk-44" ] ], [ [ "African elephant-16" ] ], [ "operation" ] ] ] }, { "qid": "acb372237880dc80c4a1", "term": "Post Malone", "description": "American singer, rapper, songwriter, and record producer", "question": "Does Post Malone have a fear of needles?", "answer": false, "facts": [ "Post Malone's body is covered with many tattoos.", "The most common method of tattooing in modern times is the electric tattoo machine, which inserts ink into the skin via a single needle or a group of needles that are soldered onto a bar, which is attached to an oscillating unit." ], "decomposition": [ "What is Post Malone known for having on his body and face?", "Does getting #1 not involve the use of needles?" 
], "evidence": [ [ [ [ "Post Malone-27" ] ], [ [ "Tattoo-49" ] ] ], [ [ [ "Post Malone-27" ] ], [ [ "Tattoo-51" ], "operation" ] ], [ [ [ "Post Malone-27" ] ], [ [ "Tattoo-51" ] ] ] ] }, { "qid": "7730e9a15ff9315c24fe", "term": "Spanish–American War", "description": "Conflict in 1898 between Spain and the United States", "question": "Did US President during Spanish-American War suffer similar demise to Abraham Lincoln?", "answer": true, "facts": [ "The Spanish-American War lasted from April 21, 1898 to August 13, 1898.", "William McKinley was President of the United States from March 4, 1897 to September 14, 1901.", "William McKinley died from gun related injuries after an assassination attempt.", "Abraham Lincoln died shortly after being shot by John Wilkes Booth." ], "decomposition": [ "What years were the Spanish–American War?", "Who was the US President during #1?", "How was #2 killed?", "How was Abraham Lincoln killed?", "Is #3 the same as #4?" ], "evidence": [ [ [ [ "Spanish–American War-1" ] ], [ [ "Spanish–American War-2" ] ], [ [ "William McKinley-1" ] ], [ [ "Abraham Lincoln-4" ] ], [ "operation" ] ], [ [ [ "Spanish–American War-1" ] ], [ [ "Spanish–American War-2" ] ], [ [ "William McKinley-1" ] ], [ [ "Maryland in the American Civil War-55" ] ], [ "operation" ] ], [ [ [ "Spanish–American War-1" ] ], [ [ "William McKinley-1" ] ], [ [ "Assassination of William McKinley-1" ] ], [ [ "Assassination of Abraham Lincoln-2" ] ], [ "operation" ] ] ] }, { "qid": "46b3927a42ca343d71c3", "term": "Suicide", "description": "Intentional act of causing one's own death", "question": "Would Modafinil be effective in completing a suicide?", "answer": false, "facts": [ "Modafinil is a powerful wakefulness drug, typically prescribed at 100mg or 200mg per day doses.", "Suicide attempts with up to 5,000mg of Modafinil have been unsuccessful. " ], "decomposition": [ "What is Modafinil?", "What are the effects of taking too much #1?", "Would someone who wanted to commit suicide want to #2?" ], "evidence": [ [ [ [ "Modafinil-1" ] ], [ [ "Modafinil-12" ] ], [ "operation" ] ], [ [ [ "Modafinil-1" ] ], [ [ "Modafinil-19" ] ], [ "operation" ] ], [ [ [ "Modafinil-1" ] ], [ [ "Modafinil-19" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c51fd5841358ded6eaec", "term": "Ten Commandments", "description": "Set of biblical principles relating to ethics and worship, which play a fundamental role in the Abrahamic religions", "question": "Were the Ten commandments the part of the bible that Jewish people do not believe in?", "answer": false, "facts": [ "The Jewish religion regards the Old Testament as their holy book.", "The New Testament of the bible is not acknowledged by Jewish religious people.", "The Ten Commandments are in the Old Testamanet." ], "decomposition": [ "What parts of the Bible do Jews not accept?", "What part of the Bible are the Ten Commandments in?", "Is #2 also listed in #1?" ], "evidence": [ [ [ [ "Old Testament-32" ] ], [ [ "Ten Commandments-50" ] ], [ "operation" ] ], [ [ [ "Christianity and Judaism-13" ] ], [ [ "Ten Commandments-1" ] ], [ "operation" ] ], [ [ [ "Old Testament-1" ] ], [ [ "Book of Exodus-12" ] ], [ "operation" ] ] ] }, { "qid": "63fb8f40ca6c2226dd01", "term": "Dr. Seuss", "description": "American children's writer and illustrator", "question": "Did the death of Helen Palmer have a significant effect on Dr. Seuss?", "answer": true, "facts": [ "Dr. 
"Dr. Seuss's real name was Theodor Geisel.", "Theodor Geisel was married to Helen Palmer at the time of her suicide.", "Theodor Geisel is quoted having said he considered suicide after the death of his wife." ], "decomposition": [ "What relatives did Helen Palmer have when she died?", "What is Dr. Seuss's real name?", "Is #2 one of #1?" ], "evidence": [ [ [ [ "Helen Palmer (author)-1" ] ], [ [ "Dr. Seuss-1" ] ], [ "operation" ] ], [ [ [ "Helen Palmer (author)-1", "Helen Palmer (author)-8" ], "no_evidence" ], [ [ "Dr. Seuss-1" ] ], [ "operation" ] ], [ [ [ "Helen Palmer (author)-1", "Helen Palmer (author)-8" ] ], [ [ "Helen Palmer (author)-1" ] ], [ "operation" ] ] ] }, { "qid": "06215d4b6f1a4b975d9c", "term": "Federal Reserve", "description": "Central banking system of the United States", "question": "Is the Federal Reserve a quick walk from Space Needle?", "answer": false, "facts": [ "The Federal Reserve building is headquartered in Washington, D.C.", "The Space Needle is located in Seattle, Washington.", "It is over 2,700 miles from Seattle, Washington to Washington, D.C." ], "decomposition": [ "Where is the Space Needle located?", "Where is the headquarters of the Federal Reserve located?", "Can the distance between #1 and #2 be covered quickly by walking?" ], "evidence": [ [ [ [ "Space Needle-1" ] ], [ [ "Marriner S. Eccles-8" ] ], [ "operation" ] ], [ [ [ "Space Needle-1" ] ], [ [ "Eccles Building-4" ] ], [ "no_evidence" ] ], [ [ [ "Space Needle-1" ] ], [ [ "Eccles Building-1" ] ], [ "operation" ] ] ] }, { "qid": "46428dac0ceefa1ad73b", "term": "Eric Clapton", "description": "English musician, singer, songwriter, and guitarist", "question": "Would Eric Clapton's mother hypothetically be unable to legally purchase cigarettes in the USA at his birth?", "answer": true, "facts": [ "Eric Clapton's mother was 16 years old at the time of his birth.", "As of 2020, federal law required states comply with a minimum age of 21 years for sale/purchase of tobacco products." ], "decomposition": [ "How old was Eric Clapton's mom when he was born?", "How old must you be to legally buy cigarettes in the USA?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Eric Clapton-4" ] ], [ [ "Cigarette-19" ] ], [ [ "Cigarette-19", "Eric Clapton-4" ], "operation" ] ], [ [ [ "Eric Clapton-4" ] ], [ [ "Tobacco 21-16" ] ], [ "operation" ] ], [ [ [ "Eric Clapton-4" ] ], [ [ "Cigarette-22" ] ], [ "operation" ] ] ] }, { "qid": "ee6e0bcf7be93da46dcc", "term": "Casio", "description": "Japanese electronics company", "question": "Could Casio's first invention be worn around the ankle?", "answer": false, "facts": [ "Casio's first invention was the yubiwa pipe.", "The yubiwa pipe was a ring worn on the finger that held a cigarette in place.", "Ankles are several inches thicker than fingers." ], "decomposition": [ "What was Casio's first invention?", "Where was #1 worn?", "What is the largest diameter of #2?", "What is the smallest diameter of an ankle?", "Is #4 less than or equal to #3?"
], "evidence": [ [ [ [ "Casio-2" ] ], [ [ "Casio-2" ] ], [ [ "Ring (jewellery)-17" ], "no_evidence" ], [ [ "Ankle-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Casio-2" ] ], [ [ "Casio-2" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Casio-2" ] ], [ [ "Casio-2" ] ], [ [ "Hand-3" ], "no_evidence" ], [ [ "Ankle-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8d6e3aeb1a0c7ea1d1d6", "term": "Cousin", "description": "any descendant of an ancestor's sibling", "question": "Does Zelda Williams have any cousins on her father's side?", "answer": false, "facts": [ "Robin Williams was the father of Zelda Williams. ", "Robin Williams was an only child.", "A cousin is the child of a parent's siblings. ", "Only children do not have siblings." ], "decomposition": [ "Who was Zelda Williams's father?", "Does #1 have any siblings?" ], "evidence": [ [ [ [ "Zelda Williams-1" ] ], [ [ "Robin Williams-4" ], "no_evidence", "operation" ] ], [ [ [ "Zelda Williams-2" ] ], [ [ "Robin Williams-4" ] ] ], [ [ [ "Marsha Garces Williams-1" ] ], [ [ "Robin Williams-39" ] ] ] ] }, { "qid": "de287a3a87a5ac5197ff", "term": "Toronto Star", "description": "Newspaper in Toronto, Ontario, Canada", "question": "Can someone sell their time through the Toronto Star?", "answer": true, "facts": [ "The Toronto Star has a classifieds section", "Readers can advertise their own labor or services and thus their time " ], "decomposition": [ "What section of the Toronto Star lists things for sale?", "Can someone's services or labor be sold in #1?" ], "evidence": [ [ [ [ "Toronto Star-31" ] ], [ [ "Classified advertising-1" ] ] ], [ [ [ "Toronto Star-31" ] ], [ [ "Classified advertising-1" ], "operation" ] ], [ [ [ "Toronto Star-31" ] ], [ "operation" ] ] ] }, { "qid": "dda75d90553b54b13562", "term": "Snickers", "description": "brand name chocolate bar made by Mars, Incorporated", "question": "Is it wise to feed a Snickers bar to a poodle?", "answer": false, "facts": [ "A Snickers bar contains chocolate.", "Chocolate is harmful or even toxic to dogs.", "Poodles are a breed of dog." ], "decomposition": [ "What are poodles a breed of?", "What substances are harmful to #1?", "What is a Snickers make out of?", "Is there no overlap between #2 and #3?" ], "evidence": [ [ [ [ "Poodle-1" ], "no_evidence" ], [ [ "Dog food-60" ], "no_evidence" ], [ [ "Snickers-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Poodle-1" ] ], [ [ "Dog-19" ] ], [ [ "Snickers-1" ] ], [ "operation" ] ], [ [ [ "Poodle-18" ], "operation" ], [ "no_evidence" ], [ [ "Snickers-3" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "ea3f5363f250cb9c9969", "term": "United Airlines", "description": "Airline in the United States", "question": "Does United Airlines have a perfect operation record?", "answer": false, "facts": [ "An airline with a perfect operation record has no crashes or other damaging incidents.", "United Airlines has had over 30 crash incidents over several decades." ], "decomposition": [ "What must an airline avoid if they want a perfect operation record?", "Is United Airlines free of #1?" 
], "evidence": [ [ [ [ "Freedoms of the air-3" ], "no_evidence" ], [ [ "United Airlines-62" ], "no_evidence" ] ], [ [ "no_evidence" ], [ [ "United Airlines-62" ], "no_evidence", "operation" ] ], [ [ [ "Incident management (ITSM)-1" ], "no_evidence" ], [ [ "United Airlines-62" ], "operation" ] ] ] }, { "qid": "bb8107d755f91675e0a0", "term": "Sea otter", "description": "A species of marine mammal from the northern and eastern coasts of the North Pacific Ocean", "question": "Does Long John Silver's serve sea otter?", "answer": false, "facts": [ "Sea Otters are endangered marine mammals.", "Long John Silver's is an american fast food chain that serves seafood.", "Long John Silver's does not serve meat from mammals." ], "decomposition": [ "What classes of animals does Long John Silver's serve food from?", "To which class of animals does the sea otter belong?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "Long John Silver's-1" ] ], [ [ "Sea otter-10" ] ], [ "operation" ] ], [ [ [ "Long John Silver's-4" ] ], [ [ "Sea otter-10" ] ], [ [ "Sea otter-10" ] ] ], [ [ [ "Long John Silver's-1" ] ], [ [ "Sea otter-1" ] ], [ [ "Seafood-1" ], "operation" ] ] ] }, { "qid": "131abc946e1ea9ebc87c", "term": "Parachuting", "description": "action sport of exiting an aircraft and returning to Earth using a parachute", "question": "Would Matt Damon be afraid of parachuting?", "answer": true, "facts": [ "Parachuting involves jumping from high places or airplanes.", "Matt Damon is afraid of heights. " ], "decomposition": [ "What is Matt Damon afraid of?", "Does parachuting involve #1?" ], "evidence": [ [ [ [ "Matt Damon-1" ], "no_evidence" ], [ [ "Parachuting-1" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "ed81a7d100d85c51ccae", "term": "Marlboro (cigarette)", "description": "cigarette brand", "question": "Are the colors on Marlboro package found on French flag?", "answer": false, "facts": [ "The colors of the Marlboro package are red, white, and black.", "The French flag has the colors red, white, and blue." ], "decomposition": [ "What are the colors of a Marlboro package?", "What are the colors of the French flag?", "Is #1 identical to #2?" ], "evidence": [ [ [ [ "Marlboro (cigarette)-12", "Marlboro (cigarette)-15", "Marlboro (cigarette)-17" ] ], [ [ "Flag of France-1" ] ], [ "operation" ] ], [ [ [ "Marlboro (cigarette)-17" ] ], [ [ "Flag of France-1" ] ], [ "operation" ] ], [ [ [ "Marlboro (cigarette)-17" ] ], [ [ "Flag of France-1" ] ], [ "operation" ] ] ] }, { "qid": "0765f1c9358d2ad938c7", "term": "Richard Wagner", "description": "German composer", "question": "Did Richard Wagner compose the theme songs for two television series?", "answer": false, "facts": [ "Richard Wagner died in 1883.", "Televisions started to be developed in the 1920s." ], "decomposition": [ "When did the television first become available?", "When did Richard Wagner die?", "Is #1 before #2?" 
], "evidence": [ [ [ [ "Television-2" ] ], [ [ "Richard Wagner-1" ] ], [ "operation" ] ], [ [ [ "Television-2" ] ], [ [ "Richard Wagner-45" ] ], [ "operation" ] ], [ [ [ "History of television-141" ] ], [ [ "Ca' Vendramin Calergi-9" ] ], [ "operation" ] ] ] }, { "qid": "86a79365b44b937959f7", "term": "Börek", "description": "Stuffed phyllo pastry", "question": "Would Recep Tayyip Erdoğan be unfamiliar with börek?", "answer": false, "facts": [ "Turkey enjoys a wide variety of regional variations of börek among the different cultures and ethnicities composing it.", "Börek is very popular in the cuisines of the former Ottoman Empire, especially in North Africa and throughout the Balkans.", "Recep Tayyip Erdoğan is the current president of Turkey and he was born and raised there." ], "decomposition": [ "Where was Recep Tayyip Erdoğan born?", "In which regions is börek part of the normal cuisine?", "Is #1 not part of #2?" ], "evidence": [ [ [ [ "Istanbul-1", "Recep Tayyip Erdoğan-7" ] ], [ [ "Börek-1" ] ], [ "operation" ] ], [ [ [ "Recep Tayyip Erdoğan-7" ] ], [ [ "Börek-9" ] ], [ "operation" ] ], [ [ [ "Istanbul-1", "Recep Tayyip Erdoğan-7" ] ], [ [ "Börek-1" ] ], [ [ "Turkey-1" ], "operation" ] ] ] }, { "qid": "d2b83ee517ac51dc5dcc", "term": "Naruhito", "description": "Emperor of Japan", "question": "Are Naruhito's ancestors the focus of Romance of the Three Kingdoms?", "answer": false, "facts": [ "Naruhito is the Emperor of Japan.", "Romance of the Three Kingdoms was a 14th century historical novel about the Three Kingdoms Period.", "The Three Kingdoms Period was the division of China among the states of Wei, Shu, and Wu." ], "decomposition": [ "Where are the ancestors of Naruhito from?", "What country is the novel Romance of the Three Kingdoms set in?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Naruhito-3" ] ], [ [ "Romance of the Three Kingdoms-1" ] ], [ "operation" ] ], [ [ [ "Naruhito-1" ] ], [ [ "Romance of the Three Kingdoms-1" ] ], [ "operation" ] ], [ [ [ "Naruhito-3" ], "no_evidence" ], [ [ "Romance of the Three Kingdoms-1" ] ], [ "operation" ] ] ] }, { "qid": "f1fec95172ebcbdf1fb8", "term": "Felicity Huffman", "description": "American actress", "question": "Would Felicity Huffman vote for Mike DeWine?", "answer": false, "facts": [ "Mike DeWine is Governor of Ohio", "Felicity Huffman is a resident of California" ], "decomposition": [ "What elected office is held by Mike DeWine?", "What state is Mike DeWine #1 of?", "What state does Felicity Huffman live in?", "Are #2 and #3 the same state?" ], "evidence": [ [ [ [ "Mike DeWine-24" ] ], [ [ "Mike DeWine-24" ] ], [ [ "Felicity Huffman-24" ] ], [ [ "Felicity Huffman-24" ] ] ], [ [ [ "Mike DeWine-1" ] ], [ [ "Mike DeWine-1" ] ], [ [ "Felicity Huffman-24" ] ], [ "operation" ] ], [ [ [ "Mike DeWine-1" ] ], [ [ "Mike DeWine-1" ] ], [ [ "Felicity Huffman-24" ] ], [ "operation" ] ] ] }, { "qid": "72d1364ea31e9717f017", "term": "The Colbert Report", "description": "US satirical news commentary TV program", "question": "Would the host of The Colbert Report be likely to vote for Trump?", "answer": false, "facts": [ "The host of the Colbert report is Stephen Colbert.", "Stephen Colbert has spoken against Donald Trump multiple times on various platforms." ], "decomposition": [ "Who is the host of The Colbert Report show?", "Has #1 said more positive things than negative things about Trump?" 
], "evidence": [ [ [ [ "The Colbert Report-1" ], "no_evidence" ], [ [ "Stephen Colbert-30" ], "no_evidence" ] ], [ [ [ "The Colbert Report-1" ] ], [ [ "Stephen Colbert-56" ], "no_evidence" ] ], [ [ [ "The Colbert Report-17" ] ], [ [ "Stephen Colbert-56" ] ] ] ] }, { "qid": "6a8f77c62222517534f5", "term": "Panic of 1907", "description": "three-week financial crisis in the United States", "question": "Was the father of social security system serving in the white house during the Panic of 1907?", "answer": false, "facts": [ "The father of social security system is Franklin D. Roosevelt. ", "Franklin D. Roosevelt was in Columbia Law School in 1907. " ], "decomposition": [ "Who is the father of the social security system?", "What position serves in the White House?", "When did #1 serve as #2?", "Is 1907 in the range of #3?" ], "evidence": [ [ [ [ "Franklin D. Roosevelt-3" ] ], [ [ "White House-1" ] ], [ [ "Franklin D. Roosevelt-1" ] ], [ "operation" ] ], [ [ [ "Edwin E. Witte-1" ] ], [ [ "Edwin E. Witte-12" ] ], [ [ "Edwin E. Witte-12" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Social Security (United States)-1" ] ], [ [ "White House-1" ] ], [ [ "Franklin D. Roosevelt-1" ] ], [ "operation" ] ] ] }, { "qid": "b995e9731d1488cc1241", "term": "Cancer", "description": "group of diseases", "question": "Do all cancer patients get disability?", "answer": false, "facts": [ "All forms of cancer qualify as diagnoses that can result in disability.", "Disability is not determined by diagnosis, but by degree of impairment.", "Some cancer patients do not experience major impairment." ], "decomposition": [ "What is disability determined by?", "Do all patients of cancer have the same degree of #1?" ], "evidence": [ [ [ [ "Disability-1" ] ], [ [ "Disability-4" ] ] ], [ [ [ "Disability-2" ], "no_evidence" ], [ [ "Cancer-99" ], "no_evidence", "operation" ] ], [ [ [ "Disability Determination Services-18" ] ], [ [ "Cancer rehabilitation-3" ], "operation" ] ] ] }, { "qid": "dae25ebc5462a52b5c17", "term": "Harry Potter and the Philosopher's Stone", "description": "1997 fantasy novel by J. K. Rowling", "question": "Did children read Harry Potter and the Philosopher's Stone during the Albanian Civil War?", "answer": true, "facts": [ "Harry Potter and the Philosopher's Stone was a 1997 children's fantasy book.", "The Albanian Civil War, also called the Albanian Civil Unrest, happened in 1997." ], "decomposition": [ "What year was Harry Potter and the Philosopher's Stone published?", "What year was the Albanian Civil War?", "Did #1 not after #2?" ], "evidence": [ [ [ [ "Harry Potter and the Philosopher's Stone-35" ] ], [ [ "Albanian Civil War-6" ], "no_evidence" ], [ [ "Albanian Civil War-6", "Harry Potter and the Philosopher's Stone-35" ], "operation" ] ], [ [ [ "Harry Potter and the Philosopher's Stone-2" ] ], [ [ "Albanian Civil War-1" ] ], [ "operation" ] ], [ [ [ "Harry Potter and the Philosopher's Stone-2" ] ], [ [ "Albanian Civil War-1" ] ], [ "operation" ] ] ] }, { "qid": "5801d9ee89c288ccb4b7", "term": "Arnold Schwarzenegger", "description": "Austrian-American actor, businessman, bodybuilder and politician", "question": "Would Arnold Schwarzenegger have a hard time picking up a red fox in 1967?", "answer": false, "facts": [ "In 1967, Schwarzenegger won the Munich stone-lifting contest, in which a stone weighing 508 German pounds (254 kg / 560 lb) is lifted between the legs while standing on two footrests.", "Red foxes weigh between 2.2–14 kg (5–31 lb)." 
], "decomposition": [ "How much could Arnold Schwarzenegger life in 1967?", "What is the typical weight of a Red Fox?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Arnold Schwarzenegger-23" ] ], [ [ "Red fox-18" ] ], [ "operation" ] ], [ [ [ "Arnold Schwarzenegger-23" ] ], [ [ "Red fox-18" ] ], [ "operation" ] ], [ [ [ "Arnold Schwarzenegger-23" ] ], [ [ "Vulpes-1" ] ], [ "operation" ] ] ] }, { "qid": "497501e6d21674257ca5", "term": "Six-Day War", "description": "1967 war between Israel and Egypt, Jordan, and Syria", "question": "Could an NBA game be completed within the span of the Six-Day War?", "answer": true, "facts": [ "The Six-Day War took place between June 5th-June 10th, 1967.", "There are 24 hours in a day.", "An NBA game consists of four quarters that are 12 minutes long and a 15 minute long halftime.", "There are 60 minutes in an hour." ], "decomposition": [ "How long did the Six-day War last?", "How long does a basketball game last?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Six-Day War-8" ] ], [ [ "Rules of basketball-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Six-Day War-1" ] ], [ [ "Basketball-32" ] ], [ "operation" ] ], [ [ [ "Six-Day War-1" ] ], [ [ "Basketball-32" ] ], [ "operation" ] ] ] }, { "qid": "c7a4b7ed963de3d9cd92", "term": "Mount Emei", "description": "mountain", "question": "Would it be difficult for Kami Rita to climb Mount Emei?", "answer": false, "facts": [ "Kami Rita has climbed Mount Everest 24 times.", "Mount Everest has an elevation of 8,848 m (29,029 ft).", "Mount Emei has an elevation of 3,099 metres (10,167 ft)." ], "decomposition": [ "What is the highest mountain Kami Rita has climbed?", "What is the height of #1?", "What is the height of Mount Emei?", "Is #3 greater or equal to #2?" ], "evidence": [ [ [ [ "Kami Rita-1" ] ], [ [ "Mount Everest-2" ] ], [ [ "Mount Emei-2" ] ], [ "operation" ] ], [ [ [ "Kami Rita-1" ] ], [ [ "Mount Everest-2" ] ], [ [ "Mount Emei-2" ] ], [ "operation" ] ], [ [ [ "Kami Rita-1" ] ], [ [ "Mount Everest-2" ], "no_evidence" ], [ [ "Mount Emei-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "72277c0ba2a0bcdaf739", "term": "Scottish independence", "description": "political aim for Scotland to be independent from the UK", "question": "Is Alistair Darling in favor of Scottish independence?", "answer": false, "facts": [ "Alistair Darling was the chair of the Better Together Campaign.", "Better Together was the principal campaign for a No vote in the 2014 Scottish independence referendum, advocating Scotland continuing to be part of the United Kingdom. " ], "decomposition": [ "What was the main purpose of the Better Together Campaign?", "Was #1 against independence of Scotland?", "Was Alistair Darling the chair of the campaign?", "Is #2 or #3 negative?" 
], "evidence": [ [ [ [ "Better Together (campaign)-1" ] ], [ [ "Better Together (campaign)-2" ] ], [ [ "Better Together (campaign)-2" ] ], [ "operation" ] ], [ [ [ "Better Together (campaign)-1" ] ], [ "operation" ], [ [ "Alistair Darling-3" ] ], [ "operation" ] ], [ [ [ "Better Together (campaign)-1" ] ], [ "operation" ], [ [ "Better Together (campaign)-2" ] ], [ "operation" ] ] ] }, { "qid": "6f4c8a2789c0305c4f63", "term": "Maya Angelou", "description": "American poet, author, and civil rights activist", "question": "Did any of Maya Angelou's children follow in her footsteps?", "answer": true, "facts": [ "Maya Angelou was a civil rights activist and author.", "Maya Angelou had a son named Guy Johnson in 1945.", "Guy Johnson is an author that has written over twenty books and essays.", "Guy Johnson's books explore many civil rights themes." ], "decomposition": [ "What was Maya angelou's profession?", "Who is Maya Angelou's son?", "Did #2 do #1?" ], "evidence": [ [ [ [ "Maya Angelou-1" ] ], [ [ "Maya Angelou-25" ] ], [ [ "Maya Angelou-1", "Maya Angelou-25" ], "no_evidence" ] ], [ [ [ "Maya Angelou-1" ] ], [ [ "Maya Angelou-8" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Maya Angelou-1" ] ], [ [ "Maya Angelou-8" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f740fed84da8bd24301a", "term": "Sarah", "description": "Biblical character", "question": "Did Methuselah live at least 800 years as long as Sarah?", "answer": true, "facts": [ "The biblical Sarah lived to the age of 127.", "The biblical Methuselah lived to 969 years of age." ], "decomposition": [ "At what age did Methuselah die?", "At what age did Sarah die?", "What is the difference between #1 and #2?", "Is #3 at least 800?" ], "evidence": [ [ [ [ "Methuselah-1" ] ], [ [ "Sarah-11" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Methuselah-1" ] ], [ [ "Sarah-11" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Methuselah-1" ] ], [ [ "Sarah-11" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "e6d8397b258ac4006210", "term": "Rosemary", "description": "species of plant, rosemary", "question": "Is Rosemary outclassed as plant found in most song titles?", "answer": true, "facts": [ "Rosemary appears in a few popular song titles such as Love Grows (Where My Rosemary Goes) and Randy Newman's Rosemary.", "Rose appears in many song titles including: Kiss From a Rose, The Rose, Desert Rose, Beauty of the Rose, and I Never Promised You a Rose Garden." ], "decomposition": [ "How many songs have \"rosemary\" in the title?", "How many songs have the plant \"rose\" in the title?", "Is #1 fewer than #2?" 
], "evidence": [ [ [ [ "Love Grows (Where My Rosemary Goes)-3" ], "no_evidence" ], [ [ "Blue Rose (song)-4", "Kiss from a Rose-1", "Lady Rose (song)-1", "The Rose (song)-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Love Grows (Where My Rosemary Goes)-1", "Rosemary Lane (song)-1" ] ], [ [ "Desert Rose (Sting song)-1", "Every Rose Has Its Thorn-1", "Kiss from a Rose-1" ] ], [ "operation" ] ], [ [ [ "Rosemary-23" ], "no_evidence" ], [ [ "Rose-25" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3336bf888443399593b9", "term": "Spanish–American War", "description": "Conflict in 1898 between Spain and the United States", "question": "Did Switzerland support the United States in the Spanish–American War?", "answer": false, "facts": [ "The Spanish–American War was an armed conflict between Spain and the United States in 1898.", "Switzerland avoids alliances that might entail military, political, or direct economic action and has been neutral since the end of its expansion in 1515.", "Its policy of neutrality was internationally recognised at the Congress of Vienna in 1815." ], "decomposition": [ "What is Switzerland's major policy in terms of foreign relations and international institutions?", "Considering #1, is it likely that Switzerland would have supported the US in the Spanish–American War?" ], "evidence": [ [ [ [ "Switzerland-2" ] ], [ [ "Neutral country-1" ], "operation" ] ], [ [ [ "Foreign relations of Switzerland-23" ] ], [ [ "Spanish–American War-1" ] ] ], [ [ [ "Swiss neutrality-4" ] ], [ "operation" ] ] ] }, { "qid": "bd06ae2cc099d8394c83", "term": "Bill Gates", "description": "American business magnate and philanthropist", "question": "Is Bill Gates the wealthiest of the Baby Boomers?", "answer": false, "facts": [ "The Baby Boomers are the generation born between the years 1946-1964.", "Bill Gates was born on October 28, 1955 and has a net worth of 108 billion as of 2020.", "Jeff Bezos was born on January 12, 1964 and has a net worth of 160 billion as of 2020." ], "decomposition": [ "Which of the present billionaires are baby boomers?", "Who is the wealthiest among #1?", "Is #2 the same as Bill Gates?" ], "evidence": [ [ [ [ "Baby boomers-1", "Bill Gates-1", "Jeff Bezos-1", "Jim Walton-1" ], "no_evidence" ], [ [ "Jeff Bezos-1" ] ], [ "operation" ] ], [ [ [ "Baby boomers-1", "Billionaire-3" ], "no_evidence" ], [ [ "Jeff Bezos-1" ] ], [ "operation" ] ], [ [ [ "Baby boomers-1", "Bill Gates-1", "Bill Gates-3", "Jeff Bezos-1" ], "no_evidence" ], [ [ "Jeff Bezos-1" ], "operation" ], [ "operation" ] ] ] }, { "qid": "b651b1d70b88ae98bc25", "term": "Ammonia", "description": "Chemical compound of nitrogen and hydrogen", "question": "Is it safe to use Ammonia with Clorox?", "answer": false, "facts": [ "Clorox is a brand name of a line of bleach products.", "Ammonia and bleach react together to produce toxic gas." ], "decomposition": [ "What is the main ingredient in Clorox?", "What happens when you mix ammonia and #1 together?", "Is #2 dangerous?" ], "evidence": [ [ [ [ "Sodium hypochlorite-46" ] ], [ [ "Sodium hypochlorite-3" ] ], [ "no_evidence" ] ], [ [ [ "Bleach-4", "Clorox-2" ] ], [ [ "Ammonia-72" ] ], [ "operation" ] ], [ [ [ "Sodium hypochlorite-2" ] ], [ [ "Sodium hypochlorite-3" ] ], [ "operation" ] ] ] }, { "qid": "e48be839783682257024", "term": "Tony Bennett", "description": "American singer", "question": "Did Tony Bennett have more children than he had wives?", "answer": true, "facts": [ "Tony Bennett had four children.", "Tony Bennet has had three wives." 
], "decomposition": [ "How many children has Tony Bennett had?", "How many wives has Tony Bennett had?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Tony Bennett-13", "Tony Bennett-27" ] ], [ [ "Tony Bennett-13", "Tony Bennett-27", "Tony Bennett-43" ] ], [ "operation" ] ], [ [ [ "Tony Bennett-13", "Tony Bennett-27" ] ], [ [ "Tony Bennett-29", "Tony Bennett-43" ] ], [ "operation" ] ], [ [ [ "Sandra Grant Bennett-2" ], "no_evidence" ], [ [ "Tony Bennett-27" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d21ae4c7ee26f90b9574", "term": "Greyhound", "description": "Dog breed used in dog racing", "question": "Would a greyhound be able to outrun a greyhound bus?", "answer": false, "facts": [ "A greyhound bus can travel speeds upward of 60 mph.", "A greyhound dog can run at speeds up to 45 mph." ], "decomposition": [ "What is the top speed of a Greyhound bus?", "What is the top speed of a greyhound dog?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Greyhound Lines-83", "Speed limits in the United States-1" ], "no_evidence" ], [ [ "Greyhound-3" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Greyhound-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Greyhound Lines-17", "MCI 102DL3 & D4500-4" ], "no_evidence" ], [ [ "Greyhound-3" ] ], [ "operation" ] ] ] }, { "qid": "84235f18202b0fe962f2", "term": "Bonanza", "description": "1959-1973 American western/cowboy television series", "question": "Would Bonanza marathon end before WWE Heat marathon?", "answer": false, "facts": [ "Bonanza had a total of 431 episodes.", "WWE Heat had a total of 513 episodes.", "The average run time of WWE Heat was 45 minutes.", "The average run time of Bonanza was 49 minutes." ], "decomposition": [ "How many episodes exist of Bonanza?", "How many episodes exist of WWE Heat?", "How long is each episode of Bonanza?", "How long is each episode of WWE Heat?", "Is #1 times #3 less than #2 times #4?" ], "evidence": [ [ [ [ "Bonanza-30" ] ], [ [ "WWE Heat-11" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Bonanza-30" ] ], [ [ "WWE Heat-3" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bonanza-1" ] ], [ [ "WWE Heat-11" ] ], [ [ "Bonanza-19", "Bonanza-33" ], "no_evidence" ], [ [ "WWE Heat-4" ] ], [ "operation" ] ] ] }, { "qid": "e26a5601edb90a738d56", "term": "Futurama", "description": "American animated sitcom for the Fox Broadcasting Company and Comedy Central", "question": "Will Futurama surpass the number of episodes of The Simpsons by the end of 2020?", "answer": false, "facts": [ "Futurama was cancelled in 2013.", "The Simpsons is still creating new episodes as of May 2020.", "Futurama aired 140 total episodes.", "The Simpsons has aired over 600 episodes." ], "decomposition": [ "How many episodes of Futurama have been produced to date?", "How many episodes of the Simpsons has been produced to date?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Meanwhile (Futurama)-1" ] ], [ [ "The Simpsons-3" ] ], [ "operation" ] ], [ [ [ "Futurama-2", "Futurama-3" ], "no_evidence" ], [ [ "History of The Simpsons-4" ] ], [ "operation" ] ], [ [ [ "Futurama (season 1)-2" ], "no_evidence" ], [ [ "History of The Simpsons-4" ] ], [ "operation" ] ] ] }, { "qid": "ca296dafd5943d07b01c", "term": "Ariana Grande", "description": "American singer, songwriter, and actress", "question": "Was Ariana Grande inspired by Imogen Heap?", "answer": true, "facts": [ "Ariana Grande's song 'Goodnight And Go' uses a sample from a track of the same name.", "\"Goodnight and Go\" is originally an Imogen Heap song." ], "decomposition": [ "Who was the original singer of Ariana Grande's cover 'Goodnight and Go'", "Is #1 Imogen Heap?" ], "evidence": [ [ [ [ "Goodnight and Go-1", "Sweetener (song)-1" ] ], [ "operation" ] ], [ [ [ "Goodnight and Go-1" ] ], [ "operation" ] ], [ [ [ "Goodnight and Go-1" ] ], [ "operation" ] ] ] }, { "qid": "c4d0ceb86bb1e8faba5c", "term": "Communist Party of the Soviet Union", "description": "Ruling political party of the Soviet Union", "question": "Can the Communist Party of the Soviet Union get a perfect all kill?", "answer": false, "facts": [ "The Communist Party of the Soviet Union is a political party", "A perfect all kill occurs when a South Korean recording artist hits number one simultaneously on every music chart" ], "decomposition": [ "Who can get a perfect all kill?", "Is the Communist Party of the Soviet Union a kind of #1?" ], "evidence": [ [ [ [ "Lisa (rapper)-5" ] ], [ [ "Communist Party of the Soviet Union-1" ], "operation" ] ], [ [ [ "Whistle (Blackpink song)-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Love Scenario-4", "Musical ensemble-1", "Song-1" ] ], [ [ "Communist Party of the Soviet Union-1" ] ] ] ] }, { "qid": "517614ab4adf946dbe20", "term": "Eve", "description": "Biblical figure", "question": "Was Eve involved in an incestuous relationship?", "answer": true, "facts": [ "God made Eve from a bone he removed from Adam.", "Since Eve was made from Adam, they would have had similar DNA and been considered twins or at least siblings.", "As the only humans at the time, they ended up starting a family together." ], "decomposition": [ "Who did Eve have intercourse with?", "How was Eve related to #1?", "Can it be concluded that they are family based on #2?" ], "evidence": [ [ [ [ "Incest-18" ] ], [ [ "Incest-18" ] ], [ "operation" ] ], [ [ [ "Eve-13" ] ], [ [ "Eve-2" ] ], [ "operation" ] ], [ [ [ "Adam and Eve-2" ] ], [ [ "Adam and Eve-2" ] ], [ [ "Adam and Eve-2" ] ] ] ] }, { "qid": "cf01f73739a3d1fdb0e6", "term": "Cantonese", "description": "Standard dialect of Yue language that originated in the vicinity of Guangzhou (Canton) in southern China", "question": "Did George W. Bush grow up speaking Cantonese?", "answer": false, "facts": [ "George Bush grew up primarily in Texas.", "English is the primary language spoken in Texas.", "Cantonese is the primary language spoken in certain parts of China." ], "decomposition": [ "Where did George Bush spend the first few years of his life?", "Which language is primarily spoken in #1?", "Is #2 the same as Cantonese?" ], "evidence": [ [ [ [ "George W. Bush-5" ] ], [ [ "United States-80" ] ], [ "operation" ] ], [ [ [ "George W. Bush Childhood Home-1" ] ], [ [ "Texas-146" ] ], [ "operation" ] ], [ [ [ "George W. 
Bush-5" ] ], [ [ "Texas-61" ] ], [ "operation" ] ] ] }, { "qid": "1331f2020fafcdb794c6", "term": "Thesis", "description": "document submitted in support of candidature for an academic degree", "question": "Would a thesis paper be unusual to assign to kindergartners? ", "answer": true, "facts": [ "Kindergartners are usually between 4 and 6 years of age.", "Kindergartners are tasked with learning the alphabet and how to write their own names." ], "decomposition": [ "What skill set is required to create a thesis paper?", "What skill set do kindergartners possess?", "Are all the skills in #1 also found in #2?" ], "evidence": [ [ [ [ "Thesis-1" ], "no_evidence" ], [ [ "Kindergarten-89" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Thesis-15" ] ], [ [ "Kindergarten-29" ] ], [ [ "Kindergarten-29" ], "operation" ] ], [ [ [ "Thesis-17" ] ], [ [ "Cognitive development-23", "Kindergarten-29" ] ], [ [ "Cognitive development-23" ], "operation" ] ] ] }, { "qid": "133364fbf7e41b0f2b38", "term": "Henry Ford", "description": "American businessperson", "question": "Do people still see Henry Ford's last name often?", "answer": true, "facts": [ "Henry Ford is the founder of Ford Motor Company.", "Every Ford vehicle still bears Henry's last name on the brand logo." ], "decomposition": [ "What company did Henry Ford create?", "What does #1 produce? ", "Does #2 have Henry's name on it?" ], "evidence": [ [ [ [ "Henry Ford-1" ] ], [ [ "Ford Motor Company-1" ] ], [ "operation" ] ], [ [ [ "Henry Ford Company-1" ] ], [ [ "Henry Ford Company-1" ] ], [ [ "Henry Ford Company-1" ] ] ], [ [ [ "Henry Ford-14" ] ], [ [ "Henry Ford-14" ] ], [ [ "Henry Ford-14" ], "operation" ] ] ] }, { "qid": "a63e6bc5afd5080b5a70", "term": "Harvey Milk", "description": "American politician who became a martyr in the gay community", "question": "Could a cow produce Harvey Milk?", "answer": false, "facts": [ "Harvey Milk was a human being.", "Cows are not human beings.", "Only human beings can produce offspring which are also human beings." ], "decomposition": [ "What products can be derived from cows?", "Is Harvey Milk a kind of any of #1?" ], "evidence": [ [ [ [ "Cattle-2" ] ], [ [ "Harvey Milk-1" ], "operation" ] ], [ [ [ "Milk-32" ] ], [ [ "Harvey Milk-1" ], "operation" ] ], [ [ [ "Cattle-2" ] ], [ "operation" ] ] ] }, { "qid": "7571100f05bc56919c78", "term": "Funeral", "description": "ceremony for a person who has died", "question": "Is it unusual to play Happy hardcore music at a funeral?", "answer": true, "facts": [ "Happy hardcore is a music genre of hard dance.", "Happy hardcore emerged both from the UK breakbeat hardcore rave scene, and Belgian, German and Dutch hardcore techno scenes.", "A funeral is traditionally a somber event.", "Funerals typically do not involve dancing.", "Raves are typically energetic and upbeat places and are not somber like a funeral." ], "decomposition": [ "What type of music is usually played at funerals?", "What are the characteristics of Happy Hardcore music?", "Do any of #1 have the characteristics of #2?" 
], "evidence": [ [ [ [ "Funeral-8" ] ], [ [ "Happy hardcore-1" ] ], [ "operation" ] ], [ [ [ "Dirge-1" ] ], [ [ "Happy hardcore-1", "Happy hardcore-7" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Funeral march-1" ] ], [ [ "Happy hardcore-2" ] ], [ [ "Funeral march-1" ] ] ] ] }, { "qid": "fcc9fc36ed71bd5d5723", "term": "Lil Wayne", "description": "American rapper, record executive and actor from Louisiana", "question": "Lil Wayne similar real name rapper has over quadruple Wayne's Grammy awards?", "answer": true, "facts": [ "Lil Wayne was born Dwayne Michael Carter.", "Jay-Z was born Shawn Corey Carter.", "Lil Wayne has won 5 Grammy awards.", "Jay-Z has won 22 Grammy awards." ], "decomposition": [ "What is Lil Wayne's real name?", "What rapper has a real name that is similar to #1?", "How many Grammy awards does Lil Wayne have?", "How many Grammy awards does #2 have?", "Is #4 divided by #3 greater than 4?" ], "evidence": [ [ [ [ "Lil Wayne-1" ] ], [ [ "Jay-Z-1" ] ], [ [ "Lil Wayne-4" ] ], [ [ "Jay-Z-4" ] ], [ "operation" ] ], [ [ [ "Lil Wayne-1" ] ], [ [ "Jay-Z-1" ] ], [ [ "Lil Wayne-4" ] ], [ [ "Jay-Z-4" ] ], [ "operation" ] ], [ [ [ "Lil Wayne-1" ] ], [ [ "Jay-Z-1" ] ], [ [ "Lil Wayne-4" ] ], [ [ "Jay-Z-4" ] ], [ "operation" ] ] ] }, { "qid": "3d0f74e6e517d232fe66", "term": "Communist Party of the Soviet Union", "description": "Ruling political party of the Soviet Union", "question": "Would Communist Party of the Soviet Union hypothetically support Trickle Down Economics?", "answer": false, "facts": [ "The Communist Party of the Soviet Union believed in the main aspects of Communism.", "Communism believes that no private ownership of property should be allowed. ", "Trickle Down Economics, popularized by Ronald Reagan, involved rich businesses getting tax breaks so they could supposedly pass the wealth to the poor.", "Trickle Down Economics required rich business owners to have control over the means of production and property." ], "decomposition": [ "Under Leninism, which class led the Communist Party of the Soviet Union?", "Under Trickle-down economics, which economic class gains wealth and power?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Leninism-1" ] ], [ [ "Trickle-down economics-1" ] ], [ "operation" ] ], [ [ [ "Dictatorship of the proletariat-24" ] ], [ [ "Trickle-down economics-7" ] ], [ "operation" ] ], [ [ [ "Leninism-1" ] ], [ [ "Trickle-down economics-1" ] ], [ "operation" ] ] ] }, { "qid": "fd67eec2cbde837f5096", "term": "Brazilian jiu-jitsu", "description": "martial art focusing on grappling and ground fighting, originally based on Kodokan judo newaza taught by Japanese judoka, that developed independently in Brazil from experimentation and adaptation by Carlos and Hélio Gracie, Luiz França, et al.", "question": "Could a white belt defeat Jon Jones in a Brazilian jiu-jitsu match?", "answer": false, "facts": [ "A white belt is the lowest ranking in Brazilian jiu-jitsu.", "Jon Jones has a purple belt in Brazilian jiu-jitsu under Roberto Alencar.", "A purple belt is the second highest ranking in Brazilian jiu-jitsu.", "Jon Jones is one of the greatest combat sports athletes to ever live." ], "decomposition": [ "What color belt does Jon Jones have in Brazilian jiu-jitsu?", "In belt color ranking in Brazilian jiu-jitsu, where is #1? ", "In belt color ranking in Brazilian jiu-jitsu, where is white belt?", "Is #2 higher than #3?" 
], "evidence": [ [ [ [ "Jon Jones-2" ], "no_evidence" ], [ [ "Brazilian jiu-jitsu-39" ] ], [ [ "Brazilian jiu-jitsu-39" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Brazilian jiu-jitsu ranking system-5" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Jon Jones-1" ], "no_evidence" ], [ [ "Black belt (martial arts)-9" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f286dde2dc9fae70eba9", "term": "Bull shark", "description": "Species of fish", "question": "Does bull shark bite hurt worse than crocodile bite?", "answer": false, "facts": [ "The bull shark has the highest weight for weight bite of all cartilaginous fish at 5,914 newtons.", "Crocodile slam their jaws shut with 3,700 pounds per square inch (psi), or 16,460 newtons, of bite force." ], "decomposition": [ "What is the weight bite of a bull shark?", "What is the weight bite of a Crocodile?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Bull shark-9" ] ], [ [ "Crocodile-24" ] ], [ "operation" ] ], [ [ [ "Bull shark-9" ] ], [ [ "Crocodile-24" ] ], [ "operation" ] ], [ [ [ "Bull shark-9" ], "operation" ], [ [ "Crocodile-24" ] ], [ "operation" ] ] ] }, { "qid": "bb8fd1c42fa213c158b5", "term": "1936 Summer Olympics", "description": "games of the XI Olympiad, celebrated in Berlin in 1936", "question": "Is 1936 Summer Olympics venue too small for a Superbowl crowd?", "answer": false, "facts": [ "The 1936 Summer Olympics was held at the Olympiastadion Berlin.", "The Olympiastadion Berlin has a capacity of over 74,000 people.", "The 2020 Superbowl was attended by 62,417 fans." ], "decomposition": [ "At which venue did the 1936 Summer Olympics take place?", "What is the spectator capacity of #1?", "How many people attended the 2020 Superbowl?", "Is #3 greater than #2?" ], "evidence": [ [ [ [ "1936 Summer Olympics-21" ] ], [ [ "1936 Summer Olympics-2" ] ], [ [ "Super Bowl LIV-53" ] ], [ "operation" ] ], [ [ [ "Olympiastadion (Berlin)-1" ] ], [ [ "Olympiastadion (Berlin)-1" ] ], [ [ "Super Bowl LIV-4" ] ], [ [ "Hard Rock Stadium-22" ], "operation" ] ], [ [ [ "Olympiastadion (Berlin)-1" ] ], [ [ "Olympiastadion (Berlin)-2" ] ], [ [ "Hard Rock Stadium-22", "Super Bowl LIV-2" ] ], [ "operation" ] ] ] }, { "qid": "5562e0c2c63d85105bab", "term": "Dr. Seuss", "description": "American children's writer and illustrator", "question": "Was Dr. Seuss a liar?", "answer": true, "facts": [ "Dr. Seuss was a writer and illustrator of children's books", "Dr. Seuss first published a children's book under the name of Dr. Seuss in 1937", "Dr. Seuss did not actually have a doctorate or equivalent degree until 1956" ], "decomposition": [ "When did Dr. Seuss first use the title \"Dr.\"?", "When did he get his doctorate (or equivalent)?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Dr. Seuss-2" ] ], [ [ "Dr. Seuss-18" ] ], [ "operation" ] ], [ [ [ "Dr. Seuss-8" ] ], [ [ "Dr. Seuss-2" ] ], [ "operation" ] ], [ [ [ "Dr. Seuss-2" ] ], [ [ "Dr. Seuss-18" ] ], [ "operation" ] ] ] }, { "qid": "b09403162818b33457eb", "term": "Christopher Columbus", "description": "Italian explorer, navigator, and colonizer", "question": "Did Christopher Columbus go to Antarctica? ", "answer": false, "facts": [ "Between 1492 and 1503, Columbus completed four round-trip voyages between Spain and the Americas.", " His expeditions, sponsored by the Catholic Monarchs of Spain, were the first European contact with the Caribbean, Central America, and South America.", "Antarctica is Earth's southernmost continent." 
], "decomposition": [ "Which areas did Christopher Columbus visit in his voyages?", "Is Antarctica one of #1?" ], "evidence": [ [ [ [ "Christopher Columbus-1" ] ], [ [ "Antarctica-3" ], "operation" ] ], [ [ [ "Christopher Columbus-1" ] ], [ "operation" ] ], [ [ [ "Christopher Columbus-1" ] ], [ "operation" ] ] ] }, { "qid": "3756f2a4b805deba01b6", "term": "Breakdancing", "description": "Style of street dance", "question": "Is breakdancing safe for people with tendonitis?", "answer": false, "facts": [ "Tendonitis is a condition where the joints are inflamed.", "Strong motions in joints suffering from tendonitis can result in damage to nerves.", "Breakdancing is a style of dance that involves many vigorous motions.", "The downrock breakdancing maneuver involves balancing the body weight on the floor using one arm." ], "decomposition": [ "What are the symptoms of tendonitis?", "Which kind of movements are involved in breakdancing?", "Are #2 safe when experiencing #1?" ], "evidence": [ [ [ [ "Tendinopathy-1" ], "no_evidence" ], [ [ "Breakdancing-27" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Tendinopathy-1" ] ], [ [ "Breakdancing-1", "Power move-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Tendinopathy-4" ] ], [ [ "Breakdancing-27" ] ], [ [ "Tendinopathy-5" ], "operation" ] ] ] }, { "qid": "bc8bcfe04e4b026e9c96", "term": "Louvre", "description": "Art museum and Historic site in Paris, France", "question": "Is the Louvre's pyramid known for being unbreakable? ", "answer": false, "facts": [ "The Pyramid at the Louvre is made of glass and metal.", "The Louvre Pyramid glass is 10mm thick.", "10mm thick glass is not unbreakable." ], "decomposition": [ "What materials is the Louvre made of?", "Are all the materials in listed in #1 unbreakable?" ], "evidence": [ [ [ [ "Louvre Pyramid-1" ] ], [ [ "Glass-19" ] ] ], [ [ [ "Louvre Pyramid-1" ] ], [ [ "Glass-2" ], "operation" ] ], [ [ [ "Louvre Pyramid-3" ] ], [ [ "Mohs scale of mineral hardness-4", "Strength of glass-5" ], "no_evidence" ] ] ] }, { "qid": "959fb200fccf056f00d7", "term": "Naruto", "description": "Japanese manga and anime series", "question": "Did Naruto escape the Temple of Doom?", "answer": false, "facts": [ "Naruto is a character in a Japanese anime and manga about ninjas", "The Temple of Doom is a setting from an Indiana Jones movie" ], "decomposition": [ "Which country was the movie Indiana Jones and the Temple of Doom set in?", "What is the setting of manga that features Naruto?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Indiana Jones and the Temple of Doom-4" ] ], [ [ "Manga-1" ] ], [ "operation" ] ], [ [ [ "Indiana Jones and the Temple of Doom-4" ] ], [ [ "Naruto-1" ] ], [ "operation" ] ], [ [ [ "Indiana Jones-8" ] ], [ [ "Naruto-16" ] ], [ "operation" ] ] ] }, { "qid": "fba624d8c01833419760", "term": "August", "description": "eighth month in the Julian and Gregorian calendars", "question": "Is August a winter month for part of the world?", "answer": true, "facts": [ "August is a summer month in the northern hemisphere.", "However, the seasons are opposite south of the Equator.", "August is in the middle of winter for Australia, Antarctica, and parts of Africa and South America." ], "decomposition": [ "What season is August a part of in the northern hemisphere?", "Does #1 correspond to winter south of the Equator?" 
], "evidence": [ [ [ [ "Summer-2" ] ], [ [ "Summer-1" ] ] ], [ [ [ "Summer-1" ] ], [ [ "Summer-1" ] ] ], [ [ [ "August-3" ] ], [ [ "Winter-1" ], "operation" ] ] ] }, { "qid": "13043933149b7952472f", "term": "Saddam Hussein", "description": "Iraqi politician and President", "question": "Would Saddam Hussein hypothetically choose Saladin as ally over Idris I?", "answer": true, "facts": [ "Saddam Hussein, President of Iraq, was a Sunni Muslim that brutalized many Shiite Muslims.", "Saladin was the first Sultan of Egypt and was a Sunni Muslim.", "Idris I was called the founder of Morocco and was a Shiite Muslim." ], "decomposition": [ "Which denomination of Islam did Saddam Hussein identify with?", "Which Islamic denomination did Saladin belong to?", "Which Islamic denomination did Idris I belong to?", "Does #1 match #2 and contrast with #3?" ], "evidence": [ [ [ [ "Saddam Hussein-2" ] ], [ [ "Saladin-1" ] ], [ [ "Idris I of Morocco-1" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Saddam Hussein-1", "Saddam Hussein-54" ] ], [ [ "Saladin-1" ] ], [ [ "Idris I of Morocco-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Saddam Hussein-2" ] ], [ [ "Saladin-1" ] ], [ [ "Zaidiyyah-1", "Zaidiyyah-22" ] ], [ "operation" ] ] ] }, { "qid": "007e8eb724fccca37c36", "term": "Apollo", "description": "God in Greek mythology", "question": "Do Apollo and Baldur share similar interests?", "answer": true, "facts": [ "Apollo is a Greek god of light.", "Baldur is a Norse god of light.", "They are both interested in light." ], "decomposition": [ "Apollo is the Greek god of what object?", "What is Baldur the Norse god of?", "Is the item in #2 the same as #1?" ], "evidence": [ [ [ [ "Apollo-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Apollo-1" ] ], [ [ "Baldr-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Apollo-1" ] ], [ [ "Baldr-7" ] ], [ "operation" ] ] ] }, { "qid": "3157a0f428531ebed7b9", "term": "Minor League Baseball", "description": "hierarchy of professional baseball leagues affiliated with Major League Baseball", "question": "Are any minor league baseball teams named after felines?", "answer": true, "facts": [ "Felines include cats, tigers, and lions.", "The Sacramento River Cats are a minor league baseball affiliate of the San Francisco Giants.", "The Tri-City Valley Cats are a minor league baseball affiliate of the Houston Astros.", "The Lakeland Flying Tigers are a minor league baseball affiliate of the Detroit Tigers." ], "decomposition": [ "What are the names of teams in Minor League Baseball?", "Which animals are regarded as felines?", "Does any of #1 include any of #2?" ], "evidence": [ [ [ [ "Lynchburg Hillcats-1" ], "no_evidence" ], [ [ "Felinae-1" ] ], [ "operation" ] ], [ [ [ "Minor League Baseball-1" ], "no_evidence" ], [ [ "Felidae-1" ] ], [ [ "New Hampshire Fisher Cats-1" ], "operation" ] ], [ [ [ "New Hampshire Fisher Cats-1", "Sacramento River Cats-1" ], "no_evidence" ], [ [ "Felidae-1" ] ], [ "operation" ] ] ] }, { "qid": "425324eb79a7fc1916ef", "term": "Art Deco", "description": "Influential visual arts design style which first appeared in France during the 1920s", "question": "Did Andy Warhol influence Art Deco style?", "answer": false, "facts": [ "Art Deco is a visual style that first appeared in the 1920s.", "Andy Warhol was born in 1928.", "Andy Warhol started drawing when he was in third grade." ], "decomposition": [ "When did Art Deco first appear as a visual style?", "When was Andy Warhol born?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Art Deco-9" ] ], [ [ "Andy Warhol-4" ] ], [ "operation" ] ], [ [ [ "Art Deco-1" ] ], [ [ "Andy Warhol-1" ] ], [ "operation" ] ], [ [ [ "Art Deco-9" ] ], [ [ "Andy Warhol-1" ] ], [ "operation" ] ] ] }, { "qid": "49321f6bd311e10c2634", "term": "Rupert Murdoch", "description": "Australian-born American media mogul", "question": "Would Dante Alighieri hypothetically place Rupert Murdoch in 8th Circle of Hell?", "answer": true, "facts": [ "Dante Alighieri was an Italian poet that wrote Inferno.", "Inferno depicts several layers of Hell.", "The 8th Circle of Hell is reserved for liars, bribers, flatterers, and false prophets.", "Rupert Murdoch was involved in a News International scandal in which police were bribed and phones were hacked.", "Rupert Murdoch refused to take any responsibility for the actions of his employees in the News International scandal." ], "decomposition": [ "Which of Dante Alighieri's works describes hell?", "According to #1, which sins would cause one to be placed in the 8th circle of hell?", "Has Rupert Murdoch come under allegations of any of #2?" ], "evidence": [ [ [ [ "Divine Comedy-2" ] ], [ [ "Malebolge-2" ] ], [ [ "Rupert Murdoch-31" ], "no_evidence" ] ], [ [ [ "Inferno (Dante)-1" ] ], [ [ "Inferno (Dante)-57" ] ], [ [ "News International phone hacking scandal-1" ], "no_evidence", "operation" ] ], [ [ [ "Inferno (Dante)-45" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "4e1b65e81ec09397b26e", "term": "Giant squid", "description": "Deep-ocean dwelling squid in the family Architeuthidae", "question": "Is capturing giant squid in natural habitat impossible with no gear?", "answer": true, "facts": [ "Giant squids live between 1,000 and 3,800 feet in the ocean.", "With a dry dive suit, a scuba tank, gloves, and so on, divers can reach depths of around 1000 feet.", "Without scuba gear people can safely dive a maximum of 60 feet without feeling the negative risks associated with diving beyond the limit." ], "decomposition": [ "At what depths do giant squid live?", "What is the max depth a person can safely dive without gear?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Giant squid-5" ] ], [ [ "Freediving-47" ] ], [ "operation" ] ], [ [ [ "Giant squid-5" ] ], [ [ "Scuba diving-65" ] ], [ "operation" ] ], [ [ [ "Giant squid-5" ] ], [ [ "Underwater diving-5" ] ], [ "operation" ] ] ] }, { "qid": "ce2336a5272765f263c4", "term": "Harry Potter and the Philosopher's Stone", "description": "1997 fantasy novel by J. K. Rowling", "question": "Would characters in Harry Potter and the Philosopher's Stone be persecuted as pagans?", "answer": true, "facts": [ "Pagans are defined as people that hold beliefs other than those of the major world religions (Christianity, Islam, and Judaism).", "The characters in Harry Potter and the Philosopher's Stone practice magic.", "Islam explicitly forbid the practice of magic and has harsh consequences for it.", "Jezebel in Hebrew scripture was a worshiper of pagan Baal and was thrown from a window for her beliefs.", "Women accused of being witches were burned alive by Christians during the Salem Witch Trials." ], "decomposition": [ "What are the characters in Harry Potter and the Philosopher's Stone known to perform?", "What would performers of #1 be considered by Christians?", "What have Christians done to #2 in the past?", "Are #2 pagans and #3 a form of persecution?" 
], "evidence": [ [ [ [ "Harry Potter and the Philosopher's Stone-1" ] ], [ [ "Witchcraft-1" ] ], [ [ "Witchcraft-5" ] ], [ [ "Persecution-1" ] ] ], [ [ [ "Harry Potter and the Philosopher's Stone-1" ] ], [ [ "Magic (supernatural)-2", "Paganism-24" ] ], [ [ "Witch trials in the early modern period-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Harry Potter-1" ] ], [ [ "Witchcraft-2" ] ], [ [ "Death by burning-18" ] ], [ "operation" ] ] ] }, { "qid": "dfaf734cc6f1016cbb0c", "term": "Aretha Franklin", "description": "American singer, songwriter, and pianist", "question": "Has Aretha Franklin ever collaborated with a suicidal person?", "answer": true, "facts": [ "Donny Hathaway was a singer and session musician that worked with Staple Singers, Jerry Butler, Aretha Franklin, the Impressions and Curtis Mayfield.", "Donny Hathaway jumped from his 15th floor room and his death was ruled a suicide." ], "decomposition": [ "What music artists has Aretha Franklin done collaborations with?", "Did any of the artists listed in #1 commit suicide?" ], "evidence": [ [ [ [ "Donny Hathaway-4" ] ], [ [ "Donny Hathaway-17" ] ] ], [ [ [ "Donny Hathaway-1" ], "no_evidence" ], [ [ "Donny Hathaway-16" ], "operation" ] ], [ [ [ "Donny Hathaway-4" ], "no_evidence" ], [ [ "Donny Hathaway-17" ] ] ] ] }, { "qid": "fe5e84a00c13770bf65a", "term": "Paramount leader", "description": "The highest leader of China, usually the General Secretary or Chairman of Chinese Communist Party.", "question": "Did the Paramount leader produce Titanic?", "answer": false, "facts": [ "The Paramount leader is the highest leader of China", "Titanic was produced by Paramount Pictures", "Paramount Pictures is an American film studio" ], "decomposition": [ "Who is the Paramount leader?", "Who produced Titanic?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Jim Gianopulos-1" ] ], [ [ "Titanic (1997 film)-1", "Titanic (1997 film)-12" ] ], [ "operation" ] ], [ [ [ "Paramount leader-1" ] ], [ [ "Titanic (1997 film)-1" ] ], [ "operation" ] ], [ [ [ "Paramount leader-1" ] ], [ [ "Titanic (1997 film)-43" ] ], [ "operation" ] ] ] }, { "qid": "06c7a2b41766c56b48f7", "term": "Nickel", "description": "Chemical element with atomic number 28", "question": "Would nickel boil in the outer core of the earth?", "answer": true, "facts": [ "The boiling point of nickel is 3003 Kelvin", "The temperature of earth's outer core is 3,000–4,500 Kelvin" ], "decomposition": [ "What is the boiling point of nickel?", "What the temperature range of the earth's outer core?", "Is #1 within #2?" ], "evidence": [ [ [ [ "Nickel-5" ], "no_evidence" ], [ [ "Nickel-4" ], "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ [ "Earth's outer core-3" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Earth's outer core-3" ] ], [ "operation" ] ] ] }, { "qid": "74e55542839294b511d0", "term": "Muslims", "description": "Adherents of Islam", "question": "Do Muslims have a different idea of Seraphim than Christians?", "answer": false, "facts": [ "Seraphim are celestial of heavenly beings.", "Christians refer to Seraphims as the highest choir of the angelic hierarchy and caretakers of God's throne.", "Muslims refer to Seraphim as beings created from celestial fire that are part of an exalted assembly." ], "decomposition": [ "How is the Seraphim regarded in Christianity?", "How is the Seraphim regarded in Islam?", "Does #1 clearly contrast with #2?" 
], "evidence": [ [ [ [ "Seraph-12" ] ], [ [ "Seraph-20" ] ], [ "operation" ] ], [ [ [ "Seraph-2" ] ], [ [ "Seraph-20" ] ], [ "operation" ] ], [ [ [ "Christianity in Zambia-47" ], "no_evidence" ], [ [ "Antisemitism in Islam-33" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "9d445747557bf9d8786e", "term": "Julia Roberts", "description": "American actress and producer", "question": "Did Julia Roberts practice blast beats as a child?", "answer": false, "facts": [ "Julia Roberts played the clarinet in her school band.", "Blast beats are a drum beat that originated in hardcore punk and grindcore, and is often associated with certain styles of extreme metal, namely black metal and death metal." ], "decomposition": [ "What instrument did Julia Roberts play as a child?", "What instrument does Blast Beats simulate?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Julia Roberts-7" ] ], [ [ "Blast beat-1" ] ], [ "operation" ] ], [ [ [ "Julia Roberts-7" ] ], [ [ "Blast beat-1" ] ], [ "operation" ] ], [ [ [ "Julia Roberts-7" ] ], [ [ "Blast beat-6" ] ], [ "operation" ] ] ] }, { "qid": "9ad0135c76557f64f7e9", "term": "Muslims", "description": "Adherents of Islam", "question": "Can a Muslim eat a McRib sandwich?", "answer": false, "facts": [ "Pork products are haram, or forbidden in Islam.", "The McRib is a pork-based sandwich." ], "decomposition": [ "What foods are Muslims forbidden to eat?", "What is a McRib made of?", "Are #1 and #2 different from each other?" ], "evidence": [ [ [ [ "Islamic dietary laws-14" ] ], [ [ "McRib-3" ] ], [ "operation" ] ], [ [ [ "Islamic culture-45" ] ], [ [ "McRib-1" ] ], [ "operation" ] ], [ [ [ "Islamic dietary laws-2" ] ], [ [ "McRib-3" ] ], [ "operation" ] ] ] }, { "qid": "63830fb94ef200092420", "term": "Mental disorder", "description": "Distressing thought or behavior pattern", "question": "Are there mental disorders you can hide?", "answer": true, "facts": [ "Many people do not notice depression in their friends or loved ones. ", "\"Masking\" is a phrase used to describe concealing the effects of one's personality, including mental disorder." ], "decomposition": [ "Do any mental disorders have symptoms/effects that can be hidden?" ], "evidence": [ [ [ [ "Mental disorder-39", "Mental disorder-4" ], "no_evidence", "operation" ] ], [ [ [ "Major depressive disorder-2" ] ] ], [ [ [ "Mental disorder-6" ], "no_evidence" ] ] ] }, { "qid": "018f6ea10facddae3b2e", "term": "Porsche", "description": "automotive brand manufacturing subsidiary of Volkswagen", "question": "Was Dorothea Wendling from same place Porsche originated?", "answer": true, "facts": [ "Dorothea Wendling was a singer born in Stuttgart, Germany.", "Porsche was founded in 1931 in Stuttgart, Germany." ], "decomposition": [ "Where was Dorothea Wendling born?", "Where was Posche founded?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Dorothea Wendling-1" ] ], [ [ "Porsche-2" ] ], [ "operation" ] ], [ [ [ "Dorothea Wendling-1" ] ], [ [ "Porsche-1" ] ], [ "operation" ] ], [ [ [ "Dorothea Wendling-1" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "d98cd1c63297e18c77d9", "term": "Underworld", "description": "The mythic Relm of the Dead, located far underground (aka, Hades; Underworld)", "question": "Does Hades have a loose grip on the Underworld?", "answer": false, "facts": [ "Hades alone can allow passage out of the Underworld.", "Hades created a terribly difficult task for Orpheus to complete to bring Eurydice from the Underworld. 
", "The subjects of Hades in the Underworld are under his complete control." ], "decomposition": [ "In whose power is it solely within to allow passage out of the underworld?", "Whose rule are the subjects in the Underworld under?", "Is #1 or #2 not Hades?" ], "evidence": [ [ [ [ "Hades-11" ] ], [ [ "Hades-1" ] ], [ "operation" ] ], [ [ [ "Hades-13" ] ], [ [ "Hades-11" ] ], [ "operation" ] ], [ [ [ "Hades-11" ] ], [ [ "Hades-11" ] ], [ "operation" ] ], [ [ [ "Hades-1" ] ], [ [ "Hades-11" ] ], [ "operation" ] ] ] }, { "qid": "f794258d1e33a50d1348", "term": "Surfing", "description": "sport that consists of riding a wave", "question": "Was Surfing popular when pogs came out?", "answer": true, "facts": [ "Pogs came out in the 1990's.", "The 90's saw a rise in 'Big Wave Culture', a practice involving finding the largest possible waves to surf on." ], "decomposition": [ "When were Pogs released?", "Did surfing experience a growth in popularity in #1?" ], "evidence": [ [ [ [ "Milk caps (game)-8" ] ], [ [ "Surf culture-68" ] ] ], [ [ [ "Milk caps (game)-3" ] ], [ [ "Surf culture-68" ], "operation" ] ], [ [ [ "Milk caps (game)-1" ] ], [ [ "Surf culture-28" ] ] ] ] }, { "qid": "12ba3528a86c6ece9166", "term": "Upload", "description": "sending of data from a local system to a remote system", "question": "Can Centurylink max internet plan upload 1000GB in a fortnight?", "answer": true, "facts": [ "A fortnight refers to a period of two weeks.", "Centurylink's max internet plan speed is 1,000MB per second.", "1000GB takes 2.5 hours to upload for every 1000MB speed." ], "decomposition": [ "How long is a fortnight?", "What is Centurylink's max internet plan speed?", "With #2, how long would it take to upload 1000GB?", "Is #3 less than #1?" ], "evidence": [ [ [ [ "Counting-9" ] ], [ [ "CenturyLink-12" ] ], [ [ "CenturyLink-12" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "CenturyLink-12" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Fortnight (disambiguation)-1" ] ], [ [ "CenturyLink-12" ] ], [ [ "Bit-15", "Megabyte-5" ], "operation" ], [ "operation" ] ] ] }, { "qid": "bfb0f6c573076a1a0634", "term": "Solomon", "description": "king of Israel and the son of David", "question": "Did Solomon make up bigger percentage of Islamic prophets than Kings of Judah?", "answer": false, "facts": [ "According to The Quran, Solomon was one of 25 prophets.", "According to some Islamic hadiths, there have been as many as 124,000 prophets.", "Solomon was one of 20 Kings of Judah." ], "decomposition": [ "According to the Quran, how many prophets were there?", "How many Kings of Judah were there?", "What is 1 divided by #1?", "What is 1 divided by #2?", "Is #3 greater than #4?" ], "evidence": [ [ [ [ "Prophet-23" ] ], [ [ "Kings of Judah-14" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Prophet-23" ] ], [ [ "David-1", "Zedekiah-1" ], "no_evidence" ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Quran-47" ], "no_evidence" ], [ [ "Kingdom of Judah-1" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3a0a8c5869df571f0b93", "term": "Billy Joel", "description": "American singer-songwriter and pianist", "question": "Has Billy Joel sold out Astana Arena?", "answer": false, "facts": [ "Astana Arena is a 30,000 seat football stadium in Kazakhstan.", "Billy Joel, who has performed concerts all over the world, has never been to Kazakhstan." 
], "decomposition": [ "Which country is the Astana Arena located in?", "Has Billy Joel ever been to #1?" ], "evidence": [ [ [ [ "Astana Arena-1" ] ], [ "no_evidence" ] ], [ [ [ "Astana Arena-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Astana Arena-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "6de9966e60e743c03611", "term": "Good", "description": "Term in religion, ethics, and philosophy", "question": "In star rating systems, is 5 stars considered good?", "answer": true, "facts": [ "Most star rating systems are composed of 5 stars.", "In star rating, most people want to avoid a 1 star review." ], "decomposition": [ "What is the highest rating possible in most star rating systems?", "Is #1 equal to five stars?" ], "evidence": [ [ [ [ "Star (classification)-1" ] ], [ "operation" ] ], [ [ [ "Nutritional rating systems-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Star (classification)-1" ] ], [ "operation" ] ] ] }, { "qid": "0da3b98c8e6af0dd89d3", "term": "Geometry", "description": "Branch of mathematics that studies the shape, size and position of objects", "question": "Does Siri know geometry?", "answer": true, "facts": [ "Geometry is the study of size, shape and distance of objects.", "Determining location requires geometry.", "Siri can determine your location." ], "decomposition": [ "What are some basic user information that Siri can determine?", "What mathematical concepts must be known in order to determine the location of a point?", "Is location included in #1 and geometry in #2?" ], "evidence": [ [ [ [ "Siri-3" ] ], [ [ "Location-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Siri-3" ] ], [ [ "Geometry-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Global Positioning System-1", "Siri-1", "Siri-11" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5ce7ac7909c8331097b5", "term": "Coca", "description": "group of plant varieties cultivated for coca production", "question": "Is a Coca plant farm likely to be found in Yakutsk?", "answer": false, "facts": [ "Coca is a plant originating in South America and used as a cash crop.", "Yakutsk is a city in east Siberia.", "The Coca plant grows in a humid tropical climate.", "Siberia has an extremely cold subarctic climate." ], "decomposition": [ "What kind of climate does the Coca plant grow in?", "What country is Yakutsk located on?", "What is the climate of #2?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Coca-16" ], "no_evidence" ], [ [ "Yakutsk-1" ] ], [ [ "Yakutsk-2" ] ], [ "operation" ] ], [ [ [ "Coca-16", "Yungas-1" ] ], [ [ "Yakutsk-1" ] ], [ [ "Climate of Russia-1", "Russia-84", "Russia-85" ] ], [ "operation" ] ], [ [ [ "Coca-25" ] ], [ [ "Yakutsk-1" ] ], [ [ "Yakutsk-2" ] ], [ "operation" ] ] ] }, { "qid": "5f9ae2b7933da8c32505", "term": "Butter", "description": "dairy product", "question": "Does butter industry survive cow extinction?", "answer": true, "facts": [ "Butter is a dairy product made from milk.", "Cows are the predominant source of milk in the US.", "Goats, sheep, buffalo, and other mammals produce milk.", "Goat butter, made from 100% Goat's Milk, is an excellent source of Vitamin A." ], "decomposition": [ "What animal product is butter made from?", "What common livestock animals produce #1?", "Are animals other than cows included in #2?" 
], "evidence": [ [ [ [ "Butter-2" ] ], [ [ "Butter-2" ] ], [ [ "Butter-2" ] ] ], [ [ [ "Butter-10" ], "no_evidence" ], [ [ "Butter-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Butter-1" ] ], [ [ "Milk-26" ] ], [ "operation" ] ] ] }, { "qid": "3655a4efabd3358429d0", "term": "Chrome OS", "description": "Linux-based operating system developed by Google", "question": "Is an internet connection essential for someone using Chrome OS?", "answer": true, "facts": [ "Most Chromebook apps require internet access to function properly.", "There are apps for the Chromebook that can function properly without internet access.", "To download any apps onto the Chromebook, including offline apps, one must connect to the internet." ], "decomposition": [ "What are the applications needed for essential functions on devices running Chrome OS?", "Is an internet connection needed for any of #1 to be downloaded or to work properly?" ], "evidence": [ [ [ [ "Chrome OS-1" ] ], [ [ "Chrome OS-5" ], "operation" ] ], [ [ [ "Chrome OS-2" ] ], [ "operation" ] ], [ [ [ "Chrome OS-2", "Chrome OS-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "11f3a3f27f9029362aaf", "term": "Surgery", "description": "Medical specialty", "question": "Can surgery prevent an existential crisis?", "answer": false, "facts": [ "Surgery is used to correct medical problems or make physical alterations to the body", "An existential crisis is a metaphysical affliction" ], "decomposition": [ "What is an existential crisis?", "What kinds of ailments can be treated with surgery?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Existential crisis-1" ] ], [ [ "Surgery-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Existential crisis-1" ] ], [ [ "Surgery-1" ] ], [ "operation" ] ], [ [ [ "Existential crisis-1" ] ], [ [ "Surgery-1" ] ], [ "operation" ] ] ] }, { "qid": "16c08e5f354b035c41a5", "term": "The Godfather", "description": "1972 film directed by Francis Ford Coppola", "question": "Was a USB flash drive used in The Godfather?", "answer": false, "facts": [ "USB flash drives first appeared on the market in late 2000.", "The Godfather was released in 1972." ], "decomposition": [ "When did USB flash drives become available?", "When was The Godfather released?", "What time-span was setting of The Godfather based on?", "Is #1 before or within #2 or #3?" ], "evidence": [ [ [ [ "USB flash drive-6" ] ], [ [ "The Godfather-3" ] ], [ [ "The Godfather-12" ] ], [ [ "USB flash drive-7" ], "operation" ] ], [ [ [ "USB flash drive-1" ] ], [ [ "The Godfather-1" ] ], [ [ "The Godfather (novel)-1" ] ], [ "operation" ] ], [ [ [ "USB flash drive-1" ] ], [ [ "The Godfather-1" ] ], [ [ "The Godfather-5" ] ], [ "operation" ] ] ] }, { "qid": "b39a840f37ef3d97d498", "term": "Goofy", "description": "Disney cartoon character", "question": "Would Goofy hypothetically enjoy Nylabone?", "answer": true, "facts": [ "Goofy is a popular Disney cartoon character that is a dog.", "Nylabone is a popular dog bone brand.", "Dogs chew bones for the taste, and to exercise the muscles of the jaw." ], "decomposition": [ "What kind of animal does Goofy portray?", "What kind of animal are Nylabones made for?", "Are #1 and #2 both dog?" 
], "evidence": [ [ [ [ "Goofy-1" ] ], [ [ "Dog toy-7" ] ], [ "operation" ] ], [ [ [ "Goofy-1" ] ], [ [ "Dog toy-7" ] ], [ "operation" ] ], [ [ [ "Goofy-1" ] ], [ [ "Dog toy-7" ] ], [ "operation" ] ] ] }, { "qid": "52a5095ce9ac6b3c7ae0", "term": "Software engineer", "description": "Practitioner of software engineering", "question": "Can a software engineer work during a power outage?", "answer": false, "facts": [ "Software engineers require computers to do their work.", "Computers do not work without electricity.", "A power outage is the temporary lack of electrical power." ], "decomposition": [ "What is the main equipment software engineers need to do their job?", "What is a power outage?", "Can #1 work without #2?" ], "evidence": [ [ [ [ "Software engineering-2" ] ], [ [ "Power outage-1" ] ], [ [ "Computer-46" ], "no_evidence" ] ], [ [ [ "Software engineer-1" ] ], [ [ "Power outage-1" ] ], [ "operation" ] ], [ [ [ "Software engineer-6" ] ], [ [ "Power outage-5" ] ], [ [ "Power outage-5" ] ] ] ] }, { "qid": "bb3737083669e10c889f", "term": "Gallic Wars", "description": "Wars in which the Roman Republic conquered Gaul", "question": "Would Roman Gallic Wars army struggle to build the pyramids faster?", "answer": false, "facts": [ "The pyramids were built by an estimated 30,000 workers.", "The Roman Gallic war army had around 75,000 soldiers." ], "decomposition": [ "How many people worked on the pyramids?", "How many soldiers were in the Roman Gallic war army?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Giza pyramid complex-19" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Egyptian pyramid construction techniques-2", "Egyptian pyramid construction techniques-28" ], "no_evidence" ], [ [ "Roman legion-24", "Size of the Roman army-7" ] ], [ "operation" ] ], [ [ [ "Giza pyramid complex-31" ] ], [ [ "Gallic Wars-4", "Size of the Roman army-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f353cbd3286ea1e452aa", "term": "Telescope", "description": "Optical instrument that makes distant objects appear magnified", "question": "Can telescopes hear noise?", "answer": false, "facts": [ "Telescopes are used to view things far away.", "Telescopes are an optical instrument. " ], "decomposition": [ "What are the uses of a telescope?", "Does #1 include detecting noise?" ], "evidence": [ [ [ [ "Telescope-1" ] ], [ "operation" ] ], [ [ [ "Telescope-5" ] ], [ [ "Telescope-5" ] ] ], [ [ [ "Telescope-1" ] ], [ [ "Telescope-1" ], "operation" ] ] ] }, { "qid": "8036e79e6e6d26a45b28", "term": "Al-Farabi", "description": "Philosopher in 10th century Central Asia", "question": "Was Al-Farabi a student of the Great Sheikh?", "answer": false, "facts": [ "The Great Sheikh was the name for Avicenna", "Avicenna was born in 980", "Al Farabi died around 950 " ], "decomposition": [ "What other name was the Great Sheikh known by?", "When was #1 born?", "When did Al-Farabi die?", "Is #3 more recent than #2?" 
], "evidence": [ [ [ [ "Zayed bin Sultan Al Nahyan-2" ] ], [ [ "Zayed bin Sultan Al Nahyan-1" ] ], [ [ "Al-Farabi-10" ] ], [ "operation" ] ], [ [ [ "Zayed bin Khalifa Al Nahyan-1" ], "no_evidence" ], [ [ "Zayed bin Khalifa Al Nahyan-1" ], "no_evidence" ], [ [ "Al-Farabi-1" ] ], [ "operation" ] ], [ [ [ "Zayed bin Khalifa Al Nahyan-1" ] ], [ [ "Zayed bin Khalifa Al Nahyan-1" ] ], [ [ "Al-Farabi-1" ] ], [ "operation" ] ] ] }, { "qid": "b9e623a3ea2aa739facf", "term": "Chlorine", "description": "Chemical element with atomic number 17", "question": "Is it dangerous to consume chlorine when mixed with sodium?", "answer": false, "facts": [ "Chlorine mixed with sodium is sodium chloride, also known as table salt.", "Table salt is one of the most commonly consumed seasonings among all cultures." ], "decomposition": [ "What do you make when you mix Chlorine with sodium?", "What is another name for #1?", "Is #2 dangerous to consume?" ], "evidence": [ [ [ [ "Sodium chloride-13" ] ], [ [ "Sodium chloride-12" ] ], [ [ "Salt-5" ] ] ], [ [ [ "Sodium chloride-22" ] ], [ [ "Sodium chloride-22" ] ], [ "operation" ] ], [ [ [ "Sodium chloride-1" ] ], [ [ "Salt-1" ] ], [ [ "Salt-2", "Salt-5" ] ] ] ] }, { "qid": "d158406a91f5dddbb6a5", "term": "Palace of Westminster", "description": "Meeting place of the Parliament of the United Kingdom,", "question": "Are Big Ben's bells currently rung on their normal schedule at the Palace of Westminster?", "answer": false, "facts": [ "Big Ben is currently under a four year renovation project.", "The bells have been silenced for the duration of the work due to safety.", "They are only rung on certain holidays, until the construction finishes in 2021." ], "decomposition": [ "What is the status of Big Ben right now?", "When will #1 be completed?", "Until #2, is it safe for the bells to ring at Big Ben?" ], "evidence": [ [ [ [ "Big Ben-61" ] ], [ [ "Big Ben-61" ] ], [ [ "Big Ben-61" ] ] ], [ [ [ "Big Ben-61" ] ], [ [ "Big Ben-61" ] ], [ [ "Big Ben-61" ], "no_evidence" ] ], [ [ [ "Big Ben-5" ] ], [ [ "Big Ben-5" ] ], [ [ "Big Ben-46" ] ] ] ] }, { "qid": "0f3ab70fd0a59071bb06", "term": "The Hobbit", "description": "Fantasy novel by J. R. R. Tolkien", "question": "Would a Drow tower over The Hobbit's hero?", "answer": true, "facts": [ "The hero of the Hobbit is Bilbo Baggins.", "Bilbo Baggins is a hobbit, which is a race resembling very short humans with furry feet.", "Halfling is another term for hobbits, and halflings are described as being half the size of a human.", "The Drow are a race of dark elves described as being around five feet in height." ], "decomposition": [ "Who is the hero of The Hobbit", "What is a Drow?", "Is #2 taller than #1?" ], "evidence": [ [ [ [ "Hobbit-12", "Hobbit-2" ] ], [ [ "Drow-28" ] ], [ "operation" ] ], [ [ [ "The Hobbit-7" ] ], [ [ "Drow-28" ] ], [ "no_evidence", "operation" ] ], [ [ [ "The Hobbit-2" ] ], [ [ "Drow-1" ] ], [ [ "Drow-28", "Hobbit-1" ], "operation" ] ] ] }, { "qid": "1f9b1e6e299a9da962fe", "term": "March", "description": "third month in the Julian and Gregorian calendars", "question": "Does March begin on the same day of the week as February during leap years?", "answer": false, "facts": [ "During normal years, February has exactly 28 days, so March begins on the same day of the week as February.", "However, on leap years, February has an extra day, so March begins the next day of the week from whichever day started February." 
], "decomposition": [ "How many days are in February in a non-leap year?", "How many days are in February in a leap year?", "Does #1 mean that March will begin on the same day as February?", "Given that #3 is positive, will #2 make no difference to this outcome?" ], "evidence": [ [ [ [ "Leap year-2" ] ], [ [ "Leap year-3" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "February-10" ] ], [ [ "Leap year-2" ] ], [ [ "Determination of the day of the week-11" ], "operation" ], [ [ "Determination of the day of the week-11" ], "operation" ] ], [ [ [ "February-1" ] ], [ [ "February-1" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "807244d53c3924958261", "term": "QWERTY", "description": "keyboard layout where the first line is \"QWERTYUIOP\"", "question": "Can first letter row of QWERTY keyboard spell a palindrome?", "answer": true, "facts": [ "The first letter row of the QWERTY keyboard contains: QWERTYUIOP", "A palindrome is a word that can be spelled the same backwords and forwards such as racecar and rotor." ], "decomposition": [ "What letters are on the first line of a QWERTY keyboard?", "What vowels are listed in #!?", "What consonants are listed in #1?", "Could a word be formed by repeating the following process: Pick a consonant from #3 then select a vowel from #2. Construct a string of letters from those 2 elements such that the first and third letters are the selected consonant and the second is the vowel.", "If #4 is \"no\" then using the process in Step 4 insert a second occurrence of the same vowel adjacent to the first then choose a consonant from #3 and insert it between the repeated vowel. Is a word formed?" ], "evidence": [ [ [ [ "QWERTY-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "QWERTY-1" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "QWERTY-1" ], "no_evidence" ], [ "operation" ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "f92ab04a1f144b593809", "term": "British royal family", "description": "Family consisting of close relatives of the monarch of the United Kingdom", "question": "Have any members of the 2020 British royal family allegedly committed a felony?", "answer": true, "facts": [ "The 2020 British royal family includes Queen Elizabeth II and her children.", "Prince Andrew is the son of Queen Elizabeth II.", "Prince Andrew was accused of sexual abuse in 2019.", "Sexual assault is classified as a felony." ], "decomposition": [ "Which royal family does Prince Andrew belong to?", "What is Prince Andrew accused of?", "What type of crime is #2?", "Does #1 have a member accused of #3?" 
], "evidence": [ [ [ [ "Prince Andrew, Duke of York-1" ] ], [ [ "Prince Andrew, Duke of York-22" ] ], [ [ "Adolescent sexuality in the United States-53" ] ], [ [ "Prince Andrew, Duke of York-25" ] ] ], [ [ [ "Prince Andrew, Duke of York-6" ] ], [ [ "Prince Andrew, Duke of York-22", "Prince Andrew, Duke of York-25" ] ], [ [ "Prince Andrew, Duke of York-22" ] ], [ [ "Prince Andrew, Duke of York-25" ], "operation" ] ], [ [ [ "Prince Andrew, Duke of York-1" ] ], [ [ "Prince Andrew, Duke of York-3" ] ], [ [ "Felony-6" ] ], [ "operation" ] ] ] }, { "qid": "8d3ddaee20ad48edc066", "term": "Brazilian jiu-jitsu", "description": "martial art focusing on grappling and ground fighting, originally based on Kodokan judo newaza taught by Japanese judoka, that developed independently in Brazil from experimentation and adaptation by Carlos and Hélio Gracie, Luiz França, et al.", "question": "Did Brazilian jiu-jitsu Gracie founders have at least a baker's dozen of kids between them?", "answer": true, "facts": [ "A baker's dozen refers to 13 of anything.", "Brazilian jiu-jitsu was founded by Carlos and Helio Gracie. ", "Helio Gracie had 9 children.", "Carlos Gracie had 11 children." ], "decomposition": [ "Who were the founders of Brazilian jiu-jitsu?", "How many children do #1 have altogether", "What is the number represented by the baker's dozen?", "Is #2 greater than or equal to #3?" ], "evidence": [ [ [ [ "Brazilian jiu-jitsu-2" ] ], [ [ "Carlos Gracie-12", "Hélio Gracie-25" ], "operation" ], [ [ "Dozen-7" ] ], [ "operation" ] ], [ [ [ "Carlos Gracie-1", "Hélio Gracie-1" ] ], [ [ "Carlos Gracie-12", "Hélio Gracie-25" ] ], [ [ "Dozen-7" ] ], [ "operation" ] ], [ [ [ "Gracie family-1" ] ], [ [ "Hélio Gracie-25" ], "no_evidence" ], [ [ "Dozen-7" ] ], [ "operation" ] ] ] }, { "qid": "ec81e2442ee7421b632b", "term": "John Kerry", "description": "68th United States Secretary of State", "question": "Did any Golden Globe winners attend John Kerry's alma mater?", "answer": true, "facts": [ "John Kerry graduated from Yale University.", "Jennifer Connelly attended Yale University in the late 1980s.", "Jennifer Connelly won a Golden Globe award for the film A Beautiful Mind," ], "decomposition": [ "What is John Kerry's alma mater?", "Who has won a Golden Globe?", "Have any of #2 attended #1?" ], "evidence": [ [ [ [ "John Kerry-2" ] ], [ [ "Meryl Streep-19" ], "no_evidence" ], [ [ "Meryl Streep-9" ], "no_evidence", "operation" ] ], [ [ [ "John Kerry-2" ] ], [ [ "Claire Danes-2" ] ], [ [ "Claire Danes-3" ] ] ], [ [ [ "John Kerry-9" ] ], [ [ "Meryl Streep-1" ] ], [ [ "Meryl Streep-9" ], "operation" ] ] ] }, { "qid": "cb798531b692ec645086", "term": "Dolce & Gabbana", "description": "Italian fashion house", "question": "Did Mozart ever buy anything from Dolce & Gabbana?", "answer": false, "facts": [ "Dolce & Gabbana was founded in 1985.", "Wolfgang Amadeus Mozart died in 1791." ], "decomposition": [ "When was Dolce & Gabbana established?", "Was Mozart still alive as at #1?" 
], "evidence": [ [ [ [ "Dolce & Gabbana-1" ] ], [ [ "Wolfgang Amadeus Mozart-1" ], "operation" ] ], [ [ [ "Dolce & Gabbana-1" ] ], [ [ "Biographies of Mozart-1" ] ] ], [ [ [ "Dolce & Gabbana-1" ] ], [ [ "Wolfgang Amadeus Mozart-1" ], "operation" ] ] ] }, { "qid": "288a22ad708fe41454cb", "term": "Breathing", "description": "Process of moving air into and out of the lungs", "question": "Is snoring a sign of good breathing while sleeping?", "answer": false, "facts": [ "Snoring can be a sign of sleep apnea.", "Snoring can cause a variety of symptoms ranging from low energy to high blood pressure." ], "decomposition": [ "What medical condition can snoring be a sign of?", "Is #1 considered good?" ], "evidence": [ [ [ [ "Snoring-1" ] ], [ "operation" ] ], [ [ [ "Snoring-1" ] ], [ [ "Sleep apnea-1" ], "operation" ] ], [ [ [ "Snoring-1" ] ], [ [ "Obstructive sleep apnea-1", "Obstructive sleep apnea-2" ] ] ] ] }, { "qid": "ce01c88f5d2c29a80cb3", "term": "Clown", "description": "A comic performer often for children's entertainment", "question": "Would Stephen King fans be likely to own an image of a clown?", "answer": true, "facts": [ "Stephen King wrote a popular book called \"It\" about an evil clown.", "\"It\" has been made into two major films and has been merchandised. " ], "decomposition": [ "Who is the antagonist of popular Stephen King's book 'It'?", "Is #1 a clown?" ], "evidence": [ [ [ [ "It (novel)-1" ] ], [ "operation" ] ], [ [ [ "It (character)-1" ] ], [ "operation" ] ], [ [ [ "It (character)-1" ] ], [ [ "It (character)-4" ] ] ] ] }, { "qid": "e7ba17761073c7a1ec24", "term": "Secretary", "description": "occupation", "question": "Is the US Secretary of State similar to an administrative secretary of an office?", "answer": false, "facts": [ "An administrative secretary of an office is hired to handle routine and calendar scheduling for a superior.", "The US Secretary of State is the head of the Department of State.", "The US Secretary of State is analogous to a foreign minister of other countries. ", "The US secretary of state can have administrative assistants. ", "Another name for administrative secretary is administrative assistant. " ], "decomposition": [ "What kind of duties are assigned to an administrative secretary?", "What are the duties and nature of the position of the US Secretary of State?", "Is #2 in accordance with #1?" ], "evidence": [ [ [ [ "Secretary-3" ] ], [ [ "United States Secretary of State-4" ] ], [ "operation" ] ], [ [ [ "Secretary-1", "Secretary-3" ] ], [ [ "Secretary of state-14" ] ], [ "operation" ] ], [ [ [ "Secretary-1" ] ], [ [ "Secretary of state-14" ] ], [ "operation" ] ] ] }, { "qid": "aa5d9115cc83aa120b7a", "term": "Burning Man", "description": "annual experimental festival based in Nevada, United States", "question": "Would it be impossible to get to Burning Man on the Mayflower?", "answer": true, "facts": [ "The Mayflower was a seafaring vessel", "Burning Man is held in Black Rock Desert", "There are no bodies of water flowing into the Black Rock Desert" ], "decomposition": [ "What was the Mayflower?", "What environment does #1 work in?", "What kind of environment is The Burning Man?", "Is #2 different from #3?" 
], "evidence": [ [ [ [ "Mayflower-1" ] ], [ [ "Ship-1" ] ], [ [ "Burning Man-1" ] ], [ "operation" ] ], [ [ [ "Mayflower-1" ] ], [ [ "Fluyt-1" ] ], [ [ "Burning Man-1" ] ], [ "operation" ] ], [ [ [ "Mayflower-1" ] ], [ [ "Ship-1" ] ], [ [ "Burning Man-1" ] ], [ "operation" ] ] ] }, { "qid": "b93a4174d1a7dca2ee0f", "term": "Hippopotamus", "description": "A large, mostly herbivorous, semiaquatic mammal native to sub-Saharan Africa", "question": "Are hippos dangerous to humans?", "answer": true, "facts": [ "Hippos are large and have large teeth.", "If threatened, they aggressively defend themselves." ], "decomposition": [ "How do hippopotami respond to perceived threats?", "Considering #1 and their prominent physical features, could they hurt humans?" ], "evidence": [ [ [ [ "Hippopotamus-42" ] ], [ "operation" ] ], [ [ [ "Hippopotamus-35", "Hippopotamus-36" ] ], [ [ "Hippopotamus-42" ], "operation" ] ], [ [ [ "Hippopotamus-42" ] ], [ [ "Hippopotamus-42" ] ] ] ] }, { "qid": "d6f1675f42d55691e008", "term": "Sniper", "description": "Highly trained marksman", "question": "Can a sniper shoot a fish past Bathypelagic Zone in ocean?", "answer": false, "facts": [ "The Bathypelagic Zone extends 4000 meters down in the ocean.", "The longest recorded sniper kill is 3,540 meters." ], "decomposition": [ "How deep is the bathypelagic zone?", "How far can snipers shoot?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Bathyal zone-1" ] ], [ [ "Sniper rifle-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Bathyal zone-1" ] ], [ [ "Longest recorded sniper kills-1" ] ], [ "operation" ] ], [ [ [ "Bathyal zone-1" ] ], [ [ "Sniper-16" ] ], [ "operation" ] ] ] }, { "qid": "b6c4ef03511c60d02183", "term": "Holy Grail", "description": "Cup, dish or stone with miraculous powers, important motif in Arthurian literature", "question": "Has the Holy Grail been featured in at least five films?", "answer": true, "facts": [ "1981's Excalibur film features King Arthur and his knights looking for the Holy Grail.", "Monty Python and the Holy Grail spoofs Arthurian legend.", "Indiana Jones and the Last Crusade features a search for the Holy Grail.", "Prince Killian and the Holy Grail focuses on retrieval of the grail.", "The Silver Chalice focuses on a man that has to sculpt the Holy Grail." ], "decomposition": [ "What movies have featured the Holy Grail?", "Are at least 5 movies listed in #1?" ], "evidence": [ [ [ [ "Indiana Jones and the Last Crusade-1", "Lancelot du Lac (film)-3", "Monty Python and the Holy Grail-2", "The Fisher King-4", "The Light in the Dark-4" ] ], [ "operation" ] ], [ [ [ "Holy Grail-32" ] ], [ [ "Holy Grail-32" ], "operation" ] ], [ [ [ "Holy Grail-32" ] ], [ "operation" ] ] ] }, { "qid": "230f706be3c42e680d12", "term": "Autumn", "description": "one of the Earth's four temperate seasons, occurring between summer and winter", "question": "Does American Independence Day occur during autumn?", "answer": false, "facts": [ "Autumn runs from about September 20 to about December 20.", "American Independence Day is July 4, over two months before autumn begins." ], "decomposition": [ "When does autumn occur in North America?", "When is American Independence Day celebrated?", "Is #2 within the range of #1?" 
], "evidence": [ [ [ [ "Autumn-1" ], "no_evidence" ], [ [ "Independence Day (United States)-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Autumn-3" ] ], [ [ "Independence Day (United States)-1" ] ], [ "operation" ] ], [ [ [ "Autumn-1" ] ], [ [ "Independence Day (United States)-1" ] ], [ "operation" ] ] ] }, { "qid": "29c9d972bd56437185ee", "term": "Communist Party of China", "description": "Political party of the People's Republic of China", "question": "Did Karl Marx influence the communist party of China?", "answer": true, "facts": [ "Communist ideology is the foundation of communist party of China.", "Marx produced a political pamphlet that has since come to be commonly known as the communist manifesto. " ], "decomposition": [ "What does the communist party of China stand to represent?", "What were the political activities of Karl Max?", "Does any of #1 have a source/origin in #2?" ], "evidence": [ [ [ [ "Communist Party of China-37" ] ], [ [ "Das Kapital-3" ] ], [ [ "Communist Party of China-37", "Das Kapital-3" ] ] ], [ [ [ "Communist Party of China-2" ] ], [ [ "Marxism-1" ] ], [ "operation" ] ], [ [ [ "Communist Party of China-2" ] ], [ [ "Karl Marx-3" ] ], [ "operation" ] ] ] }, { "qid": "fa160aa61805e9d27398", "term": "Great Lakes", "description": "System of interconnected, large lakes in North America", "question": "Are the Great Lakes part of an international border?", "answer": true, "facts": [ "The lakes are bordered on the north by Canada.", "The lakes are bordered on the south by United States.", "Canada and United States are two different countries." ], "decomposition": [ "What borders the great lakes to the north?", "What borders the great lakes to the south?", "Are #1 and #2 different countries? " ], "evidence": [ [ [ [ "Great Lakes-1", "Great Lakes-19" ] ], [ [ "Great Lakes-5" ] ], [ "operation" ] ], [ [ [ "Great Lakes-5" ] ], [ [ "Great Lakes-5" ] ], [ "no_evidence" ] ], [ [ [ "Great Lakes-1" ] ], [ [ "Great Lakes-1" ] ], [ "operation" ] ] ] }, { "qid": "98f8a80a3a83d0951176", "term": "Curiosity (rover)", "description": "American robotic rover exploring the crater Gale on Mars", "question": "Can Curiosity (rover) kill a cat?", "answer": true, "facts": [ "Cats weigh on average between 7 to 10 pounds.", "Curiosity (rover), a space vehicle that explores Mars, weighs 1,982 pounds.", "As mass falls, it picks up acceleration and adds to the force of impact." ], "decomposition": [ "How much does a cat weigh?", "How much does Curiosity (rover) weigh?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Cat-28" ], "no_evidence" ], [ [ "Curiosity (rover)-33" ] ], [ "operation" ] ], [ [ [ "Cat-29" ] ], [ [ "Curiosity (rover)-33" ] ], [ "operation" ] ], [ [ [ "Meow (cat)-2" ], "no_evidence" ], [ [ "Curiosity (rover)-6" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "62ac009461272e979537", "term": "Albany, Georgia", "description": "City in Georgia, United States", "question": "Is Albany, Georgia the most populous US Albany?", "answer": false, "facts": [ "Albany, Georgia had a population of 75,249 in 2018.", "Albany, New York had a population of 97,279 in 2018." ], "decomposition": [ "Which places are known as Albany in the United States?", "What are the respective populations of #1?", "Is the population of Albany, Georgia the greatest of #2?" 
], "evidence": [ [ [ [ "Albany, Georgia-1", "Albany, New York-2" ] ], [ [ "Albany, Georgia-1", "Albany, New York-2" ] ], [ "operation" ] ], [ [ [ "Albany, New York-1" ] ], [ [ "Albany, New York-2" ] ], [ [ "Albany, Georgia-1" ] ] ], [ [ [ "Albany, Georgia-1", "Albany, New York-2" ] ], [ [ "Albany, Georgia-1", "Albany, New York-2" ] ], [ "operation" ] ] ] }, { "qid": "997096ba85eb878a4fd3", "term": "Eiffel Tower", "description": "Tower located on the Champ de Mars in Paris, France", "question": "Was King Kong climbing at a higher altitude than Eiffel Tower visitors?", "answer": true, "facts": [ "The Eiffel Tower is 984 ft high, and the visitor platform is 906 ft high.", "King Kong climbed up to the top of the Empire State Building.", "The Empire State Building is 1230 ft high." ], "decomposition": [ "How high is the visitor platform at the Eiffel Tower?", "What is the height of the Empire State Building?", "Is #2 higher than #1?" ], "evidence": [ [ [ [ "Eiffel Tower-4" ] ], [ [ "Empire State Building-1" ] ], [ "operation" ] ], [ [ [ "Eiffel Tower-4" ] ], [ [ "Empire State Building-1", "Empire State Building-28" ] ], [ "operation" ] ], [ [ [ "Eiffel Tower-4" ] ], [ [ "Empire State Building-1" ] ], [ "operation" ] ] ] }, { "qid": "24d689b8a99b3a83b60c", "term": "Constitution of the United States", "description": "Supreme law of the United States of America", "question": "Would Constitution of the United States paper offend PETA?", "answer": true, "facts": [ "The Constitution of the United States is written on parchment.", "Parchment is writing material made from specially prepared untanned skins of animals.", "PETA is an organization that advocates for the ethical treatment of animals." ], "decomposition": [ "What is the US Constitution written on?", "What is #1 made of?", "What does PETA hate?", "How is #2 acquired?", "Is there an overlap between #3 and #4?" ], "evidence": [ [ [ [ "Vellum-1" ], "no_evidence" ], [ [ "Vellum-1" ] ], [ [ "People for the Ethical Treatment of Animals-1" ] ], [ [ "Vellum-7" ] ], [ "operation" ] ], [ [ [ "Constitution of the United States-2" ] ], [ [ "Parchment-1" ] ], [ [ "Killing of animals-28" ] ], [ [ "Parchment-1" ] ], [ "operation" ] ], [ [ [ "U.S. Constitution hemp paper hoax-1" ] ], [ "operation" ], [ [ "Lauren Anderson (model)-3" ], "no_evidence" ], [ [ "Hot dog-12" ] ], [ "operation" ] ] ] }, { "qid": "363cf9e4f1ab45e05a4b", "term": "Cream", "description": "Dairy product", "question": "If you bottle your own milk, would there be cream on top of it?", "answer": true, "facts": [ "Milk that has been bottled straight from a cow has not been homogenized. ", "Homogenization causes the fats in milk to become emulsified.", "Non-homogenized milk will feature fats that separate and float to the top.", "The fats in non-homogenized milk are cream." ], "decomposition": [ "When milk is taken directly from a cow, what appearance and position do the fats assume?", "Is #1 cream and at the top?" 
], "evidence": [ [ [ [ "Cream-1" ] ], [ "operation" ] ], [ [ [ "Milk-59" ] ], [ "operation" ] ], [ [ [ "Milk-53", "Milk-59" ] ], [ [ "Milk-59" ] ] ] ] }, { "qid": "2b69557b4f4f87193f1c", "term": "Nine Inch Nails", "description": "American industrial rock band", "question": "Is Nine Inch Nails's lead singer associated with David Lynch?", "answer": true, "facts": [ "David Lynch is a director that created the television show Twin Peaks.", "Trent Reznor is the lead singer of Nine Inch Nails.", "Trent Reznor appeared on Twin Peaks: The Return in 2017.", "David Lynch directed the music video for Nine Inch Nail's Came Back Haunted." ], "decomposition": [ "Who is the lead singer of Nine Inch Nails?", "What works has #1 appeared in?", "What are the works of David Lynch?", "Is there overlap between #2 and #3?" ], "evidence": [ [ [ [ "Trent Reznor-1" ] ], [ [ "Trent Reznor-29" ] ], [ [ "Trent Reznor-29" ] ], [ "operation" ] ], [ [ [ "Trent Reznor-1" ] ], [ [ "Trent Reznor-29" ], "no_evidence" ], [ [ "David Lynch-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Nine Inch Nails-1" ] ], [ [ "Part 8 (Twin Peaks)-13", "Trent Reznor-3" ], "no_evidence" ], [ [ "David Lynch-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "37505be3ab3ef4b7f2ab", "term": "Pan (god)", "description": "Ancient Greek god of the wilds, shepherds, and flocks", "question": "Is Pan a symbol of virtue and virginity in women?", "answer": false, "facts": [ "Pan is famous for his sexual powers.", "Women who had had sexual relations with several men were referred to as \"Pan girls.\"" ], "decomposition": [ "What was the nature of Pan's relation with women?", "Is #1 not sexual?" ], "evidence": [ [ [ [ "Pan (god)-17" ] ], [ [ "Pan (god)-17", "Pan (god)-19" ] ] ], [ [ [ "Pan (god)-17", "Pan (god)-18" ] ], [ "operation" ] ], [ [ [ "Pan (god)-17", "Pan (god)-18" ], "no_evidence" ], [ [ "Virginity-1" ], "operation" ] ] ] }, { "qid": "c5d50d6e2d1747880aed", "term": "Black Sea", "description": "Marginal sea of the Atlantic Ocean between Europe and Asia", "question": "Do people put creatures from the Black Sea on their pizza?", "answer": true, "facts": [ "Pizza toppings include pepperoni, sausage, bacon, meatball, and anchovies.", "The Black Sea is home to many animals including dogfish, jellyfish, and anchovies." ], "decomposition": [ "What creatures are native to the Black Sea?", "What are common pizza toppings?", "Do any of #1 appear in #2?" ], "evidence": [ [ [ [ "Zebra mussel-14" ] ], [ [ "Pizza-1" ] ], [ "operation" ] ], [ [ [ "Black Sea-38", "Black Sea-41", "Black Sea-64" ] ], [ [ "Pizza-1" ] ], [ "operation" ] ], [ [ [ "Anchovy-2" ], "no_evidence" ], [ [ "Anchovies as food-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "97f35d567ddd31b7da6c", "term": "Wool", "description": "Textile fibre from the hair of sheep or other mammals", "question": "Should wool be hand washed only?", "answer": true, "facts": [ "Felting is a process through which wool is shaped and shrunken through agitation in soapy water. ", "Felting will often occur if you put a wool item in the washer." ], "decomposition": [ "What is felting?", "Will washing wool cause #1?" 
], "evidence": [ [ [ [ "Felt-8" ] ], [ [ "Felt-9" ], "operation" ] ], [ [ [ "Felt-8", "Wool-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sense-45" ] ], [ [ "Wool-3" ] ] ] ] }, { "qid": "5a20b692ab755caf4ae9", "term": "Paratrooper", "description": "Military parachutists functioning as part of an airborne force", "question": "Are paratroopers good at mountain rescue?", "answer": true, "facts": [ "A paratrooper is a member of a military unit that deploys parachutes. ", "A PJ is the acronym name for a military parachute jumper.", "PJs are an elite mountain rescue unit. " ], "decomposition": [ "What military unit do paratroopers belong to?", "Do #1 use equipment that makes them suitable for mountain rescue?" ], "evidence": [ [ [ [ "Paratrooper-1" ] ], [ [ "Paratrooper-2" ], "no_evidence" ] ], [ [ [ "Paratrooper-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Paratrooper-74" ] ], [ [ "Paratrooper-1" ], "no_evidence" ] ] ] }, { "qid": "1517e4258ddaf94d85c7", "term": "Berlin University of the Arts", "description": "public art school in Berlin, Germany", "question": "Is the Berlin University of the Arts a Baroque period relic?", "answer": true, "facts": [ "The Berlin University of the Arts is a German school created in 1696.", "The Baroque period lasted in Europe from 1600 to 1750." ], "decomposition": [ "In which year was the Berlin University of the Arts established?", "When the Baroque period start and end?", "Does #1 fall within #2?" ], "evidence": [ [ [ [ "Berlin University of the Arts-5", "Prussian Academy of Arts-1" ] ], [ [ "17th century-1", "Baroque-1" ] ], [ "operation" ] ], [ [ [ "Berlin University of the Arts-5" ], "no_evidence" ], [ [ "Baroque music-1", "Baroque-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Berlin University of the Arts-5" ] ], [ [ "Baroque-1" ] ], [ "operation" ] ] ] }, { "qid": "7c5daad08c46d9410a81", "term": "French people", "description": "People from France", "question": "Can a student from Smithtown's Cleary School understand the speech of a French person?", "answer": false, "facts": [ "French is a romance language that originated in France.", "The Cleary School in Smithtown New York is a school for the deaf." ], "decomposition": [ "The Cleary School in Smithtown, New York is for students with which disability?", "Can a person with #1 perceive or understand speech?" ], "evidence": [ [ [ [ "Deaf education-25" ] ], [ "operation" ] ], [ [ [ "Deaf education-25", "Hearing loss-1" ] ], [ "operation" ] ], [ [ [ "Deaf education-25" ] ], [ [ "Physical disability-7" ], "operation" ] ] ] }, { "qid": "90fc9a677ae0df6f2974", "term": "Nostradamus", "description": "16th-century French apothecary and reputed seer", "question": "Would Dante have hypothetically placed Nostradamus in 3rd Circle of Hell?", "answer": false, "facts": [ "Nostradamus was a famous seer and court astrologer.", "Dante's 3rd Circle of Hell is reserved for gluttons.", "The 8th Circle of Hell is reserved for frauds.", "Astrology was seen as a valuable skill during Dante's lifetime.", "Dante places the mystic and prophet Joachim of Flora in the heaven of the sun." ], "decomposition": [ "What type of people did Dante put in the 3rd Circle of Hell?", "Is there evidence Nostradamus was #1?" 
], "evidence": [ [ [ [ "Inferno (Dante)-33" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Inferno (Dante)-32" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Inferno (Dante)-32" ] ], [ [ "Nostradamus-13" ] ] ] ] }, { "qid": "2847095b289d1f4b48fd", "term": "Rabbi", "description": "teacher of Torah in Judaism", "question": "Can a rabbi save the soul of a Christian?", "answer": true, "facts": [ "Rabbis are spiritual leaders of the Jewish community.", "Baptism is a necessary Christian ritual for salvation.", "Any non-Christian can perform an emergency baptism if someone's life is in danger." ], "decomposition": [ "What are Rabbis's role in the Jewish community?", "What is a Christian ritual that is needed for salvation?", "In an emergency, can #1 perform #2?" ], "evidence": [ [ [ [ "Rabbi-1" ] ], [ [ "Salvation in Christianity-5" ] ], [ [ "Salvation in Christianity-2" ] ] ], [ [ [ "Rabbi-1" ] ], [ [ "Baptism-2", "Last rites-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Rabbi-1" ] ], [ [ "Baptism-1" ] ], [ [ "Baptism-59" ], "no_evidence" ] ] ] }, { "qid": "5f89881767cac98c56d6", "term": "Seven Years' War", "description": "Global conflict between 1756 and 1763", "question": "Could the Austrian casualties from Seven Years' War fit in Indianapolis Motor Speedway?", "answer": true, "facts": [ "There were 373,588 Austrian casualties during the Seven Years' War.", "The infield seating at the Indianapolis Motor Speedway raises capacity to an approximate 400,000 people." ], "decomposition": [ "How many casualties did the Austrian's have in the Seven Year War?", "What is the seating capacity for the Indianapolis Motor Speedway?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Battle of Prague (1757)-1" ], "no_evidence" ], [ [ "Indianapolis Motor Speedway-3" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Indianapolis Motor Speedway-3" ] ], [ "operation" ] ], [ [ [ "Seven Years' War-36" ], "no_evidence" ], [ [ "Indianapolis Motor Speedway-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3a810d24f086f07f71bf", "term": "Butler", "description": "male domestic worker in charge of all the male household staff", "question": "Did the butler Eugene Allen retire the same year a centuries-old war ended?", "answer": true, "facts": [ "Eugene Allen was a butler at the White House for 34 years until 1986", "The United Kingdom and the Kingdom of the Netherlands ended the Three Hundred and Thirty Five Years' Warnin 1986" ], "decomposition": [ "In what year did Eugene Allen retire?", "Which war ended in #1?", "How many years did #2 last?", "Is #3 greater than 100?" ], "evidence": [ [ [ [ "Eugene Allen-6" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ "operation" ] ], [ [ [ "Eugene Allen-1" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ "operation" ] ], [ [ [ "Eugene Allen-1" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ [ "Three Hundred and Thirty Five Years' War-1" ] ], [ "operation" ] ] ] }, { "qid": "2ea15e27e5d94fd82471", "term": "Van Morrison", "description": "Northern Irish singer-songwriter and musician", "question": "Does title of Van Morrison's most played song apply to a minority of women worldwide?", "answer": false, "facts": [ "Van Morrison's most played song was the hit Brown Eyed Girl.", "Between 55 to 79 percent of people worldwide have brown eyes.", "Brown is the most common eye color." 
], "decomposition": [ "What is Van Morrison's most played song?", "What percentage of women worldwide meet the description in #1?", "Is #2 less than 50%?" ], "evidence": [ [ [ [ "Van Morrison-1" ] ], [ [ "Eye color-11" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Brown Eyed Girl-7" ] ], [ [ "Brown-20" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Brown Eyed Girl-7" ] ], [ [ "Eye color-2" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "e4945001c03fa74e1cc9", "term": "Hanuman", "description": "The divine monkey companion of Rama in Hindu mythology", "question": "Does Hanuman have some of the same duties as Athena?", "answer": true, "facts": [ "Hanuman, the divine Hindu monkey, is a god of strength, knowledge, and victory.", "Athena was the Greek goddess of war, and wisdom." ], "decomposition": [ " Hanuman said to be the deity of what subjects?", "Athena said to be the deity of what subjects?", "Is at least one subject listed in both #1 and #2?" ], "evidence": [ [ [ [ "Hanuman-14" ] ], [ [ "Athena-1" ] ], [ "operation" ] ], [ [ [ "Hanuman-2" ], "no_evidence" ], [ [ "Athena-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Hanuman-2" ] ], [ [ "Athena-1" ] ], [ "operation" ] ] ] }, { "qid": "d82193894aa1c12fbc40", "term": "Nissan", "description": "Japanese automobile manufacturer", "question": "Do workers at Nissan's headquarters eat with chopsticks?", "answer": true, "facts": [ "Nissan's headquarters are located in Yokohama, Japan.", "It is customary to eat with chopsticks in East Asian countries.", "Japan is a country in East Asia." ], "decomposition": [ "Where is Nissan's headquarters located?", "Do people living in #1 usually eat with chopsticks?" ], "evidence": [ [ [ [ "Nissan-1", "Yokohama-1" ] ], [ [ "Chopsticks-1" ] ] ], [ [ [ "Nissan-1" ] ], [ [ "Chopsticks-1", "Etiquette in Japan-21" ] ] ], [ [ [ "Nissan-3" ] ], [ [ "Chopsticks-1" ], "operation" ] ] ] }, { "qid": "89740d0e33a805a78e1d", "term": "Edward II of England", "description": "14th-century King of England and Duke of Aquitaine", "question": "Would Edward II of England have been born without Vikings?", "answer": false, "facts": [ "Edward II was King of England from 1307-1327.", "Rollo was a Viking who became the first ruler of Normandy.", "William the Conqueror was the first Norman King of England and the great-great-great-grandson of Rollo.", "Edward II is related to William the Conqueror through Rollo's granddaughter's line (Empress Matilda)." ], "decomposition": [ "What descendant of Vikings conquered England?", "What realm did #1 hail from?", "Who was the first ruler of #2?", "Is Edward II of England descended from #3?" 
], "evidence": [ [ [ [ "Normandy-10" ] ], [ [ "William the Conqueror-1" ] ], [ [ "House of Normandy-1" ] ], [ [ "Edward II of England-1" ] ] ], [ [ [ "Harold Godwinson-1" ], "no_evidence" ], [ [ "Normans-1" ], "no_evidence" ], [ "no_evidence" ], [ [ "Edward II of England-1" ], "no_evidence", "operation" ] ], [ [ [ "William the Conqueror-1" ] ], [ [ "William the Conqueror-8" ] ], [ [ "Rollo-1" ] ], [ [ "Rollo-23" ], "no_evidence", "operation" ] ] ] }, { "qid": "9e231570b7c36885ee2b", "term": "Black swan", "description": "species of bird", "question": "Do black swan cygnets typically know both of their genetic parents?", "answer": false, "facts": [ "Up to one-quarter of monogamous black swan pairs are same-sex, with males taking over a female's nest to raise her young without her", "One-third of monogamous black swan pairs show extra-pair paternity, with the male aiding the female in raising another male's offspring ", "A cygnet is a juvenile swan" ], "decomposition": [ "What kinds of adults take on the responsibility of raising black swan cygnets?", "Is there an insignificant chance of #1 not being both genetic parents of the cygnets they raise?" ], "evidence": [ [ [ [ "Black swan-1" ], "no_evidence" ], [ [ "Black swan-11" ], "operation" ] ], [ [ [ "Black swan-22", "Black swan-23" ] ], [ "operation" ] ], [ [ [ "Black swan-22", "Black swan-23" ] ], [ [ "Black swan-22" ], "no_evidence" ] ] ] }, { "qid": "0d147c2a8eb58d38cffa", "term": "Painting", "description": "Practice of applying paint, pigment, color or other medium to a surface", "question": "Is the most recent Democrat President in the US known for his painting practice?", "answer": false, "facts": [ "George W. Bush is a former Republican President of the US.", "George W. Bush posts photos of his paintings online.", "Barack Obama succeeded George W. Bush as a Democrat President.", "Barack Obama doesn't post photos of paintings he has made." ], "decomposition": [ "Who is the most recent Democrat President in the US?", "Was #1 well known for painting?" ], "evidence": [ [ [ [ "Democratic Party (United States)-4" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Barack Obama-65" ] ], [ [ "Barack Obama-128", "George W. Bush-157" ] ] ], [ [ [ "Barack Obama-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "157913fa4c74220aff2d", "term": "Pickled cucumber", "description": "Cucumber pickled in brine, vinegar, or other solution", "question": "Could pickled cucumbers from 1,000 years ago be good still?", "answer": false, "facts": [ "Pickled foods disintegrate over time.", "Pickling and preserving technology from 1,000 years ago was not airtight or made for longevity." ], "decomposition": [ "What happens to picked foods over time?", "Was technology available 1000 years ago to prevent #1?" ], "evidence": [ [ [ [ "Pickling-2", "Pickling-35" ] ], [ [ "Refrigeration-1" ] ] ], [ [ [ "Pickling-35" ], "no_evidence" ], [ [ "Refrigeration-26" ], "operation" ] ], [ [ [ "Pickling-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cac0fdde384da0c11524", "term": "Royal Society", "description": "National academy of science in the United Kingdom", "question": "Can numerologists become members of Royal Society?", "answer": false, "facts": [ "The royal society fulfills a number of roles: promoting science and its benefits, recognizing excellence in science, supporting outstanding science, providing scientific advice for policy.", "Numerology is a superstition and a pseudoscience that uses numbers to give the subject a veneer of scientific authority." 
], "decomposition": [ "What is the primary basis for being selected as a member of the Royal Society?", "What do numerologists do?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Royal Society-24" ] ], [ [ "Numerology-3" ] ], [ "operation" ] ], [ [ [ "Royal Society-24" ] ], [ [ "Numerology-1" ] ], [ "operation" ] ], [ [ [ "Royal Society-17" ] ], [ [ "Numerology-3" ] ], [ "operation" ] ] ] }, { "qid": "f606b2166555bf5e6bfd", "term": "Cream", "description": "Dairy product", "question": "If someone is lactose intolerant, do they have to avoid cream?", "answer": true, "facts": [ "People with lactose intolerance are unable to fully digest the sugar (lactose) in milk.", "Cream is a dairy product composed of the higher-fat layer skimmed from the top of milk before homogenization", "Cream contains milk." ], "decomposition": [ "What do people who are lactose intolerant have to avoid?", "Does cream contain #1?" ], "evidence": [ [ [ [ "Lactose intolerance-1" ] ], [ [ "Cream-1", "Milk-51" ] ] ], [ [ [ "Lactose intolerance-1" ] ], [ [ "Cream-1" ], "operation" ] ], [ [ [ "Lactose intolerance-1" ] ], [ [ "Cream-1" ], "operation" ] ] ] }, { "qid": "1335c853d7f61866345a", "term": "United States Naval Academy", "description": "The U.S. Navy's federal service academy", "question": "Could Jamie Brewer have attended the United States Naval Academy?", "answer": false, "facts": [ "Jamie Brewer is a famous actress with down syndrome.", "Individuals with down syndrome are disqualified from military service." ], "decomposition": [ "What genetic disorder does Jamie Brewer have?", "Are individuals with #1 allowed to be in the US Naval Academy?" ], "evidence": [ [ [ [ "New York Fashion Week-26" ] ], [ "no_evidence" ] ], [ [ [ "Jamie Brewer-7" ] ], [ [ "Basic Military Qualification-3" ], "no_evidence", "operation" ] ], [ [ [ "Jamie Brewer-3" ] ], [ [ "United States Naval Academy-99" ] ] ] ] }, { "qid": "040f15ccc61888c73b48", "term": "Apartheid", "description": "System of institutionalised racial segregation that existed in South Africa and South West Africa (Namibia) from 1948 until the early 1990s", "question": "Did Elle Fanning play an essential part in ending apartheid?", "answer": false, "facts": [ "Apartheid lasted from 1948 until the early 1990s.", "Actress Elle Fanning was born on April 9, 1998." ], "decomposition": [ "When was Actress Elle Fanning born?", "Through which period did the Apartheid last?", "Is #1 before or within #2?" ], "evidence": [ [ [ [ "Elle Fanning-1" ] ], [ [ "Apartheid-1" ] ], [ "operation" ] ], [ [ [ "Elle Fanning-5" ] ], [ [ "Apartheid-153" ] ], [ "operation" ] ], [ [ [ "Elle Fanning-1" ] ], [ [ "Apartheid-1" ] ], [ "operation" ] ] ] }, { "qid": "86434d8eecff8b7ddb50", "term": "Twenty-third Amendment to the United States Constitution", "description": "Grants residents of Washington, D.C. the right to vote in U.S. presidential elections", "question": "Did the 23rd amendment give Puerto Ricans the right to vote for president?", "answer": false, "facts": [ "The 23rd Amendment to the Constitution gave residents of Washington D.C. the right to vote in presidential elections.", "Puerto Rico is an American territory, not a state, and does not have the right to vote for president.", "Puerto Rico is not Washington D.C." ], "decomposition": [ "Which US state was given the right to vote by the 23rd Amendment?", "Is Is Puerto Rico a US state or the same as #1?" 
], "evidence": [ [ [ [ "Twenty-third Amendment to the United States Constitution-1" ] ], [ [ "Puerto Rico-1" ], "operation" ] ], [ [ [ "Twenty-third Amendment to the United States Constitution-1" ] ], [ [ "Puerto Rico-1" ], "operation" ] ], [ [ [ "Twenty-third Amendment to the United States Constitution-1" ] ], [ [ "Puerto Rico-1" ] ] ] ] }, { "qid": "f3584715f2b8d7894a3a", "term": "Edward Snowden", "description": "American whistleblower and former National Security Agency contractor", "question": "Could Edward Snowden join MENSA?", "answer": true, "facts": [ "Snowden scored above 145 on two separate IQ tests.", "The minimum accepted IQ score for MENSA on the Stanford–Binet is 132, while for the Cattell it is 148." ], "decomposition": [ "What is the minimum accepted IQ score to be admitted to MENSA?", "What is Edward Snowden's IQ?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Mensa International-5" ] ], [ [ "Edward Snowden-5" ] ], [ "operation" ] ], [ [ [ "Mensa International-5" ] ], [ [ "Edward Snowden-5" ] ], [ [ "Edward Snowden-5", "IQ Award-1" ], "operation" ] ], [ [ [ "Mensa International-5" ] ], [ [ "Edward Snowden-5" ] ], [ "operation" ] ] ] }, { "qid": "b1dc0601af214a930eb2", "term": "French people", "description": "People from France", "question": "Were French people involved in the American Civil War?", "answer": true, "facts": [ "The French General Lafayette allied with the American Revolutionaries.", "Lafayette's army scored several key victories for the rebels." ], "decomposition": [ "Which allies did the American Revolutionaries have during the war?", "Which of #1 scored many important victories for them?", "Are any of #2 French?" ], "evidence": [ [ [ [ "France in the American Revolutionary War-16" ] ], [ [ "Major General Comte Jean de Rochambeau-1" ] ], [ [ "Jean-Baptiste Donatien de Vimeur, comte de Rochambeau-1" ], "operation" ] ], [ [ [ "George Washington in the American Revolution-10" ], "no_evidence" ], [ [ "France in the American Revolutionary War-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "American Revolutionary War-42" ], "no_evidence" ], [ [ "Battle of the Chesapeake-1" ] ], [ "operation" ] ] ] }, { "qid": "3cab1f5599e8b1e4a581", "term": "Helium", "description": "Chemical element with atomic number 2", "question": "Is helium the cause of the Hindenburg explosion?", "answer": false, "facts": [ "The Hindenburgh was filled with hydrogen.", "Helium is considered a noble, inert gas that doesn't react.", "Hydrogen is highly flammable. " ], "decomposition": [ "What gas was the Hindenburg filled with?", "Is #1 helium?" ], "evidence": [ [ [ [ "Hindenburg disaster-46" ] ], [ "operation" ] ], [ [ [ "Hindenburg disaster-52" ] ], [ [ "Hindenburg disaster-52" ], "operation" ] ], [ [ [ "Hindenburg disaster-58" ] ], [ "operation" ] ] ] }, { "qid": "515231a39af241acbf14", "term": "Bulk carrier", "description": "merchant ship specially designed to transport unpackaged bulk cargo", "question": "Does Southwest Airlines use bulk carriers?", "answer": true, "facts": [ "Southwest Airlines requires jet fuel, which is brought in by bulk carrier transport.", "Southwest Airlines requires glycol for de-icing their planes, which is brought in by bulk transport." ], "decomposition": [ "Which vehicles does Southwest Airlines use to provide their services?", "What are some common products needed for the running and maintenance of #1?", "Would Southwest Airlines require #2 in quantities large enough for bulk carriers?" 
], "evidence": [ [ [ [ "Southwest Airlines fleet-1" ] ], [ [ "Aircraft maintenance-16", "Aviation fuel-20" ] ], [ [ "Boeing 737 Next Generation-41" ], "operation" ] ], [ [ [ "Southwest Airlines fleet-1", "Southwest Airlines-2" ], "no_evidence" ], [ "no_evidence" ], [ [ "Bulk carrier-1" ], "no_evidence", "operation" ] ], [ [ [ "Southwest Airlines fleet-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c16870454604d281f0f1", "term": "Pelvis", "description": "lower part of the trunk of the human body between the abdomen and the thighs (sometimes also called pelvic region of the trunk", "question": "Is dysphoria around one's pelvis treatable without surgery?", "answer": true, "facts": [ "For individuals experiencing dysphoria around having a vagina, packers can relieve symptoms.", "For people experiencing dysphoria about having a penis, there are tucking underwear and comfort gaffs available." ], "decomposition": [ "Which dysphoria could be experienced around the pelvis/genitals?", "What are the ways of treating #1?", "Are there others apart from surgery included in #2?" ], "evidence": [ [ [ [ "Gender dysphoria-7" ] ], [ [ "Gender dysphoria-16" ] ], [ [ "Gender dysphoria-16", "Gender dysphoria-18", "Gender dysphoria-20" ] ] ], [ [ [ "Dysphoria-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Dysphoria-5" ] ], [ [ "Gender dysphoria-16" ] ], [ "operation" ] ] ] }, { "qid": "5b78898b7ef289f3cd26", "term": "Keyboard layout", "description": "any specific mechanical, visual, or functional arrangement of the keys of a keyboard or typewriter", "question": "Is the QWERTY keyboard layout meant to be slow?", "answer": true, "facts": [ "The QWERTY keyboard layout was made for use on typewriters.", "Typewriters could not be used too quickly, or they would jam. " ], "decomposition": [ "What machine was QWERTY keyboard layout created for?", "Why was QWERTY keyboard layout created for #1?", "Will typing slow solve the problem of #2?" ], "evidence": [ [ [ [ "QWERTY-1" ] ], [ [ "QWERTY-17" ] ], [ "operation" ] ], [ [ [ "QWERTY-1" ] ], [ [ "QWERTY-4" ], "no_evidence" ], [ [ "QWERTY-6", "QWERTY-7" ], "operation" ] ], [ [ [ "QWERTY-1" ] ], [ [ "QWERTY-5" ] ], [ [ "QWERTY-17" ] ] ] ] }, { "qid": "3d18b12522dc4b2404ea", "term": "Longitude", "description": "A geographic coordinate that specifies the east-west position of a point on the Earth's surface", "question": "Can I find my home with latitude and longitude?", "answer": true, "facts": [ "My home is a location on earth where I live. ", "Latitude and Longitude are geographic coordinate systems that identify east/west and north/south locations.", "Specific Latitude and Longitude coordinates can be used to pinpoint specific locations. ", "Every point on the earth has a corresponding latitude and longitude coordinate. " ], "decomposition": [ "What are the uses of latitude and longitude?", "What is the range of latitude and longitude?", "Does #1 and #2 make it possible to locate most people's homes?" 
], "evidence": [ [ [ [ "Geographic coordinate system-4" ], "no_evidence" ], [ [ "Geographic coordinate system-16" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Geographic coordinate system-15" ] ], [ [ "Geographic coordinate system-16" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Geographic coordinate system-15" ] ], [ [ "Geographic coordinate system-15" ], "no_evidence" ], [ [ "Geographic coordinate system-15" ] ] ] ] }, { "qid": "64fbc620641915a4a6b4", "term": "Robotics", "description": "Design, construction, operation, and application of robots", "question": "Did the Wall Street Crash of 1929 hurt the stocks of robotics companies?", "answer": false, "facts": [ "The first robotics company was formed in the 1950s", "The crash of 1929 was a single event, not one that lasted decades" ], "decomposition": [ "When did the first robotic company form?", "When did the crash of 1929 last till?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "History of robots-41" ] ], [ [ "Wall Street Crash of 1929-1" ] ], [ "operation" ] ], [ [ [ "Unimate-2" ], "no_evidence" ], [ [ "Great Depression-1" ] ], [ "operation" ] ], [ [ [ "Robotics-7" ] ], [ [ "Wall Street Crash of 1929-1" ] ], [ "operation" ] ] ] }, { "qid": "bdb553a709a06b79ed91", "term": "Snoopy", "description": "cartoon dog", "question": "Could Snoopy transmit rabies?", "answer": false, "facts": [ "Snoopy is a fictional dog.", "Fictional animals cannot transmit diseases to real people." ], "decomposition": [ "What can transmit rabies?", "What is Snoopy?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Rabies-3" ] ], [ [ "Snoopy-2" ] ], [ "operation" ] ], [ [ [ "Rabies-2" ] ], [ [ "Snoopy-1" ] ], [ "operation" ] ], [ [ [ "Rabies-15" ] ], [ [ "Snoopy-1" ] ], [ "operation" ] ] ] }, { "qid": "798252398fa282fa38e6", "term": "Messiah (Handel)", "description": "Oratorio by Handel", "question": "Would Bruce Gandy be an odd choice for Messiah (Handel)?", "answer": true, "facts": [ "Messiah (Handel) is a 1741 Oratorio by George Frideric Handel.", "Messiah (Handel) requires the following instruments: 2 trumpets; timpani; 2 oboes; 2 violins; and a viola.", "Bruce Gandy is a world renowned bagpipe player." ], "decomposition": [ "What instruments are used in Messiah (Handel)?", "What instrument is played by Bruce Gandy?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "Messiah (Handel)-3", "Orchestra-1" ], "no_evidence" ], [ [ "Bruce Gandy-1" ] ], [ "operation" ] ], [ [ [ "Structure of Handel's Messiah-9" ], "no_evidence" ], [ [ "Bruce Gandy-1" ] ], [ "operation" ] ], [ [ [ "Structure of Handel's Messiah-7" ] ], [ [ "Bruce Gandy-5" ] ], [ "operation" ] ] ] }, { "qid": "87afbace24750551d781", "term": "Popular science", "description": "Interpretation of science intended for a general audience", "question": "Is popular science used to peer review papers?", "answer": false, "facts": [ "Popular science is a simplified version of scientific work.", "Peer review uses detailed scientific information to verify papers. " ], "decomposition": [ "What is popular science?", "What types of documents does peer review use to verify papers?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Popular Science-1" ] ], [ [ "Peer review-1" ] ], [ "operation" ] ], [ [ [ "Popular science-1" ] ], [ [ "Peer review-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Popular science-1" ] ], [ [ "Scholarly peer review-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "20693ee66ad7ca890e08", "term": "Egyptian pyramids", "description": "Ancient pyramid-shaped masonry structures located in Egypt", "question": "Do the Egyptian pyramids look the same from outside as they did when new?", "answer": false, "facts": [ "When originally built, the Great Pyramids had a thin surface of limestone covering the bricks, making them shine in the sun.", "Over the centuries, the limestone layer has been removed by thieves and erosion, exposing the more common stone bricks underneath." ], "decomposition": [ "When originally built, what was the outer layer of the Great Pyramids covered in?", "Is #1 able to withstand erosion over time?" ], "evidence": [ [ [ [ "Great Pyramid of Giza-2" ] ], [ [ "Limestone-18" ], "operation" ] ], [ [ [ "Great Pyramid of Giza-2" ] ], [ [ "Limestone-32" ] ] ], [ [ [ "Egyptian pyramids-9" ] ], [ [ "Limestone-2" ], "operation" ] ] ] }, { "qid": "909f989bd088424372c0", "term": "Ku Klux Klan", "description": "American white supremacy group", "question": "Would an average American Public University be welcoming to Ku Klux Klan members?", "answer": false, "facts": [ "American Universities are known for being liberal in their demographics.", "Groups like the Ku Klux Klan are condemned by liberal groups, as they advocate for human equality." ], "decomposition": [ "What political party is the majority in American Universities?", "Is the Ku Klux Klan welcomed by #1?" ], "evidence": [ [ [ [ "Political views of American academics-1" ] ], [ [ "Ku Klux Klan-74" ], "no_evidence" ] ], [ [ [ "University-1" ], "no_evidence" ], [ [ "Ku Klux Klan-1" ], "no_evidence", "operation" ] ], [ [ [ "Liberal arts college-1", "Liberalism-8" ], "no_evidence" ], [ [ "Ku Klux Klan-74" ], "no_evidence", "operation" ] ] ] }, { "qid": "b528be028d2232b34887", "term": "Lullaby", "description": "soothing song, usually sung to young children before they go to sleep", "question": "Would a lullaby be enough to wake Hellen Keller up?", "answer": false, "facts": [ "Lullabies can be sung or played via instrument.", "Hellen Keller was deaf. " ], "decomposition": [ "How are lullabies played?", "What did Hellen Keller suffer from?", "Would a person with #2 be able to hear #1?" ], "evidence": [ [ [ [ "Lullaby-1" ] ], [ [ "Helen Keller Day-5" ] ], [ "operation" ] ], [ [ [ "Lullaby-1" ] ], [ [ "Helen Keller-1" ] ], [ [ "Deafblindness-1" ] ] ], [ [ [ "Lullaby-11" ] ], [ [ "Helen Keller-6" ] ], [ [ "Deaf hearing-2" ] ] ] ] }, { "qid": "505424c2715e4c11506b", "term": "Accountant", "description": "practitioner of accountancy or accounting", "question": "Is accountant a difficult profession for a person suffering from Dyscalculia?", "answer": true, "facts": [ "Accounting is a math intensive profession in which a person keeps or inspects financial accounts.", "Dyscalculia is a math learning disability that impairs an individual's ability to represent and process numerical magnitude in a typical way. ", "Common symptoms of Dyscalculia include: difficulty with number sense. 
difficulty with fact and calculation", "Dyscalculia is sometimes called “number dyslexia” or “math dyslexia.”" ], "decomposition": [ "What skills does dyscalculia impair?", "What skills are necessary to be an accountant?", "Are some parts of #2 also in #1?" ], "evidence": [ [ [ [ "Dyscalculia-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Dyscalculia-1" ] ], [ [ "Accountant-2" ] ], [ "operation" ] ], [ [ [ "Dyscalculia-4" ] ], [ [ "Accounting-12" ] ], [ "operation" ] ] ] }, { "qid": "3210c34ec2b8fcda4cae", "term": "German Shepherd", "description": "Dog breed", "question": "Would Robert Wadlow tower over a German Shepherd?", "answer": true, "facts": [ "German Shepherds have a height between 22 and 26 inches.", "Robert Wadlow was the tallest man ever, reaching a height of 8 ft 11.1 inches at his death." ], "decomposition": [ "What is the typical height range of German Shepherds?", "How tall was Robert Wadlow?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "German Shepherd-3" ] ], [ [ "Robert Wadlow-2" ] ], [ "operation" ] ], [ [ [ "German Shepherd-3" ] ], [ [ "Robert Wadlow-2" ] ], [ "operation" ] ], [ [ [ "German Shepherd-3" ], "no_evidence" ], [ [ "Robert Wadlow-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "64f18d020f552f7dd4ec", "term": "Judge", "description": "official who presides over court proceedings", "question": "Would an Orthodox Presbyterian object to 1700s judge's attire?", "answer": true, "facts": [ "Judges in the 1700s wore powdered wigs and large robes during court proceedings.", "Many Orthodox Presbyterians argue that the Bible prohibits adornment such as wigs and jewelry.", "The 1 Timothy 2:8-9 Bible verse warns against adorning oneself with objects." ], "decomposition": [ "What attire did judges in the 1700's wear?", "What things are prohibited by Orthodox Presbyterians?", "Are some elements of #1 also found in #2?" ], "evidence": [ [ [ [ "Wig-16" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Court dress-110" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Wig-16" ], "no_evidence" ], [ [ "Orthodox Presbyterian Church-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5c62f57b2fd2423b5f59", "term": "Cherokee", "description": "Native American people indigenous to the Southeastern United States", "question": "Did the Cherokee people send a delegation to oppose allotment?", "answer": true, "facts": [ "The Four Mothers Society or Four Mothers Nation is a religious, political, and traditionalist organization of Muscogee Creek, Cherokee, Choctaw and Chickasaw people.", "The Four Mothers Society sent a delegation to Congress in 1906 to oppose the Curtis Act and the Dawes Act.", "With the passage of the Curtis Act in 1898 and Dawes Act, allotment became US policy and the various tribal governments were forced to allot land." ], "decomposition": [ "Which acts cause allotment to become US policy?", "Who sent as a delegate to congress to oppose #1?", "Is #2 related to the Cherokee people?" 
], "evidence": [ [ [ [ "Dawes Act-3", "Dawes Act-4" ] ], [ [ "Redbird Smith-3" ] ], [ "operation" ] ], [ [ [ "Dawes Act-1" ] ], [ [ "Dawes Act-3" ] ], [ "operation" ] ], [ [ [ "Dawes Act-1" ] ], [ [ "Dawes Act-3" ] ], [ "operation" ] ] ] }, { "qid": "088e22a7b1b40fd2d95b", "term": "Moscow Kremlin", "description": "fortified complex in Moscow, Russia", "question": "Can the Moscow Kremlin fit inside Disney Land?", "answer": true, "facts": [ "The Moscow Kremlin is a fortified complex in the middle of Moscow Russia.", "The Kremlin takes up sixty eight acres.", "Disney Land is an amusement park in California. ", "Disney Land occupies eighty five acres." ], "decomposition": [ "What is the area of Moscow Kremlin?", "What is the size of Disney Land?", "Is #1 smaller than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Disneyland-22" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Moscow Kremlin-1" ], "no_evidence" ], [ [ "Disneyland-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Moscow Kremlin-18" ] ], [ [ "Disneyland-22" ] ], [ "operation" ] ] ] }, { "qid": "1857d2f86cc49c8eb728", "term": "Euphoria", "description": "mental and emotional condition in which a person experiences intense feelings of well-being, elation, happiness and excitement", "question": "Is euphoria associated with drug addiction?", "answer": true, "facts": [ "Euphoria is a state of unusually extreme happiness.", "Several drugs are known to artificially induce this reaction including cannabis and opium." ], "decomposition": [ "What is euphoria?", "Do some drugs create the feeling of #1?" ], "evidence": [ [ [ [ "Euphoria-1" ] ], [ [ "Euphoria-15" ] ] ], [ [ [ "Euphoria-19" ] ], [ [ "Euphoria-19" ] ] ], [ [ [ "Euphoria-1" ] ], [ [ "Euphoria-15", "Euphoria-16" ], "operation" ] ] ] }, { "qid": "e1cce18241839245a516", "term": "Agnosticism", "description": "view that the existence of any deity is unknown or unknowable", "question": "Does Billy Graham support agnosticism?", "answer": false, "facts": [ "Bill Graham was a prominent American evangelical leader.", "In Christianity, evangelism is the commitment to or act of publicly preaching (ministry) of the Gospel with the intention to share the message and teachings of Jesus Christ. ", "Agnosticism is the belief that humanity is unsure if God exists.", "Evangelical Christians share the belief that God exists." ], "decomposition": [ "What religion does Billy Graham subscribe to?", "Does #1 allow for the uncertainty of God's existence?" ], "evidence": [ [ [ [ "Billy Graham-1" ] ], [ [ "Evangelicalism-1" ], "no_evidence", "operation" ] ], [ [ [ "Billy Graham-1" ] ], [ [ "Evangelicalism-1" ], "operation" ] ], [ [ [ "Billy Graham-1" ] ], [ [ "Evangelicalism-1" ] ] ] ] }, { "qid": "07c218fd4846cf6b77a8", "term": "Milky Way", "description": "Spiral galaxy containing our Solar System", "question": "Is Ganymede in the Milky Way galaxy?", "answer": true, "facts": [ "Ganymede is a moon of Jupiter.", "Jupiter is the largest planet in our solar system.", "The solar system is part of the Milky Way galaxy." ], "decomposition": [ "What does Ganymede orbit?", "What larger astronomical system is #1 part of?", "Is #2 located in the Milky Way?" 
], "evidence": [ [ [ [ "Ganymede (moon)-1" ] ], [ [ "Ganymede (moon)-1" ] ], [ [ "Solar System-6" ] ] ], [ [ [ "Ganymede (moon)-1" ] ], [ [ "Milky Way-1" ] ], [ "operation" ] ], [ [ [ "Ganymede (moon)-1" ] ], [ [ "Jupiter-1" ] ], [ [ "Milky Way-1" ] ] ] ] }, { "qid": "abcfc72e32a9c180db0f", "term": "Internet troll", "description": "Person who sows discord on the Internet", "question": "Can you avoid internet trolls on reddit?", "answer": false, "facts": [ "Internet Trolls flock to any popular platform on the internet.", "Reddit is the 19th most popular website online." ], "decomposition": [ "What types of sites do internet trolls go to?", "What is the popularity ranking of Reddit compared to other websites", "Would #1's be likely to go to a site ranked #2 in popularity? " ], "evidence": [ [ [ [ "Internet troll-1" ] ], [ [ "Reddit-2" ] ], [ [ "Internet troll-37" ] ] ], [ [ [ "Internet troll-37" ] ], [ [ "Reddit-2" ] ], [ "operation" ] ], [ [ [ "Internet troll-1" ] ], [ [ "Reddit-2" ] ], [ "operation" ] ] ] }, { "qid": "42b5997366dc3f987db2", "term": "Giraffe", "description": "Tall African ungulate", "question": "Could Javier Sotomayor jump over the head of the average giraffe?", "answer": false, "facts": [ "Fully grown giraffes stand 4.3–5.7 m (14.1–18.7 ft) tall.", "Javier Sotomayor is the current world record holder in the long jump, with a personal best of 2.45 m (8 ft 1/2 in)." ], "decomposition": [ "How tall are giraffes?", "What is Javier Sotomayor's personal record in the high jump?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Giraffe-16" ] ], [ [ "Javier Sotomayor-1" ] ], [ "operation" ] ], [ [ [ "Giraffe-16" ] ], [ [ "Javier Sotomayor-1" ] ], [ "operation" ] ], [ [ [ "Giraffe-16" ] ], [ [ "Javier Sotomayor-1" ] ], [ "operation" ] ] ] }, { "qid": "5a5cdf519ba94a287202", "term": "Goofy", "description": "Disney cartoon character", "question": "Could Goofy have counted nine planets in his first year?", "answer": true, "facts": [ "Goofy was created in 1932", "Pluto (the celestial object) was discovered in 1930 and labeled a planet" ], "decomposition": [ "When was Goofy first created?", "What year was the ninth planet found?", "Was #1 after #2?" ], "evidence": [ [ [ [ "Goofy-1" ] ], [ [ "Pluto-2" ] ], [ [ "Goofy-1", "Pluto-2" ] ] ], [ [ [ "Goofy-1" ] ], [ [ "Pluto-2" ] ], [ "operation" ] ], [ [ [ "Goofy-1" ] ], [ [ "Pluto-2" ] ], [ "operation" ] ] ] }, { "qid": "06ebd3ca1fe98d5796f7", "term": "Douglas Adams", "description": "British author and humorist", "question": "Did Douglas Adams use email as a child?", "answer": false, "facts": [ "Douglas Adams was born in 1952.", "Modern email did not emerge until 1977." ], "decomposition": [ "When was Douglas Adams born?", "What year did email begin?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Douglas Adams-1" ] ], [ [ "Email-1" ] ], [ "operation" ] ], [ [ [ "Douglas Adams-1" ] ], [ [ "Email-6" ] ], [ "operation" ] ], [ [ [ "Douglas Adams-1" ] ], [ [ "Email-1" ] ], [ "operation" ] ] ] }, { "qid": "315575d5cbdd1d154744", "term": "Kurt Cobain", "description": "American singer, composer, and musician", "question": "Would Kurt Cobain have benefited from Project Semicolon?", "answer": true, "facts": [ "Project Semicolon is an American nonprofit organization known for its advocacy of mental health wellness and its focus as an anti-suicide initiative.", "During the last years of his life, Cobain struggled with heroin addiction and chronic health problems such as depression.", "Cobain died at the age of 27 from apparent suicide by gunshot." ], "decomposition": [ "What problems does Project Semicolon work to solve?", "Did Kurt Cobain have any of the problems listed in #1?" ], "evidence": [ [ [ [ "Project Semicolon-1" ] ], [ [ "Suicide of Kurt Cobain-2" ] ] ], [ [ [ "Project Semicolon-1" ] ], [ [ "Kurt Cobain-3" ] ] ], [ [ [ "Project Semicolon-5" ] ], [ [ "Kurt Cobain-47", "Kurt Cobain-49", "Project Semicolon-5" ] ] ] ] }, { "qid": "43da475d5c2db929c458", "term": "Arctic Ocean", "description": "The smallest and shallowest of the world's five major oceans, located in the north polar regions", "question": "Could the Eiffel Tower be completely submerged at the Arctic Ocean's deepest point?", "answer": true, "facts": [ "The deepest point in the Arctic Ocean is 18,210 feet below the surface.", "The Eiffel Tower is 1,063 feet tall." ], "decomposition": [ "How deep is the deepest point in the Arctic Ocean?", "How tall is the Eiffel Tower?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "Molloy Deep-2" ] ], [ [ "Eiffel Tower-3" ] ], [ "operation" ] ], [ [ [ "Molloy Deep-2" ] ], [ [ "Eiffel Tower-3" ] ], [ [ "Eiffel Tower-3", "Molloy Deep-2" ], "operation" ] ], [ [ [ "Fram Strait-4" ] ], [ [ "Eiffel Tower-3" ] ], [ "operation" ] ] ] }, { "qid": "d7886984475ee7c616a2", "term": "Haiku", "description": "very short form of Japanese poetry", "question": "Is Lines on the Antiquity of Microbes briefer than any haiku?", "answer": true, "facts": [ "A haiku is a short Japanese poem that follows a 5, 7, 5 syllable structure.", "Lines on the Antiquity of Microbes, also known simply as Fleas is said to be the shortest poem written.", "Lines on the Antiquity of Microbes is made of one brief phrase: Adam. Had 'em." ], "decomposition": [ "How long is a haiku?", "How long is Lines on the Antiquity of Microbes?", "Is #2 shorter than #1?" ], "evidence": [ [ [ [ "Haiku-2" ] ], [ [ "Lines on the Antiquity of Microbes-1", "Lines on the Antiquity of Microbes-3" ] ], [ "operation" ] ], [ [ [ "Haiku-2" ], "no_evidence" ], [ [ "Lines on the Antiquity of Microbes-1" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Haiku-2" ] ], [ [ "Lines on the Antiquity of Microbes-3" ] ], [ "operation" ] ] ] }, { "qid": "ecd7594494e88099aeac", "term": "Goblin shark", "description": "Deep-sea shark", "question": "Would a goblin shark eat at Crossroads Kitchen?", "answer": false, "facts": [ "Goblin sharks are carnivores that subsist on other fish, cephalopods and crustaceans", "Crossroads Kitchen is a vegan restaurant in Los Angeles", "Vegans do not consume any animal products" ], "decomposition": [ "What is the goblin shark's diet-based classification?", "What kind of food is served at Crossroads Kitchen?", "Would a #1 typically consume #2?" 
], "evidence": [ [ [ [ "Goblin shark-12" ] ], [ [ "Crossroads Kitchen-1" ] ], [ "operation" ] ], [ [ [ "Goblin shark-12" ] ], [ [ "Crossroads Kitchen-6" ] ], [ [ "Goblin shark-12" ], "operation" ] ], [ [ [ "Goblin shark-12" ] ], [ [ "Crossroads Kitchen-1" ] ], [ "operation" ] ] ] }, { "qid": "2ed50522610a3683933f", "term": "Jalapeño", "description": "Hot pepper", "question": "Is jalapeno heat outclassed by Bhut jolokia?", "answer": true, "facts": [ "The Scoville scale measures how hot peppers are.", "The jalapeno pepper has a Scoville scale rating of between 3,500 and 3,600 SHU (Scoville Heat Units).", "The Bhut jolokia (ghost pepper) has a Scoville scale rating of 1 million SHU (Scoville Heat Units)." ], "decomposition": [ "How many Scoville units does a Jalapeno have?", "How many Scoville units does a Bhut jolokia have?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Jalapeño-16" ] ], [ [ "Bhut jolokia-4" ] ], [ "operation" ] ], [ [ [ "Jalapeño-1" ] ], [ [ "Bhut jolokia-2" ] ], [ "operation" ] ], [ [ [ "Jalapeño-16" ] ], [ [ "Race to grow the hottest pepper-1" ] ], [ "operation" ] ] ] }, { "qid": "a4de29904c45a2d17511", "term": "Linux", "description": "Family of free and open-source software operating systems based on the Linux kernel", "question": "If you're running focal fossa, are you using linux?", "answer": true, "facts": [ "Focal Fossa is the most recent Ubuntu release.", "Ubuntu is a Linux distribution. " ], "decomposition": [ "Which operating system was codenamed focal fossa?", "Is #1 a Linux distribution?" ], "evidence": [ [ [ [ "Ubuntu version history-146" ] ], [ [ "Ubuntu-1" ], "operation" ] ], [ [ [ "Ubuntu version history-146" ] ], [ [ "Ubuntu-1" ] ] ], [ [ [ "Ubuntu-2" ] ], [ "operation" ] ] ] }, { "qid": "0c30c162923eb59ae816", "term": "Badminton", "description": "racquet sport", "question": "Are birds important to badminton?", "answer": true, "facts": [ "Badminton is played with racquets and a shuttlecock.", "A shuttlecock is a projectile made of feathers attached to a cork base.", "Birds have feathers covering their body." ], "decomposition": [ "What are the equipment needed to play badminton?", "Is any of #1 made with a bird product?" ], "evidence": [ [ [ [ "Badminton-1" ] ], [ [ "Badminton-3" ], "operation" ] ], [ [ [ "Badminton-35" ], "no_evidence" ], [ [ "Shuttlecock-4" ] ] ], [ [ [ "Badminton-28" ], "no_evidence" ], [ [ "Badminton-28" ], "no_evidence" ] ] ] }, { "qid": "f8e83518c769ad012589", "term": "Banana", "description": "edible fruit", "question": "Would you buy bananas for tostones?", "answer": false, "facts": [ "Tostones are a Latin American twice fried dish made of plantains.", "Plantains resemble bananas, but are not bananas." ], "decomposition": [ "What ingredients are used to make tostones?", "Are bananas included in #1?" ], "evidence": [ [ [ [ "Tostones-2" ] ], [ "operation" ] ], [ [ [ "Tostones-2" ] ], [ "operation" ] ], [ [ [ "Tostones-1" ] ], [ "operation" ] ] ] }, { "qid": "42f310b71bdd412d2057", "term": "Dungeons & Dragons", "description": "Fantasy role-playing game", "question": "Is Dungeons and Dragons a game well suited for solo play?", "answer": false, "facts": [ "Dungeons and Dragons requires one person to act as the \"Dungeon Master\" to construct the world for the other players to roleplay in.", "Dungeons and Dragons cannot work without at least one DM and two players." ], "decomposition": [ "How many basic roles must be accounted for in order to play a game of Dungeons and Dragons?", "Is #1 equal to one?" 
], "evidence": [ [ [ [ "Dungeons & Dragons-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Dungeons & Dragons-2" ] ], [ "operation" ] ], [ [ [ "Dungeons & Dragons-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1479ad9ab9d5dbc5c140", "term": "The Matrix", "description": "1999 science fiction action film directed by the Wachowskis", "question": "Do the directors of The Matrix advocate for transgender rights?", "answer": true, "facts": [ "Lilly Wachowski is a trans woman who was a director of The Matrix.", "Lena Wachowski is a trans woman who was a director of The Matrix.", "The Wachowski sisters speak actively about viewing their films through a \"lens of transness\"" ], "decomposition": [ "Who directed The Matrix?", "Are #1 transgender rights advocates?" ], "evidence": [ [ [ [ "The Matrix-1" ] ], [ [ "The Wachowskis-1", "The Wachowskis-57" ] ] ], [ [ [ "The Matrix-1" ] ], [ [ "The Wachowskis-57" ], "operation" ] ], [ [ [ "The Wachowskis-2" ] ], [ [ "The Wachowskis-53" ], "operation" ] ] ] }, { "qid": "a874e8bbb2230cdacf19", "term": "Chickpea", "description": "species of plant", "question": "Would vegans consider chickpeas for a tuna substitute?", "answer": true, "facts": [ "Vegans do not eat any animal products, including milk, eggs, meat, and cheese. ", "Vegan alternatives for things like mayo exist. ", "There are vegan recipes for \"tuna\" online that include chickpeas as the main component." ], "decomposition": [ "What kinds of foods do vegans avoid?", "What are replacements for #1 in vegan diets called?", "What are some #2 for tuna?", "Are chickpeas included in the recipes of any of #3?" ], "evidence": [ [ [ [ "Veganism-1" ] ], [ [ "Meat analogue-16" ] ], [ "no_evidence", "operation" ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Veganism-34" ] ], [ [ "Chickpea-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Chickpea-1", "Meat analogue-1" ] ], [ [ "Chickpea-17" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0c715b8610f8c79b1ecd", "term": "Giant panda", "description": "species of mammal", "question": "Does giant panda have colors that differ from yin yang?", "answer": false, "facts": [ "The giant panda is a mammal that is black and white in color.", "Yin yang is an ancient Chinese concept represented by the colors black and white." ], "decomposition": [ "What colors does the concept \" yin and yang\" represent?", "What are colors of adult giant pandas?", "Is any of #2 excluded from #1?" ], "evidence": [ [ [ [ "Yin and yang-40" ] ], [ [ "Giant panda-12" ] ], [ "operation" ] ], [ [ [ "Yin and yang-40" ] ], [ [ "Giant panda-11" ] ], [ "operation" ] ], [ [ [ "Yin and yang-1" ] ], [ [ "Giant panda-13" ] ], [ "operation" ] ] ] }, { "qid": "10bc0e26996fa472791d", "term": "Nine Inch Nails", "description": "American industrial rock band", "question": "Did Nine Inch Nails inspire Aretha Franklin's sound?", "answer": false, "facts": [ "Nine Inch Nails is a industrial heavy rock band.", "Aretha Franklin was a soul and R&B singer.", "Aretha Franklin began singing in a gospel choir.", "Nine Inch Nails lyrics have been described as profane and anti-God." ], "decomposition": [ "What genre are Nine Inch Nails' music?", "What genre of songs does Aretha Franklin sing?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Nine Inch Nails-1" ] ], [ [ "Aretha Franklin-27" ] ], [ "operation" ] ], [ [ [ "Nine Inch Nails-1" ] ], [ [ "Aretha Franklin-27" ] ], [ "operation" ] ], [ [ [ "Nine Inch Nails-1" ] ], [ [ "Aretha Franklin-1" ] ], [ "operation" ] ] ] }, { "qid": "47cb228abaa8a8106df9", "term": "Hundred Years' War", "description": "Series of conflicts and wars between England and France during the 14th and 15th-century", "question": "Was Hundred Years' War a misnomer?", "answer": true, "facts": [ "A misnomer is a wrong or inaccurate name.", "The Hundred Years' War lasted for longer than one hundred years.", "The Hundred Years' War lasted from 1337-1453." ], "decomposition": [ "How many years the the Hundred Years' War actually last?", "Is #1 greater or less than 100?" ], "evidence": [ [ [ [ "Hundred Years' War-1" ] ], [ "operation" ] ], [ [ [ "Hundred Years' War-1" ] ], [ "operation" ] ], [ [ [ "Hundred Years' War-1" ] ], [ "operation" ] ] ] }, { "qid": "97054ba9a1bcd06a31d6", "term": "Northern fur seal", "description": "The largest fur seal in the northern hemisphere", "question": "Would a northern fur seal pass a driving test?", "answer": false, "facts": [ "A driving test measures the ability to drive according to traffic laws.", "The northern fur seal does not have the ability to legally drive." ], "decomposition": [ "What does a driving test require one to do?", "Does a northern fur seal have the ability to accomplish all of #1?" ], "evidence": [ [ [ [ "Driving test-1" ] ], [ [ "Northern fur seal-2" ], "operation" ] ], [ [ [ "Driving test-1" ] ], [ [ "Northern fur seal-2" ] ] ], [ [ [ "Driving test-1", "Driving test-4" ], "no_evidence" ], [ [ "Northern fur seal-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "f66cf3ddea2d87e8e572", "term": "Sicilian Defence", "description": "Chess opening", "question": "Would Lee Sedol understand the complexities of the Sicilian Defence?", "answer": false, "facts": [ "Lee Sedol is a former South Korean professional Go player of 9 dan rank.", "Go has a different rule set than chess.", "It would not be worthwhile to spend time understanding the complexities of a game that you don't play professionally." ], "decomposition": [ "What is Lee Sedol's profession?", "The Sicilian defense is a tactic of which game?", "Would #1 typically invest time in learning about #2?" ], "evidence": [ [ [ [ "Lee Sedol-1" ] ], [ [ "Sicilian Defence-1" ] ], [ "operation" ] ], [ [ [ "Lee Sedol-1" ] ], [ [ "Sicilian Defence, Accelerated Dragon-1" ] ], [ "operation" ] ], [ [ [ "Lee Sedol-3" ], "operation" ], [ [ "Sicilian Defence-6" ], "operation" ], [ "operation" ] ] ] }, { "qid": "cac049d9ec0b86a7f911", "term": "Starbucks", "description": "American multinational coffee company", "question": "Would menu at Chinese Starbucks be familiar to an American?", "answer": false, "facts": [ "American Starbucks sells a number of coffee beverages like Lattes and Cappucino.", "The Chinese Starbucks menu focuses on teas such as Blackcurrant Raspberry Juiced Tea and Iced Shaken Mango Herbal Juiced Tea.", "Mooncakes, Chinese bakery products traditionally eaten during the Mid-Autumn Festival, are popular items at Chinese Starbucks." ], "decomposition": [ "What is on the typical American Starbucks' menu?", "What is on the typical Chinese Starbucks' menu?", "Are most things in #2 not found in #1?" 
], "evidence": [ [ [ [ "Starbucks-16" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Starbucks-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Starbucks-1" ] ], [ [ "Starbucks-27" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "f2c7e13e2f02dd6c8849", "term": "Saturn", "description": "Sixth planet from the Sun in the Solar System", "question": "Are Saturn's famous rings solid?", "answer": false, "facts": [ "The rings are made mostly of dust and particles.", "The ring matter is constantly in orbital motion around Saturn." ], "decomposition": [ "In what form do Saturn's famous rings exist in?", "Can #1 be considered solid?" ], "evidence": [ [ [ [ "Rings of Saturn-16" ] ], [ [ "Solid-1" ], "operation" ] ], [ [ [ "Saturn-30" ] ], [ [ "Solid-1" ] ] ], [ [ [ "Saturn-3" ] ], [ [ "Debris-1", "Ice-1", "Rock (geology)-1" ], "operation" ] ] ] }, { "qid": "4f08d9a0fba58ad9ca73", "term": "Taco Bell", "description": "American fast-food chain", "question": "Will more people go in and out of Taco Bell than a Roy Rogers each year?", "answer": true, "facts": [ "Taco Bell has over 7,072 restaurants as of 2018.", "Roy Rogers had over 600 restaurants at its peak.", "Roy Rogers has 48 locations as of 2019." ], "decomposition": [ "How many restaurants does Taco Bell have?", "How many restaurants does Roy Rogers have?", "Is #1 significantly greater than #2?" ], "evidence": [ [ [ [ "Taco Bell-1" ] ], [ [ "Roy Rogers Restaurants-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Roy Rogers Restaurants-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Roy Rogers Restaurants-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e154f37283d0f9e02277", "term": "Noah", "description": "Biblical figure", "question": "Was Noah associated with a dove?", "answer": true, "facts": [ "Noah sailed his Ark when the world was flooded.", "After the flood, he sent his dove to find land." ], "decomposition": [ "Which famous bible story was Noah associated with?", "Which creatures were the important characters in #1?", "Is any of #2 a dove?" ], "evidence": [ [ [ [ "Noah-1" ] ], [ [ "Doves as symbols-5" ] ], [ "operation" ] ], [ [ [ "Genesis flood narrative-1", "Noah-32" ] ], [ [ "Columbidae-36", "Noah-2" ] ], [ "operation" ] ], [ [ [ "Noah-2" ] ], [ [ "Sign of the Dove-6" ] ], [ "operation" ] ] ] }, { "qid": "4a8361174e3f01cf8c2e", "term": "Achilles", "description": "Greek mythological hero", "question": "Was Achilles a direct descendent of Gaia?", "answer": true, "facts": [ "Achilles was the son of a Nereid. ", "The Nereids were the 50 daughters of Nereus.", "Nereus was the eldest son of the union between Gaia and Pontus." ], "decomposition": [ "Who were Achilles' parents?", "Who were the children of Gaia?", "Were any of #1 the children of #2?" 
], "evidence": [ [ [ [ "Achilles-1" ] ], [ [ "Gaia-1", "Nereus-1" ] ], [ [ "Thetis-1" ], "operation" ] ], [ [ [ "Achilles-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Achilles-1" ] ], [ [ "Nereus-1" ] ], [ [ "Thetis-1" ], "operation" ] ] ] }, { "qid": "0d527db1bc94c695fb74", "term": "Starbucks", "description": "American multinational coffee company", "question": "Do any Islamic dominated countries have a Starbucks?", "answer": true, "facts": [ "Starbucks is a coffee shop found in numerous countries including USA, China, and the United Arab Emirates.", "The United Arab Emirates has a Starbucks in Dubai.", "Islam is the largest and the official state religion of the United Arab Emirates.", "Pew Research estimates state that over 76% of the citizens of the United Arab Emirates are Islamic." ], "decomposition": [ "Which countries does Starbucks have branch(es) in?", "Is any of #1 an Islamic dominated country?" ], "evidence": [ [ [ [ "Starbucks-31" ] ], [ [ "Islam by country-1" ] ] ], [ [ [ "Starbucks-26" ] ], [ [ "Starbucks-26" ], "operation" ] ], [ [ [ "Middle East-9", "Starbucks-37" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c406445894101a7f4d71", "term": "Days of Our Lives", "description": "American daytime soap opera", "question": "Is a thousand dollars per Days of Our Lives episodes preferred to other soaps?", "answer": false, "facts": [ "Days of Our Lives has aired around 13,900 episodes as of 2020.", "General Hospital aired their 14,000th episode on February 23, 2018." ], "decomposition": [ "How many episodes of 'Days of Our Lives' are there as of 2020?", "How many episodes of 'General Hospital' have been aired as of 2020?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Days of Our Lives-3" ], "no_evidence" ], [ [ "General Hospital-1", "General Hospital-3" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Days of Our Lives-3" ], "no_evidence" ], [ [ "General Hospital-10" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0592bdf18748bbc951f8", "term": "Pickled cucumber", "description": "Cucumber pickled in brine, vinegar, or other solution", "question": "Is pickled cucumber ever red?", "answer": true, "facts": [ "Koolickles are a variety of pickled cucumber made with brine and red kool-aid.", "Korean cucumber kimchi is flavored with korean pepper powder.", "Korean pepper powder is red in color. " ], "decomposition": [ "What are the Koolickles made from?", "What is Korean cucumber kimchi flavored with?", "Are any of #1 or #2 red in color?" ], "evidence": [ [ [ [ "Pickled cucumber-19" ] ], [ [ "Kimchi-13", "Kimchi-25", "Kimchi-28", "Kimchi-31" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Pickled cucumber-19" ], "no_evidence" ], [ [ "Kimchi-1" ] ], [ "operation" ] ], [ [ [ "Pickled cucumber-19" ] ], [ [ "Kimchi-1" ] ], [ "operation" ] ] ] }, { "qid": "cf95bbecccc040a95dbb", "term": "Acetylene", "description": "chemical compound", "question": "Does welding with acetylene simulate the temperature of a star?", "answer": true, "facts": [ "Acetylene is used for oxyacetylene welding ", "An acetylene/oxygen flame burns at about 3,773 K ", "The star Betelgeuse has a surface temperature of 3,500 K" ], "decomposition": [ "What temperature is reached when welding with acetylene?", "What temperature can stars reach?", "Are #1 and #2 similar in magnitude?" 
], "evidence": [ [ [ [ "Acetylene-14" ] ], [ [ "Star-91" ] ], [ "operation" ] ], [ [ [ "Acetylene-8" ], "no_evidence" ], [ [ "Star-91" ] ], [ "operation" ] ], [ [ [ "Acetylene-14" ] ], [ [ "Star-91" ] ], [ "operation" ] ] ] }, { "qid": "73cbfb23a5cf28915b4e", "term": "Sandal", "description": "Type of footwear with an open upper", "question": "Is it comfortable to wear sandals outside Esperanza Base?", "answer": false, "facts": [ "Sandals are a type of footwear that leave parts of the foot exposed", "Esperanza Base is located in Antarctica", "Average temperatures in Antarctica range from -10.5C to 1.4C" ], "decomposition": [ "Where is Esperanza Base Located?", "What are the average temperatures in #1?", "What are the defining characteristics of sandals?", "Would #3 make a person's feet comfortable in #2 temperatures?" ], "evidence": [ [ [ [ "Esperanza Base-1" ] ], [ [ "Esperanza Base-7" ] ], [ [ "Sandal-1", "Sandal-2" ] ], [ "operation" ] ], [ [ [ "Esperanza Base-1" ] ], [ [ "Esperanza Base-7" ] ], [ [ "Sandal-1" ] ], [ [ "Esperanza Base-7", "Sandal-1" ], "operation" ] ], [ [ [ "Esperanza Base-1" ] ], [ [ "Esperanza Base-7" ] ], [ [ "Sandal-1" ] ], [ "operation" ] ] ] }, { "qid": "ae4cedb73e82c7f8de75", "term": "Art dealer", "description": "person that buys and sells works of art", "question": "Would an art dealer prize a print of a Van Goh? ", "answer": false, "facts": [ "Van Goh painted many valuable pieces of artwork in his lifetime.", "Prints of Van Goh's artwork are readily available at a low price." ], "decomposition": [ "What kind of art do art dealers typically look for?", "What is the cost of a typically Van Goh print?", "Is something priced as #2 considered #1?" ], "evidence": [ [ [ [ "Art dealer-2" ] ], [ [ "Dutch art-15" ] ], [ "operation" ] ], [ [ [ "Art dealer-1" ], "no_evidence" ], [ [ "Art forgery-18" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Art dealer-2" ], "no_evidence" ], [ [ "Vincent van Gogh-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "78ff5b06fb937729cc03", "term": "Lentil", "description": "Species of plant", "question": "Would Jean Harris's victim have avoided lentils?", "answer": true, "facts": [ "Jean Harris spent 12 years in jail for killing her lover Herman Tarnower.", "Herman Tarnower was the creator of the Scarsdlae Diet.", "The Scarsdale Diet focuses on a simplified diet plan and forbids corn, beans, potatoes, lentils, and any beans except green beans." ], "decomposition": [ "Who was Jean Harris' victim?", "What is #1 famous for?", "What foods are forbidden in #2?", "Are lentils listed in #3?" ], "evidence": [ [ [ [ "Jean Harris-1" ] ], [ [ "Scarsdale diet-1" ] ], [ [ "Herman Tarnower-4" ] ], [ [ "Lentil-30" ], "no_evidence", "operation" ] ], [ [ [ "Jean Harris-1" ] ], [ [ "Herman Tarnower-1" ] ], [ [ "Scarsdale diet-2" ] ], [ [ "Lentil-30" ] ] ], [ [ [ "Jean Harris-6" ] ], [ [ "Herman Tarnower-1" ] ], [ [ "Scarsdale diet-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "40eb6c9c236c77164794", "term": "The Doctor (Doctor Who)", "description": "fictional character from Doctor Who", "question": "Would Marvel's Gateway be envious of the Doctor (Doctor Who)'s TARDIS machine?", "answer": false, "facts": [ "The Doctor (Doctor Who) used the TARDIS, a largely unreliable time traveling machine, to travel through time and space.", "Gateway is a Marvel comic character linked to the X-Men comics.", "Gateway has the power to create wormholes that allow him to travel through time and space." 
], "decomposition": [ "What is the TARDIS's special power?", "What is Gateway's special power?", "Is #1 different than #2?" ], "evidence": [ [ [ [ "TARDIS-13", "TARDIS-14" ] ], [ [ "Gateway (comics)-15" ] ], [ "operation" ] ], [ [ [ "TARDIS-1" ] ], [ [ "Gateway (comics)-15" ] ], [ "operation" ] ], [ [ [ "TARDIS-1" ] ], [ [ "Gateway (comics)-1" ] ], [ "operation" ] ] ] }, { "qid": "01c27885cede6741bc53", "term": "The CW", "description": "American broadcast television network", "question": "Did Supernatural break 2001 CW debuting shows seasons record?", "answer": true, "facts": [ "Smallville debuted on the CW in 2001.", "Smallville had the record of most CW seasons for a show with 10.", "Supernatural concluded its run with its record breaking 15th season on the CW." ], "decomposition": [ "What was the debuting shows in a seasons record as of 2001 for CW?", "What was Supernatural's highest debuting shows in a season?", "Is #2 higher than #1?" ], "evidence": [ [ [ [ "The WB-16" ], "no_evidence" ], [ [ "Supernatural (American TV series)-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "The CW-8" ], "no_evidence" ], [ [ "Supernatural (American TV series)-102", "Supernatural (season 1)-20" ], "no_evidence" ], [ "operation" ] ], [ [ [ "One Tree Hill (TV series)-4" ] ], [ [ "Supernatural (American TV series)-2" ] ], [ "operation" ] ] ] }, { "qid": "2c774c2108bfaef1032c", "term": "Internet slang", "description": "Slang languages used by different people on the Internet", "question": "Did Alfred Hitchcock include internet slang in his films?", "answer": false, "facts": [ "Alfred Hitchcock died in 1908.", "The internet began developing slang in the 1990's." ], "decomposition": [ "What year did Alfred Hitchcock die?", "When did internet become available for people?", "Is #1 after #2?" ], "evidence": [ [ [ [ "Alfred Hitchcock-1" ] ], [ [ "History of the World Wide Web-11" ] ], [ "operation" ] ], [ [ [ "Alfred Hitchcock-1" ] ], [ [ "World Wide Web-2" ] ], [ "operation" ] ], [ [ [ "Alfred Hitchcock-70" ] ], [ [ "Internet-10" ] ], [ "operation" ] ] ] }, { "qid": "6fe2a7efe785ac391298", "term": "Norman, Oklahoma", "description": "City in Oklahoma, United States", "question": "Is Norman Oklahoma named after a viking?", "answer": false, "facts": [ "The Normans invaded England in 1066 and were originally vikings from Scandinavia.", "Norman Oklahoma was first surveyed by land surveyor Abner Norman in the 1800s.", "1066 is said to be the end of the Viking Era." ], "decomposition": [ "Who was the city of Norman, Oklahoma named after?", "Was #1 a viking?" ], "evidence": [ [ [ [ "Norman, Oklahoma-2" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Norman, Oklahoma-5" ] ], [ "operation" ] ], [ [ [ "Norman, Oklahoma-2" ] ], [ [ "Vikings-1" ] ] ] ] }, { "qid": "7acebd6478d0935c7a3b", "term": "Pi", "description": "Ratio of the circumference of a circle to its diameter", "question": "Can every digit in Pi be memorized?", "answer": false, "facts": [ "The digits of Pi are infinite. ", "The human mind cannot hold an infinite amount of information." ], "decomposition": [ "How many digits are in Pi?", "Can the human mind memorize #1 amount of information?" 
], "evidence": [ [ [ [ "Pi-4" ] ], [ [ "Memory-10", "Short-term memory-21" ], "no_evidence" ] ], [ [ [ "Pi-16" ] ], [ "operation" ] ], [ [ [ "Pi-3" ] ], [ [ "Piphilology-65" ], "operation" ] ] ] }, { "qid": "edb3d62aa83c9ec8bf4c", "term": "Red Sea", "description": "Arm of the Indian Ocean between Arabia and Africa", "question": "Would it be unusual to find a yellow perch in the Red Sea?", "answer": true, "facts": [ "The Red Sea is one of the saltiest bodies of water in the world.", "The yellow perch is a freshwater perciform fish native to much of North America." ], "decomposition": [ "What type of water do yellow perches usually live in?", "What type of water is present in the red sea?", "IS #2 the same as #1?" ], "evidence": [ [ [ [ "Yellow perch-1" ] ], [ [ "Red Sea-8" ] ], [ "operation" ] ], [ [ [ "Yellow perch-1" ] ], [ [ "Red Sea-16" ] ], [ "operation" ] ], [ [ [ "Yellow perch-17" ] ], [ [ "Red Sea-16" ] ], [ "operation" ] ] ] }, { "qid": "0dad98b7dde01ed9f144", "term": "Greyhound", "description": "Dog breed used in dog racing", "question": "Do people associate greyhounds with the movie 'Homeward Bound'?", "answer": false, "facts": [ "The movie homeward bound features a golden retriever. ", "The movie homeward bound features a pit bull type dog.", "There are no greyhounds in homeward bound." ], "decomposition": [ "What are the two types of dogs that are lost in Homeward Bound?", "Is a greyhound listed in #1?" ], "evidence": [ [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ [ "Homeward Bound: The Incredible Journey-2" ], "operation" ] ], [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ "operation" ] ], [ [ [ "Homeward Bound: The Incredible Journey-2" ] ], [ "operation" ] ] ] }, { "qid": "19e3882ec4a562b57d3c", "term": "Alan Turing", "description": "British mathematician and computer scientist", "question": "Did Alan Turing suffer the same fate as Abraham Lincoln?", "answer": false, "facts": [ "Alan Turing committed suicide via cyanide poisoning.", "Abraham Lincoln was killed by a gunshot wound to the head." ], "decomposition": [ "What did Alan Turing die of?", "What did Abraham Lincoln die of?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Alan Turing-57" ] ], [ [ "Abraham Lincoln-4" ] ], [ "operation" ] ], [ [ [ "Alan Turing-57" ] ], [ [ "Abraham Lincoln-113" ] ], [ "operation" ] ], [ [ [ "Alan Turing-57" ] ], [ [ "Mary Todd Lincoln-20" ] ], [ "operation" ] ] ] }, { "qid": "1bbdd43172978e342b6a", "term": "Game (hunting)", "description": "animal hunted for sport or for food", "question": "Does meat from cows fed only grass taste more like wild game?", "answer": true, "facts": [ "The food an animal eats throughout its lifetime affect the way the meat from it will taste. ", "Grass-fed cows produce meat that tends to taste more mineral-dense.", "Wild game is known for a grassy, mineral taste." ], "decomposition": [ "What is wild game known to taste like?", "What does meat from grass-fed cows typically taste like?", "Is #1 similar to #2?" 
], "evidence": [ [ [ [ "Game (hunting)-7" ] ], [ [ "Cattle feeding-26" ] ], [ [ "Cattle feeding-26", "Game (hunting)-7" ] ] ], [ [ [ "Game (hunting)-7" ] ], [ [ "Cattle feeding-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Game (hunting)-7" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3a61dcbed7e358b3aee8", "term": "Great Pyramid of Giza", "description": "Largest pyramid in the Giza Necropolis, Egypt", "question": "Can 200 men end to end cover Great Pyramid of Giza's base?", "answer": true, "facts": [ "The base of the Great Pyramid of Giza is 756 feet long.", "The average height of a man is 5 foot 9." ], "decomposition": [ "What is the height in inches of the average man?", "What is length in inches of the base of The Great Pyramid of Giza?", "What is 200 times #1?", "Is #3 more than #2?" ], "evidence": [ [ [ [ "Dinka people-3" ], "no_evidence" ], [ [ "Great Pyramid of Giza-4" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Dinka people-3" ], "no_evidence" ], [ [ "Great Pyramid of Giza-4" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Human height-46" ] ], [ [ "Great Pyramid of Giza-4" ] ], [ [ "Foot (unit)-1" ], "operation" ], [ "operation" ] ] ] }, { "qid": "efde6ff2282415b9f2f6", "term": "Samsung Galaxy S4", "description": "Android smartphone", "question": "Would General Zod prefer an iPhone over a Samsung Galaxy S4?", "answer": false, "facts": [ "General Zod is a villain.", "Apple does not allow moviemakers to give villains iPhones." ], "decomposition": [ "What movie is General Zod from?", "What is General Zod's role in #1?", "Does Apple allow moviemakers to give #2 iPhones?" ], "evidence": [ [ [ [ "General Zod-11" ] ], [ [ "General Zod-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "General Zod-11" ] ], [ [ "General Zod-2" ] ], [ "no_evidence" ] ], [ [ [ "Superman II-1" ] ], [ [ "General Zod-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9711b3fc634e6f4f471d", "term": "Alan Turing", "description": "British mathematician and computer scientist", "question": "Would World War II have been the same without Alan Turing?", "answer": false, "facts": [ "During WW2, the German Military used something called the Engima device to send messages secretly. ", "Alan Turing broke the Enigma code, allowing German messages to be understood and intercepted." ], "decomposition": [ "What code did Alan Turing discover during World War II?", "Without #1, would we have been able to beat the Germans?" ], "evidence": [ [ [ [ "Alan Turing-2" ] ], [ [ "Alan Turing-3" ], "no_evidence", "operation" ] ], [ [ [ "Alan Turing-2" ] ], [ [ "Cryptanalysis of the Enigma-1" ] ] ], [ [ [ "Alan Turing-2" ] ], [ [ "Enigma machine-1" ] ] ] ] }, { "qid": "07e2a845709bbcb30b65", "term": "JPMorgan Chase", "description": "American multinational banking and financial services holding company", "question": "Could every citizen of Samoa send a letter to a unique JPMorgan Chase employee?", "answer": true, "facts": [ "JPMorgan Chase had a total of 256,981 employees in the fourth quarter of 2019.", "The estimated population of Samoa as of July 1st, 2019 is 200,874." ], "decomposition": [ "How many employees does JPMorgan Chase have?", "What is the population of Samoa?", "Is #2 less than or equal to #1?" 
], "evidence": [ [ [ "no_evidence" ], [ [ "Samoa-64" ] ], [ "operation" ] ], [ [ [ "JPMorgan Chase-83" ] ], [ [ "Vatia, American Samoa-17" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Samoa-64" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ddc741dd2e3c8d312c80", "term": "East India Company", "description": "16th through 19th-century British trading company", "question": "Would East India Company prefer China's modern trade?", "answer": false, "facts": [ "China accounts for 4.6 trillion or 12.4% of global trade.", "The East India Company took part in half of the world's trade from the 16th to 19th century." ], "decomposition": [ "What percent of the world's trade passed through the East India Company between the 16th and 19th centuries?", "What percent of the world's trade is accounted for by China presently?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "East India Company-2" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "East India Company-41" ], "no_evidence" ], [ [ "China-55" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "East India Company-42" ], "no_evidence" ], [ [ "History of trade of the People's Republic of China-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "811081ee6a24ee0f7177", "term": "Arithmetic", "description": "Elementary branch of mathematics", "question": "Did Neanderthals use arithmetic?", "answer": false, "facts": [ "The earliest written records indicate the Egyptians and Babylonians used all the elementary arithmetic operations as early as 2000 BC.", "Neanderthals are an extinct species or subspecies of archaic humans who lived in Eurasia until about 40,000 years ago." ], "decomposition": [ "The earliest records of arithmetic use date back to when?", "When did the Neanderthals become extinct?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Arithmetic-2" ] ], [ [ "Neanderthal-1" ] ], [ "operation" ] ], [ [ [ "Arithmetic-2" ] ], [ [ "Neanderthal-2" ] ], [ [ "Neanderthal-2" ], "operation" ] ], [ [ [ "History of mathematics-10" ] ], [ [ "Neanderthal extinction-1" ] ], [ "operation" ] ] ] }, { "qid": "3b5ee7309f79042c8c9e", "term": "Folk music", "description": "Music of the people", "question": "Is metal a type of folk music?", "answer": false, "facts": [ "Folk music tends to be simple in structure, played on traditional acoustic instruments. Groups can be large but the song is designed to be playable by solo acts or small groups.", "Metal music is designed to be as loud and epic-sounding as possible, often with complex structures and almost always with electric instruments." ], "decomposition": [ "What are the basic characteristics of folk music?", "What are the characteristics of metal music?", "Does #2 exactly match #1?" ], "evidence": [ [ [ [ "Folk music-8" ] ], [ [ "Heavy metal music-4" ] ], [ "operation" ] ], [ [ [ "Folk music-15" ] ], [ [ "Heavy metal music-19" ] ], [ "operation" ] ], [ [ [ "Folk music-8" ] ], [ [ "Heavy metal music-1" ] ], [ "operation" ] ] ] }, { "qid": "9eb2815f7551d7ea8b25", "term": "Lexus", "description": "luxury vehicle division of Toyota", "question": "Did George Washington drive a Lexus?", "answer": false, "facts": [ "Lexus was established in 1989", "George Washington died in 1799" ], "decomposition": [ "In what year did George Washington die?", "What year was Lexus founded in?", "Is #1 after #2?" 
], "evidence": [ [ [ [ "George Washington-1" ] ], [ [ "Lexus-2" ] ], [ "operation" ] ], [ [ [ "George Washington-1" ] ], [ [ "Lexus-2" ] ], [ "operation" ] ], [ [ [ "George Washington-121" ] ], [ [ "Lexus-16" ] ], [ "operation" ] ] ] }, { "qid": "0e01a6c3811c737f4d78", "term": "Funeral", "description": "ceremony for a person who has died", "question": "Are there special traffic laws associated with funerals?", "answer": true, "facts": [ "Many funerals have a religious ceremony held in a chapel separate from the cemetery.", "The corpse and all the attendees have to move from the memorial site to the cemetery.", "Everyone at the funeral lines up their cars into a procession and follow special rules as they drive to keep the line assembled in transport." ], "decomposition": [ "What circumstances require special laws for the flow of traffic?", "Are funerals among #1?" ], "evidence": [ [ [ [ "Traffic code in the United States-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Traffic code in the United States-3" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "99de7a464b1a292868f3", "term": "Chinese mountain cat", "description": "Small wild cat", "question": "Can Chinese mountain cat survive in the orbit? ", "answer": false, "facts": [ "Chinese mountain cat needs to breathe oxygen for survival. ", "There is not enough oxygen for breathing in the orbit." ], "decomposition": [ "What does a Chinese mountain cat breathe?", "Is there enough #1 in space?" ], "evidence": [ [ [ [ "Chinese mountain cat-6" ], "no_evidence" ], [ [ "Outer space-56" ] ] ], [ [ [ "Breathing-18" ] ], [ [ "Outer space-56" ] ] ], [ [ [ "Chinese mountain cat-7" ], "no_evidence" ], [ [ "Orbit-1", "Outer space-55", "Single-stage-to-orbit-36" ], "operation" ] ] ] }, { "qid": "cb6819752f1688d7044b", "term": "Ham", "description": "Pork from a leg cut that has been preserved by wet or dry curing, with or without smoking", "question": "Can ham make a cut seal up quicker?", "answer": true, "facts": [ "Salt helps cleanse the skin by displacing bacteria in a cut.", "1 Cup of ham contains 1.684 mg of sodium.", "Salt is a mineral made up mostly of sodium chloride." ], "decomposition": [ "Which substances can be used to cleanse skin around a cut?", "Which elements make up salt?", "Does ham contain any of #2 in considerable quantities and salt included in #1?" ], "evidence": [ [ [ [ "Antiseptic-1", "Wound-15" ], "no_evidence" ], [ [ "Salt-1" ] ], [ [ "Ham-6" ], "no_evidence", "operation" ] ], [ [ [ "Saline (medicine)-1" ] ], [ [ "Salt-1" ] ], [ [ "Sodium chloride-22" ], "operation" ] ], [ [ [ "Salt in the Wound-1" ], "no_evidence" ], [ [ "Salt-1" ] ], [ [ "Ham-11" ], "operation" ] ] ] }, { "qid": "71682e0e8e89c5297390", "term": "Operation Barbarossa", "description": "1941 German invasion of the Soviet Union during the Second World War", "question": "Was 1941 Operation Barbarossa related to The Crusades?", "answer": true, "facts": [ "Operation Barbarossa referred to the 1941 German invasion of the Soviet Union.", "Frederick Barbarosa was the Holy Roman Empire that drowned while marching his army to The Crusades.", "The Holy Roman Empire was a medieval empire that ruled over lands including what became modern Germany." ], "decomposition": [ "What historic figures were named Barbarosa?", "Of #1, which lived during the medieval period?", "Were any of #2 active during the Crusades?" 
], "evidence": [ [ [ [ "Frederick I, Holy Roman Emperor-1" ] ], [ [ "Frederick I, Holy Roman Emperor-1" ] ], [ [ "Crusades-1", "Frederick I, Holy Roman Emperor-15" ] ] ], [ [ [ "Frederick I, Holy Roman Emperor-1" ], "no_evidence" ], [ [ "Frederick I, Holy Roman Emperor-1" ] ], [ [ "Frederick I, Holy Roman Emperor-4" ], "operation" ] ], [ [ [ "Frederick I, Holy Roman Emperor-1", "Hayreddin Barbarossa-1" ] ], [ [ "Frederick I, Holy Roman Emperor-1", "Middle Ages-1" ] ], [ [ "Frederick I, Holy Roman Emperor-4", "Third Crusade-1" ] ] ] ] }, { "qid": "fb19a4a379ea2cb76568", "term": "Prime number", "description": "Integer greater than 1 that has no positive integer divisors other than itself and 1", "question": "Would an actuary be confused about what prime numbers are?", "answer": false, "facts": [ "Actuaries must go through college and rigorous studies in mathematics to obtain their jobs.", "Prime numbers are introduced in basic high school mathematics. " ], "decomposition": [ "Which subjects do actuaries study in college before getting their jobs?", "Which subject are prime numbers taught in?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Actuary-4" ], "no_evidence" ], [ [ "Prime number-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Actuary-1" ], "no_evidence" ], [ [ "Prime number-2" ] ], [ "operation" ] ], [ [ [ "Actuary-1" ] ], [ [ "Prime number-24" ] ], [ "operation" ] ] ] }, { "qid": "ae54adce616125cc6ddd", "term": "Hound", "description": "dog type", "question": "Was animal in You're a Good Sport, Charlie Brown, hypothetically a hound?", "answer": true, "facts": [ "A hound is a type of hunting dog used to track prey.", "Hounds include Basenjis, Dachsunds, and Beagles, among others.", "Snoopy is the dog in the Charlie Brown movies and books.", "Snoopy is a Beagle." ], "decomposition": [ "What animals fall under the classification of \"hound\"?", "What kind of animal was Snoopy?", "What kind of #2 was Snoopy?", "Is #3 included in #1?" ], "evidence": [ [ [ [ "Hound-1", "Hound-3" ] ], [ [ "Beagle-1" ] ], [ [ "Snoopy-1" ] ], [ "operation" ] ], [ [ [ "Hound-1" ] ], [ [ "Snoopy-2" ] ], [ [ "Snoopy-2" ] ], [ [ "Beagle-1" ] ] ], [ [ [ "Scent hound-2" ] ], [ [ "Snoopy-7" ] ], [ [ "Snoopy-1" ] ], [ [ "Beagle-1" ], "operation" ] ] ] }, { "qid": "0e29451fbb512170bddd", "term": "J. K. Rowling", "description": "English novelist", "question": "Are any of J.K. Rowling's books in the genre of And Then There Were None?", "answer": true, "facts": [ "And Then There Were None was a mystery novel written by Agatha Christie.", "J.K. Rowling is best known for her wizard fantasy series Harry Potter.", "Robert Galbraith is the author of the Cuckoo's Calling, a mystery crime fiction novel.", "Robert Galbraith is the pseudonym that J.K. Rowling writes under." ], "decomposition": [ "What genre is the book And Then There Were None?", "What genre are Rowling's fiction Cormoran Strike series?", "Is #1 same as #2?" 
], "evidence": [ [ [ [ "And Then There Were None-1" ] ], [ [ "Cormoran Strike-1" ] ], [ "operation" ] ], [ [ [ "And Then There Were None-1" ] ], [ [ "Cormoran Strike-1" ] ], [ "operation" ] ], [ [ [ "And Then There Were None-10" ], "no_evidence" ], [ [ "Cormoran Strike-1" ] ], [ "operation" ] ] ] }, { "qid": "86726670e41a9401da0f", "term": "Yale University", "description": "Private research university in New Haven, Connecticut, United States", "question": "Can Ford F-350 tow entire Yale University student body?", "answer": false, "facts": [ "The Yale University student body consists of 12,385 people according to a 2015 poll.", "The average US male weighs 195 pounds.", "The average US female weighs 168 pounds.", "The maximum towing capacity of the Ford F-350 is 15,000 pounds." ], "decomposition": [ "What is the maximum towing capacity of the Ford F-350?", "How much people attend Yale each year?", "What is the average weight of an adult?", "Is #2 times #3 less than #1?" ], "evidence": [ [ [ [ "Ford Super Duty-56" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Ford Super Duty-54" ], "no_evidence" ], [ [ "Yale University-62" ], "no_evidence" ], [ [ "Human body weight-12" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Ford Super Duty-2" ] ], [ [ "Yale University-62" ] ], [ [ "Human body weight-15" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1b5275d230fa30272cb5", "term": "Chuck Norris", "description": "American martial artist, actor, film producer and screenwriter", "question": "Could Chuck Norris ride a horse?", "answer": true, "facts": [ "Chuck Norris is a person.", "Horses are bigger than people.", "People can ride horses. " ], "decomposition": [ "Who could ride a horse?", "Is Chuck Norris #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Chuck Norris-1" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Equestrianism-6" ] ], [ "operation" ] ] ] }, { "qid": "2b0900c25deff445e6b1", "term": "Salmon", "description": "Family of fish related to trout", "question": "Do salmon mate in the Caspian Sea?", "answer": false, "facts": [ "Salmon reproduce in freshwater", "The Caspian Sea is a saltwater lake" ], "decomposition": [ "What kind of water do salmon reproduce in?", "Is the Caspian Sea salt or freshwater?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Salmon-2" ] ], [ [ "Caspian Sea-54" ] ], [ "operation" ] ], [ [ [ "Salmon-2" ] ], [ [ "Caspian Sea-1" ] ], [ "operation" ] ], [ [ [ "Salmon-2" ] ], [ [ "Caspian Sea-2" ] ], [ "operation" ] ] ] }, { "qid": "b135f5ff283e5f7329a9", "term": "United States Army Rangers", "description": "Elite military formation of the United States Army", "question": "Would Michael J Fox qualify for the Army Rangers?", "answer": false, "facts": [ "Michael J Fox has Parkinson's disease. ", "Parkinson's disease is a brain disorder that leads to shaking, stiffness, and difficulty with walking, balance, and coordination.", "To qualify for the Army Rangers, you must complete a 12-mile march with a 35-pound rucksack and weapon in less than three hours." ], "decomposition": [ "What must you do to qualify for the Army Rangers?", "What condition does Michael J Fox have?", "What are some symptoms of #2?", "Could someone experiencing #3 complete #1?" ], "evidence": [ [ [ [ "Ranger School-17" ] ], [ [ "Michael J. Fox-2" ] ], [ [ "Parkinson's disease-1" ] ], [ "operation" ] ], [ [ [ "75th Ranger Regiment-63" ] ], [ [ "Michael J. 
Fox-2" ] ], [ [ "Parkinson's disease-1" ] ], [ "operation" ] ], [ [ [ "75th Ranger Regiment-76", "United States Army Rangers-48" ], "no_evidence" ], [ [ "Michael J. Fox-2" ] ], [ [ "Parkinson's disease-1" ] ], [ [ "Parkinson's disease-20" ], "operation" ] ] ] }, { "qid": "e8c54343164bb850727f", "term": "Model (person)", "description": "person employed to display, advertise and promote products, or to serve as a visual aid", "question": "Would a model be appropriate to star in a LA Femme Nikita remake?", "answer": true, "facts": [ "La Femme Nikita is a French movie about a beautiful female assassin.", "Models are known for their beauty and height.", "Peta Wilson and Maggie Q have played the lead role in La Femme Nikita spinoffs.", "Peta Wilson and Maggie Q have both done extensive modeling work." ], "decomposition": [ "What is La Femme Nikita?", "What is #1 about?", "Would a model be able to play #2?" ], "evidence": [ [ [ [ "La Femme Nikita (film)-2" ] ], [ [ "La Femme Nikita (film)-2", "La Femme Nikita (film)-3" ] ], [ "no_evidence" ] ], [ [ [ "La Femme Nikita (film)-1" ] ], [ [ "La Femme Nikita (film)-2" ] ], [ [ "Maggie Q-1", "Maggie Q-3", "Peta Wilson-1" ], "operation" ] ], [ [ [ "La Femme Nikita (film)-1", "La Femme Nikita (film)-1" ] ], [ [ "La Femme Nikita (TV series)-2", "La Femme Nikita (TV series)-3" ] ], [ "operation" ] ] ] }, { "qid": "fc3a305f513090432212", "term": "Rash", "description": "skin condition", "question": "Is CAS number 8009-03-8 harmful for a rash?", "answer": false, "facts": [ "Some common substances that help rashes are creams, oils, and petroleum based products.", "CAS number 8009-03-8 is the identifier number for petroleum jelly." ], "decomposition": [ "What is CAS number 8009-03-8 the identifier number for?", "Is #1 harmful to put on a rash?" ], "evidence": [ [ [ [ "Petroleum jelly-1" ] ], [ [ "Petroleum jelly-2" ] ] ], [ [ [ "Petroleum jelly-1" ] ], [ [ "Petroleum jelly-2" ], "operation" ] ], [ [ [ "Petroleum jelly-1" ] ], [ [ "Petroleum jelly-2" ] ] ] ] }, { "qid": "79a254c885843d905da3", "term": "Jay-Z", "description": "American rapper, entrepreneur, record executive, songwriter, producer and investor from New York", "question": "Did Jay-Z ever collaborate with Louis Armstrong?", "answer": false, "facts": [ "Jay-Z was born in 1969.", "Louis Armstrong died in 1971." ], "decomposition": [ "What year did Jay-Z make his first recording?", "When did Louis Armstrong die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Jay-Z-2" ] ], [ [ "Louis Armstrong-87" ] ], [ "operation" ] ], [ [ [ "Jay-Z albums discography-2" ] ], [ [ "Louis Armstrong-1" ] ], [ "no_evidence" ] ], [ [ [ "Jay-Z-11" ] ], [ [ "Louis Armstrong-1" ] ], [ "operation" ] ] ] }, { "qid": "a1be20a22026cc661660", "term": "Drew Carey", "description": "American actor, comedian, game show host, libertarian and photographer", "question": "Is Drew Carey important to the history of wrestling?", "answer": true, "facts": [ "Drew Carey competed in the 2001 Royal Rumble.", "The Royal Rumble is one of the Big 4 yearly WWE pay per view events.", "Drew Carey was inducted into the WWE Hall of Fame in 2011." ], "decomposition": [ "What competition did Drew Carey compete in in 2001?", "Is #1 an important even for the WWE?" 
], "evidence": [ [ [ [ "Drew Carey-2" ] ], [ [ "Royal Rumble-2" ], "operation" ] ], [ [ [ "Royal Rumble (2001)-2" ] ], [ [ "Royal Rumble-2" ], "operation" ] ], [ [ [ "Drew Carey-18" ] ], [ [ "Royal Rumble-2" ] ] ] ] }, { "qid": "7d230194b37e59fff677", "term": "Iggy Pop", "description": "American rock singer-songwriter, musician, and actor", "question": "Would Iggy Pop travel with Justin Bieber?", "answer": false, "facts": [ "Iggy Pop is a famous punk rocker. ", "Justin Bieber is a famous pop singer.", "Punk is a music subculture that clashes against mainstream topics, ideals, and subjects.", "Pop music is a mainstream form of music constructed to appeal to the masses." ], "decomposition": [ "What genre of music does Iggy Pop play?", "What genre of music does Justin Beiber play?", "Would #1 and #2 go well together?" ], "evidence": [ [ [ [ "Iggy Pop-1" ] ], [ [ "Justin Bieber-39" ] ], [ "no_evidence" ] ], [ [ [ "Iggy Pop-1" ] ], [ [ "Justin Bieber-39" ] ], [ [ "Punk rock-1" ], "operation" ] ], [ [ [ "Iggy Pop-1", "Iggy Pop-2" ] ], [ [ "Justin Bieber-39" ] ], [ "operation" ] ] ] }, { "qid": "961b257f0eb8f704b247", "term": "Euro", "description": "European currency", "question": "Will a Euro sink in water?", "answer": true, "facts": [ "The smallest Euro paper bill is Five Euro.", "One Euro is only available as a coin.", "Coins sink in water. ", "A metal coin is more dense than water" ], "decomposition": [ "What is the density of water?", "What material is an Euro coin made of?", "Is the density of #2 usually higher than #1?" ], "evidence": [ [ [ [ "Properties of water-14" ] ], [ [ "Euro coins-50" ] ], [ [ "Euro coins-50", "Properties of water-14" ] ] ], [ [ [ "Properties of water-14" ] ], [ [ "Euro coins-27" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Buoyancy-2", "Water-7" ], "no_evidence" ], [ [ "Euro coins-50" ] ], [ [ "Alloy-13" ], "no_evidence", "operation" ] ] ] }, { "qid": "4532f1bc312eace9bcd6", "term": "Larry King", "description": "American television and radio host", "question": "Did Larry King sign the Magna Carta?", "answer": false, "facts": [ "The Magna Carta was a charter of rights signed by King John in 1215.", "Larry King was born in 1933." ], "decomposition": [ "When was Larry King born?", "When was the Magna Carta signed?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Larry King-1" ] ], [ [ "Magna Carta-1" ] ], [ "operation" ] ], [ [ [ "Larry King-1" ] ], [ [ "Magna Carta-1" ] ], [ "operation" ] ], [ [ [ "Larry King-1" ] ], [ [ "Magna Carta-5" ] ], [ "operation" ] ] ] }, { "qid": "95e6f7de4cb6713fb167", "term": "Dolce & Gabbana", "description": "Italian fashion house", "question": "Did Jackie Kennedy wear Dolce & Gabbana to her husband's inauguration?", "answer": false, "facts": [ "Jackie Kennedy's husband was John F. Kennedy", "John F. Kennedy was inaugurated in 1961", "Dolce & Gabbana was founded in 1985" ], "decomposition": [ "Who was Jackie Kennedy married to?", "When was #1 inaugurated?", "When was Dolce & Gabbana founded?", "Was #3 before #2?" ], "evidence": [ [ [ [ "Jacqueline Kennedy Onassis-19" ] ], [ [ "Presidency of John F. Kennedy-8" ] ], [ [ "Dolce & Gabbana-4" ] ], [ [ "Dolce & Gabbana-4", "Presidency of John F. Kennedy-8" ], "operation" ] ], [ [ [ "Jacqueline Kennedy Onassis-18" ] ], [ [ "Presidency of John F. Kennedy-8" ] ], [ [ "Dolce & Gabbana-1" ] ], [ "operation" ] ], [ [ [ "Jacqueline Kennedy Onassis-1" ] ], [ [ "John F. 
Kennedy-47" ] ], [ [ "Dolce & Gabbana-1" ] ], [ "operation" ] ] ] }, { "qid": "28104d8b40e83617cd2a", "term": "Memory", "description": "information stored in the mind, or the mental processes involved in receiving, storing, and retrieving this information", "question": "Do quadragenarian's have little memory capacity?", "answer": false, "facts": [ "Quadragenarians are people that are in their 40s.", "As people age, their memory can get worse.", "Half of people over age 50 have mild to severe memory loss.", "Ken Jennings was 46 years old when he won Jeopardy! The Greatest of All Time tournament." ], "decomposition": [ "How old do people generally get before their memory capacity starts getting limited?", "Quadragenarians are people within what age-range?", "Is #1 within or less than the range of #2?" ], "evidence": [ [ [ [ "Memory-54" ] ], [ [ "2015 Chama Cha Mapinduzi presidential primaries-6" ] ], [ "operation" ] ], [ [ [ "Memory-54" ] ], [ [ "Aging and society-3" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Old age-26" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1ffcadc478d4f408b988", "term": "Metre", "description": "SI unit of length", "question": "Are lengths measured in metres in the UK?", "answer": true, "facts": [ "Metres are part of the metric system.", "The UK uses the metric system for measurements." ], "decomposition": [ "What system of measurement does the UK use?", "Are meters a unit of measure in #1?" ], "evidence": [ [ [ [ "System of measurement-8" ] ], [ [ "Unit of length-6" ], "operation" ] ], [ [ [ "Metric system-37" ] ], [ [ "Metric units-2" ] ] ], [ [ [ "Imperial units-1" ], "no_evidence" ], [ [ "Metrication in the United Kingdom-70" ], "no_evidence" ] ] ] }, { "qid": "425683ababbbc31c733c", "term": "Islamophobia", "description": "Fear, hatred of, or prejudice against the Islamic religion or Muslims generally,", "question": "Was Godfrey of Bouillon an Islamaphobe?", "answer": true, "facts": [ " Godfrey of Bouillon lead troops during the Prince's Crusade.", "The Prince's Crusade was an attempt by Europeans to \"take back\" the city of Jerusalem from Islamic hands." ], "decomposition": [ "Islamaphobe indicates fear of what?", "What kind of interactions did Godfrey of Bouillon majorly have with Muslims?", "Is #2 an indication of #1?" ], "evidence": [ [ [ [ "Islamophobia-1" ] ], [ [ "Godfrey of Bouillon-14" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Godfrey of Bouillon-14" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Islamophobia-1" ] ], [ [ "Godfrey of Bouillon-10" ] ], [ "operation" ] ] ] }, { "qid": "58860d3595c8eb1f4690", "term": "Skull", "description": "bony structure that forms the skeleton of head in most vertebrates", "question": "Is the skull formed as one whole bone?", "answer": false, "facts": [ "The skull forms inwards from the outside.", "There are fission lines where the multiple pieces of bone came together to form a skull. " ], "decomposition": [ "How many bones are found in the skull?", "Is #1 equal to one?" ], "evidence": [ [ [ [ "Skull-10" ] ], [ "operation" ] ], [ [ [ "Skull-1" ] ], [ "operation" ] ], [ [ [ "Skull-10" ] ], [ "operation" ] ] ] }, { "qid": "4eeb74e13e58996f2d72", "term": "Painting", "description": "Practice of applying paint, pigment, color or other medium to a surface", "question": "Is The Joy of Painting TV show still producing new episodes?", "answer": false, "facts": [ "The Joy of Painting is hosted by painter Bob Ross.", "Bob Ross died in 1995.", "The episodes currently airing are reruns." 
], "decomposition": [ "Who was the host of The Joy of Painting?", "Is #1 still alive?" ], "evidence": [ [ [ [ "The Joy of Painting-1" ] ], [ [ "Bob Ross-26" ], "operation" ] ], [ [ [ "The Joy of Painting-1" ] ], [ [ "Bob Ross-1" ], "operation" ] ], [ [ [ "The Joy of Painting-1" ] ], [ [ "Bob Ross-26" ] ] ] ] }, { "qid": "4cdeb92d520f5e531f85", "term": "James Watson", "description": "American molecular biologist, geneticist, and zoologist", "question": "Does James Watson believe that Africans are inferior to Europeans?", "answer": true, "facts": [ "James Watson is a geneticist, who believes in his own work.", "James Watson is quoted as saying that genetic testing \"proves\" that Africans aren't as smart." ], "decomposition": [ "What profession is James Watson in? `", "As #1, what was James quoted with saying about African Americans?", "Did James Watson believe in his own work about #2?" ], "evidence": [ [ [ [ "James Watson-29" ] ], [ [ "James Watson-48" ] ], [ "operation" ] ], [ [ [ "James Watson-1" ] ], [ [ "James Watson-48" ] ], [ [ "James Watson-3" ], "operation" ] ], [ [ [ "James Watson-1" ] ], [ [ "James Watson-48" ] ], [ [ "James Watson-50" ], "operation" ] ] ] }, { "qid": "8c754ab507b269281c30", "term": "Eiffel Tower", "description": "Tower located on the Champ de Mars in Paris, France", "question": "Was the Eiffel tower used as a symbol of the French Revolution?", "answer": false, "facts": [ "The French Revolution took place 1789-1799.", "The Eiffel Tower was built a century later in 1888." ], "decomposition": [ "When was the French Revolution?", "When was the Eiffel Tower built?", "Is #2 before #1?" ], "evidence": [ [ [ [ "French Revolution-1" ] ], [ [ "Eiffel Tower-2" ] ], [ "operation" ] ], [ [ [ "French Revolution-1" ] ], [ [ "Eiffel Tower-2" ] ], [ "operation" ] ], [ [ [ "French Revolution-1" ] ], [ [ "Eiffel Tower-2" ] ], [ "operation" ] ] ] }, { "qid": "7b4705e038192a496f29", "term": "Al Pacino", "description": "American actor", "question": "Will Al Pacino and Margaret Qualley score same amount of Bacon Number points?", "answer": true, "facts": [ "The Bacon Number refers to a game in which people find how close a person is to the actor Kevin Bacon based on similar costars.", "Margaret Qualley was in Novitiate with Julianne Nicholson who was in Black Mass with Kevin Bacon (Bacon Number of 2).", "Al Pacino was in The Devil's Advocate with Charlize Theron who was in Trapped with Kevin Bacon (Bacon Number of 2).", "The further away someone is from Kevin Bacon, the more points scored in Bacon Number.", "Major General William Rufus Shafter is believed to produce the highest Bacon Number score of 10." ], "decomposition": [ "What is Al Pacino's Bacon Number?", "What is Margaret Qualley's Bacon Number?", "Is #1 equal to #2?" ], "evidence": [ [ [ [ "Al Pacino-22", "Six Degrees of Kevin Bacon-10" ], "no_evidence" ], [ [ "Margaret Qualley-16" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Al Pacino-2" ], "no_evidence" ], [ [ "Six Degrees of Kevin Bacon-10" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a2dfceff60d5b18fc70d", "term": "Salsa (sauce)", "description": "Sauce", "question": "Would Carolina Reaper decrease sales if added to all US salsa?", "answer": true, "facts": [ "On average, Americans prefer milder salsa than Europeans.", "The Carolina Reaper is the hottest pepper in the world. ", "The Carolina Reaper is rated as 2,200,000 Scoville Heat Units." 
], "decomposition": [ "On average, what level of spice do Americans prefer for their salsa?", "Compared to other peppers, how hot is the Carolina Reaper?", "Would adding #2 to salsa create a salsa that is #1?" ], "evidence": [ [ [ [ "Salsa (sauce)-2" ] ], [ [ "Carolina Reaper-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Carolina Reaper-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Salsa (sauce)-2" ], "no_evidence" ], [ [ "Carolina Reaper-2" ] ], [ [ "Salsa (sauce)-6" ], "no_evidence" ] ] ] }, { "qid": "a969716a48788439f917", "term": "HIV", "description": "Human retrovirus, cause of AIDS", "question": "Would fans of Jonathan Larson be unaware of HIV?", "answer": false, "facts": [ "Jonathan Larson died of AIDS in 1996.", "Jonathan Larson produced music and plays about HIV, AIDS, and poverty." ], "decomposition": [ "What works did Jonathan Larson produce?", "Do all of #1 avoid the topic of HIV?" ], "evidence": [ [ [ [ "Jonathan Larson-1" ] ], [ [ "Tick, Tick... Boom!-12" ], "operation" ] ], [ [ [ "Jonathan Larson-1" ] ], [ [ "Rent (musical)-1" ] ] ], [ [ [ "Jonathan Larson-1" ] ], [ [ "Rent (musical)-1" ], "operation" ] ] ] }, { "qid": "409f6134b00905dba32c", "term": "Paralympic Games", "description": "Major international sport event for people with disabilities", "question": "Can Josh Blue participate in Paralympics Games? ", "answer": true, "facts": [ "Josh Blue has cerebral palsy. ", "People with cerebral palsy can compete in the Paralympic Games." ], "decomposition": [ "What chronic illness does Josh Blue have?", "What conditions make one eligible to compete in the Paralympic Games?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Josh Blue-1" ] ], [ [ "Paralympic Games-4" ] ], [ [ "Ataxia-1", "Cerebral palsy-1" ], "operation" ] ], [ [ [ "Josh Blue-4" ], "no_evidence" ], [ [ "Paralympic Games-40" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Josh Blue-1" ] ], [ [ "Paralympic Games-1", "Paralympic Games-40" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "b2312cb85e1d00d48947", "term": "Macaque", "description": "genus of Old World monkeys", "question": "Can you hide a pet macaque under your desk?", "answer": true, "facts": [ "Macaques grow up to 28 inches in length", "A typical desk is 29 to 30 inches from the ground" ], "decomposition": [ "How tall is a macaque?", "How tall is a typical desk?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Japanese macaque-3" ] ], [ [ "Standing desk-4" ] ], [ "operation" ] ], [ [ [ "Macaque-4" ] ], [ [ "Desk-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Macaque-3" ], "operation" ], [ [ "Desk-12" ] ], [ "no_evidence" ] ] ] }, { "qid": "2ed0dd0664d4583389bc", "term": "Horseradish", "description": "species of plant", "question": "Does horseradish have a fetlock?", "answer": false, "facts": [ "Horseradish is a type of plant that is used as a condiment.", "Fetlock is the common name used for a joint found in horses." ], "decomposition": [ "What kingdom is horseradish in?", "Where is a fetlock found?", "What kingdom is #2 in?", "is #1 the same as #3?" 
], "evidence": [ [ [ [ "Horseradish-1" ], "no_evidence" ], [ [ "Fetlock-1" ] ], [ [ "Horse-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Horseradish-4" ] ], [ [ "Fetlock-1" ] ], [ [ "Horse-48" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Horseradish-1" ], "no_evidence" ], [ [ "Fetlock-1" ] ], [ [ "Horse-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "be1c91b7d7c455b614bc", "term": "Diamond", "description": "Allotrope of carbon often used as a gemstone and an abrasive", "question": "Can a diamond float on water?", "answer": false, "facts": [ "Diamonds are formed by extreme heat and pressure being applied to carbon under the earth's crust.", "The density of a diamond is 3.51 g/cm³.", "The density of water is 997 kg/m³.", "A diamond is more dense than water.", "A diamond will sink in water." ], "decomposition": [ "What is the density of a diamond?", "What is the density of water?", "Is #1 less than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Maximum density-3" ] ], [ "operation" ] ], [ [ [ "Diamond-5" ] ], [ [ "Water-7" ] ], [ "no_evidence" ] ], [ [ [ "Diamond-5" ] ], [ [ "Water-7" ] ], [ "operation" ] ] ] }, { "qid": "7d435d8dd86f55ae536c", "term": "Canon Inc.", "description": "Japanese multinational corporation specialised in the manufacture of imaging and optical products", "question": "Is Canon Inc. a Kabushiki gaisha?", "answer": true, "facts": [ "Canon, Inc. is a large corporation listed on the Tokyo Stock Exchange.", "Companies are listed on a stock exchange so brokers can buy and sell stock in those companies.", "Kabushiki gaisha is often translated as \"stock company\", \"joint-stock company\" or \"stock corporation\". " ], "decomposition": [ "What is Kabushiki gaisha usually translated as?", "Is Canon Inc. listed in the appropriate organization to be considered #1?" ], "evidence": [ [ [ [ "Kabushiki gaisha-1" ] ], [ [ "Canon Inc.-1", "Canon Inc.-2" ] ] ], [ [ [ "Kabushiki gaisha-1" ] ], [ [ "Canon Inc.-1" ] ] ], [ [ [ "Kabushiki gaisha-1" ] ], [ [ "Canon Inc.-1" ] ] ] ] }, { "qid": "8cfde6ee28d059a5aff6", "term": "Very Large Telescope", "description": "telescope in the Atacama Desert, Chile", "question": "Is it possible to get killed walking to the Very Large Telescope?", "answer": true, "facts": [ "The Very Large Telescope is in the Atacama Desert", "The Atacama Desert is the driest hot desert in the world." ], "decomposition": [ "Where is the Very Large Telescope?", "How hot is it in #1?", "Is it possible to die from being somewhere that is #2?" ], "evidence": [ [ [ [ "Very Large Telescope-1" ] ], [ [ "Atacama Desert-7" ] ], [ [ "Desert-4" ] ] ], [ [ [ "Very Large Telescope-1" ] ], [ [ "Arabian Desert-8" ] ], [ [ "Heat stroke-1" ], "operation" ] ], [ [ [ "Very Large Telescope-1" ] ], [ [ "Atacama Desert-23" ], "no_evidence" ], [ [ "Evan Tanner-27", "Evan Tanner-29" ], "operation" ] ] ] }, { "qid": "f060444ce63ca139b8a1", "term": "June", "description": "sixth month in the Julian and Gregorian calendars", "question": "Is it possible that June got its name from mythology?", "answer": true, "facts": [ "June may have been named after Juno.", "Juno was a Roman goddess and wife of the Roman king of the gods Jupiter." ], "decomposition": [ "Who was June possibly named after?", "Is #1 a figure in mythology?" 
], "evidence": [ [ [ [ "June-4" ] ], [ [ "Juno (mythology)-1" ] ] ], [ [ [ "June-4" ] ], [ "operation" ] ], [ [ [ "June-4" ] ], [ [ "Juno (mythology)-1" ] ] ] ] }, { "qid": "149617ff1b645db0e871", "term": "Reza Shah", "description": "Shah of Iran, Founder of the Imperial state of iran", "question": "Could Reza Shah be related to Queen Elizabeth I?", "answer": false, "facts": [ "Queen Elizabeth I was from English parents.", "Reza Shah was Mazanderani.", "Mazanderani people are indigenous people of Iran.", "Iran is nearly 4,000 miles from England." ], "decomposition": [ "Where are Queen Elizabeth I's parents from?", "Where is Reza Shah's family from?", "Is #1 near #2?" ], "evidence": [ [ [ [ "Anne Boleyn-6", "Elizabeth I of England-6", "Henry VIII of England-5" ] ], [ [ "Reza Shah-4" ] ], [ [ "England-1", "Iran-1" ] ] ], [ [ [ "Elizabeth I (disambiguation)-1" ] ], [ [ "Reza Shah-4" ] ], [ [ "Elizabeth I (disambiguation)-1" ] ] ], [ [ [ "Elizabeth I of England-6" ] ], [ [ "Reza Shah-4" ] ], [ "operation" ] ] ] }, { "qid": "2b9b1630e1c4b4a2b249", "term": "J. D. Salinger", "description": "American writer", "question": "Was Anthony Quinn more prolific at making children than J.D. Salinger?", "answer": true, "facts": [ "Author J.D. Salinger had two children.", "Actor Anthony Quinn had twelve children." ], "decomposition": [ "How many children did J. D. Salinger have?", "How many children did Anthony Quinn have?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "J. D. Salinger-29" ] ], [ [ "Anthony Quinn-30", "Anthony Quinn-31", "Anthony Quinn-32", "Anthony Quinn-33" ] ], [ "operation" ] ], [ [ [ "J. D. Salinger-29" ] ], [ [ "Anthony Quinn-31", "Anthony Quinn-33" ] ], [ "operation" ] ], [ [ [ "J. D. Salinger-29" ], "no_evidence" ], [ [ "Anthony Quinn-30" ] ], [ "operation" ] ] ] }, { "qid": "378ccb7bc29c9ce4eea0", "term": "Menthol", "description": "chemical compound", "question": "Does menthol make cigarettes less addictive?", "answer": false, "facts": [ "The addition of menthol to cigarettes does not reduce the amount of nicotine in them.", "Menthol itself is an addictive chemical. ", "Nicotine is the primary addictive component of cigarettes." ], "decomposition": [ "What is the primary addictive components in cigarettes?", "Does addition of menthol cause a reduction in #1?" ], "evidence": [ [ [ [ "Cigarette-3" ] ], [ [ "Menthol cigarette-32" ] ] ], [ [ [ "Nicotine-11" ] ], [ [ "Menthol cigarette-29" ] ] ], [ [ [ "Cigarette-1" ] ], [ [ "Menthol cigarette-29" ] ] ] ] }, { "qid": "caad75c2382565a4668c", "term": "French Revolution", "description": "Revolution in France, 1789 to 1798", "question": "Did France win the French Revolution?", "answer": false, "facts": [ "The French Revolution was a period of social and political upheaval in France and its colonies.", "War is an intense military conflict between two states.", "The French Revolution involved only France as citizens overthrew the monarchy." ], "decomposition": [ "Which parties were involved in the French Revolution?", "Did #1 involve France and another country or state?" 
], "evidence": [ [ [ [ "French Revolution-1" ] ], [ "operation" ] ], [ [ [ "French Revolution-1" ] ], [ [ "The Old Regime and the Revolution-3" ], "operation" ] ], [ [ [ "French Revolution-1" ] ], [ "operation" ] ] ] }, { "qid": "a6a54c5e011a51952ff9", "term": "Earth Day", "description": "Annual event on 22 April", "question": "Do people celebrate Earth Day with a ceremonial tire fire?", "answer": false, "facts": [ "Earth Day is a global holiday to show support for environmental protection.", "Tire fire is a large quantity of tires burning at once.", "Smoke from burning tires contain heavy metals and other harmful pollutants.", "Smoke is harmful to the environment. " ], "decomposition": [ "What is the major focus/purpose of the Earth Day holiday?", "What are the environmental implications of tire fire?", "Is #2 consistent with #1?" ], "evidence": [ [ [ [ "Earth Day-30" ] ], [ [ "Tire fire-3" ] ], [ [ "Tire fire-3" ] ] ], [ [ [ "Earth Day-1" ] ], [ [ "Scientific consensus on climate change-43" ] ], [ "operation" ] ], [ [ [ "Earth Day-1" ] ], [ [ "Tire fire-1" ] ], [ "operation" ] ] ] }, { "qid": "ea8bdd791571893e082d", "term": "Olive", "description": "Species of plant", "question": "Would you find olives at a heladeria?", "answer": false, "facts": [ "Olives are fruits of the olive tree used in savory dishes and preparations like olive oil and tapenade", "A heladeria is an ice cream parlour" ], "decomposition": [ "What kinds of foods are served at a heladeria?", "Are olives a type of #1?" ], "evidence": [ [ [ [ "Lares Ice Cream Parlor-4" ] ], [ [ "Olive-6" ] ] ], [ [ [ "Heladería Coromoto-1" ] ], [ [ "Olive-2" ], "operation" ] ], [ [ [ "Heladería Coromoto-1" ] ], [ "operation" ] ] ] }, { "qid": "7776349dede20a0d6405", "term": "Pope Alexander VI", "description": "Pope of the Catholic Church 1492–1503", "question": "Was Pope Alexander VI's origin country least represented in papal history?", "answer": false, "facts": [ "Pope Alexander VI, born Rodrigo Borgia, was born in a town in eastern Spain.", "There have been two Popes whose origins are from Spain, including Pope Alexander VI and Pope Callixtus III.", "Pope John Paul II was born in Poland.", "Pope John Paul II is the only pope of Polish origin." ], "decomposition": [ "What is Pope Alexander VI's home country?", "How many popes have come from #1?", "Is it the case that no countries have produced a non-zero number of popes that is less than #2?" ], "evidence": [ [ [ [ "Pope Alexander VI-2" ] ], [ [ "Pope Alexander VI-2", "Pope Callixtus III-2" ], "no_evidence" ], [ [ "Pope-51" ], "operation" ] ], [ [ [ "Pope Alexander VI-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Pope Alexander VI-1" ] ], [ [ "House of Borgia-8", "Pope Callixtus I-1" ], "no_evidence" ], [ [ "Pope Francis-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "a00eca72873f3200976a", "term": "Newspeak", "description": "fictional language in the novel Nineteen Eighty-Four", "question": "Is Newspeak considered very straightforward?", "answer": false, "facts": [ "The vocabulary in Newspeak is extremely restricted.", "In Newspeak, the Ministry of Truth manufactures lies for the public to consume.", "In Newspeak, the Ministry of Plenty artificially creates scarcity.", "To be straightforward is to be uncomplicated and easy to understand." ], "decomposition": [ "In Newspeak, how do words relate to the ideas they represent?", "Is #1 explicit and easy to understand?" 
], "evidence": [ [ [ [ "Newspeak-2" ] ], [ [ "Newspeak-11" ] ] ], [ [ [ "Ministries of Nineteen Eighty-Four-2" ] ], [ [ "Ministries of Nineteen Eighty-Four-3" ] ] ], [ [ [ "Newspeak-11" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "361f7e0cd93f9826b7f6", "term": "Myocardial infarction", "description": "Interruption of blood supply to a part of the heart", "question": "Is myocardial infarction a brain problem?", "answer": false, "facts": [ "Myocardial infarction is a problem in the heart.", "The equivalent in the brain would be similar to a stroke." ], "decomposition": [ "Which organ in the body does myocardial infarction affect?", "Is #1 the same as the brain?" ], "evidence": [ [ [ [ "Myocardial infarction-1" ] ], [ "operation" ] ], [ [ [ "Myocardial infarction-1" ] ], [ [ "Brain-1" ], "operation" ] ], [ [ [ "Myocardial infarction-1" ] ], [ "operation" ] ] ] }, { "qid": "0abc0874b74e714f943e", "term": "Northwest Airlines", "description": "1926–2010 major airline, merged into Delta Air Lines", "question": "Did Northwest Airlines' longevity surpass Betty White?", "answer": false, "facts": [ "Northwest Airlines lasted 84 years from 1926-2010.", "Betty White is 98 years old as of 2020." ], "decomposition": [ "How many years was Northwest Airlines in business?", "How old is Betty White?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Northwest Airlines-1" ] ], [ [ "Betty White-1" ] ], [ "operation" ] ], [ [ [ "Northwest Airlines-1" ] ], [ [ "Betty White-1" ] ], [ "operation" ] ], [ [ [ "Northwest Airlines-1" ] ], [ [ "Betty White-1" ] ], [ "operation" ] ] ] }, { "qid": "962d64c181ab75e0ace2", "term": "Hurricane Maria", "description": "Category 5 Atlantic hurricane in 2017", "question": "Could you windsurf in Puerto Rico during Hurricane Maria?", "answer": false, "facts": [ "Hurricane Maria was a deadly category 5 hurricane with wind speed up to 175mph.", "It is extremely dangerous and impossible to windsurf with wind speed higher than 100mph. " ], "decomposition": [ "What wind speed did Hurricane Maria reach?", "Above what wind speed is windsurf extremely dangerous?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Hurricane Maria-2" ] ], [ [ "Windsurfing-60" ] ], [ "operation" ] ], [ [ [ "Hurricane Maria-2" ] ], [ [ "Windsurfing-60" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Hurricane Maria-2" ] ], [ [ "Windsurfing-86" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0fbfd54c87436a2bcfb7", "term": "Tourism", "description": "travel for recreational or leisure purposes", "question": "Do tourists prefer Tuvalu to Niue?", "answer": false, "facts": [ "Tuvalu receives an average of 2,000 annual tourists.", "Niue receives an average of 10,000 annual tourists." ], "decomposition": [ "What is the average number of tourists that visit Tuvalu annually?", "What is the average number of tourists that visit Niue annually?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Tuvalu-6" ], "no_evidence" ], [ [ "Niue-17" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Tuvalu-105" ] ], [ [ "Niue-54" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Tuvalu-105" ] ], [ [ "Niue-54" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c3fa97b16ea3e91e22d8", "term": "Pacific War", "description": "Theater of World War II fought in the Pacific and Asia", "question": "Were muskets used in the Pacific War?", "answer": false, "facts": [ "The Pacific War took place between 1941 and 1945.", "The musket became obsolete in modern warfare starting near 1870." 
], "decomposition": [ "When was the Pacific War?", "When did muskets become obsolete?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Pacific War-2" ] ], [ [ "Musket-29" ] ], [ "operation" ] ], [ [ [ "Pacific War-2" ] ], [ [ "Musket-1" ] ], [ "operation" ] ], [ [ [ "Pacific War-1" ] ], [ [ "Musket-1" ] ], [ "operation" ] ] ] }, { "qid": "1624ab3ef12c87afc984", "term": "Autopilot", "description": "system to maintain vehicle trajectory in lieu of direct operator command", "question": "Does autopilot rely on fossil fuels?", "answer": true, "facts": [ "Autopilot is used in vehicles.", "Vehicles are powered by engines.", "Engines rely on fossil fuels to operate." ], "decomposition": [ "What uses autopilot?", "What powers #1?", "Does #2 rely on fossil fuels to run?" ], "evidence": [ [ [ [ "Autopilot-1" ] ], [ [ "Aircraft-1" ] ], [ [ "Jet engine-3" ], "operation" ] ], [ [ [ "Autopilot-12" ] ], [ [ "Jet fuel-1" ] ], [ "operation" ] ], [ [ [ "Autopilot-1" ] ], [ [ "Aviation fuel-4", "Jet fuel-5" ] ], [ "operation" ] ] ] }, { "qid": "a4526fcfad49a21eed15", "term": "Louvre", "description": "Art museum and Historic site in Paris, France", "question": "Can nitric acid break the Louvre?", "answer": true, "facts": [ "Parts of the Louvre are built of limestone.", "Nitric acid dissolves limestone." ], "decomposition": [ "What materials were used to build the Louvre?", "Can any of #1 be destroyed by nitric acid?" ], "evidence": [ [ [ [ "Louvre Pyramid-2" ] ], [ [ "Nitric acid-18" ], "no_evidence", "operation" ] ], [ [ [ "Louvre-21" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Louvre-1" ], "no_evidence" ], [ [ "Nitric acid-25" ], "no_evidence", "operation" ] ] ] }, { "qid": "2a145ebf8c392759e995", "term": "Durian", "description": "genus of plants", "question": "Would a Durian be dangerous if it fell on your head?", "answer": true, "facts": [ "Durian weight 2-7 lbs.", "Durian have a hard, spiky external shell.", "There are several reports of injury and death related to falling Durian fruit." ], "decomposition": [ "How much does a durian usually weigh?", "What is the rind of a durian covered with?", "Is an object with a weight of #1 covered with #2 dangerous?" ], "evidence": [ [ [ [ "Durian-2" ] ], [ [ "Durian-2" ] ], [ "operation" ] ], [ [ [ "Durian-2" ] ], [ [ "Durian-2" ] ], [ [ "Durian-2" ] ] ], [ [ [ "Durian-2" ] ], [ [ "Durian-14" ] ], [ "operation" ] ] ] }, { "qid": "65455e361b1bc45132db", "term": "Coen brothers", "description": "American filmmakers", "question": "Did the Coen brothers ever collaborate with the Brothers Grimm?", "answer": false, "facts": [ "The Coen brothers were born in 1954 and 1957.", "The Brothers Grimm died in 1859 and 1863." ], "decomposition": [ "In what century were the Coen brothers born?", "In what century did the Brothers Grimm die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Coen brothers-1" ] ], [ [ "Brothers Grimm-1" ] ], [ "operation" ] ], [ [ [ "Coen brothers-1" ] ], [ [ "Brothers Grimm-1" ] ], [ "operation" ] ], [ [ [ "20th century-2", "Coen brothers-1" ] ], [ [ "19th century-1", "Brothers Grimm-1" ] ], [ "operation" ] ] ] }, { "qid": "1660a6bd9b0e309aef9b", "term": "Asian black bear", "description": "species of mammal", "question": "Can an Asian black bear use chopsticks?", "answer": false, "facts": [ "Asian black bear are a species of bear found in asia. ", "Asian black bear don't have opposable thumbs", "Chopsticks are eating utensils use requires opposable thumbs." 
], "decomposition": [ "In order to use chopsticks, what body part does one need?", "Do Asian black bears have #1?" ], "evidence": [ [ [ [ "Chopsticks-18" ] ], [ [ "Bear-1" ] ] ], [ [ [ "Finger-2" ], "no_evidence" ], [ [ "Asian black bear-6" ], "no_evidence" ] ], [ [ [ "Chopsticks-18" ] ], [ [ "Asian black bear-41" ], "operation" ] ] ] }, { "qid": "38b57970d1b2ba8279b8", "term": "Christopher Columbus", "description": "Italian explorer, navigator, and colonizer", "question": "Did Christopher Columbus sail representing a different country than his original home?", "answer": true, "facts": [ "Columbus was originally from Genoa in what is now Italy.", "His expeditions were funded by the Spanish monarchy." ], "decomposition": [ "What country was Christopher Columbus born in?", "What country did Christopher Columbus sail for?", "Is #1 different than #2?" ], "evidence": [ [ [ [ "Christopher Columbus-5" ] ], [ [ "Christopher Columbus-80" ] ], [ [ "Christopher Columbus-80" ] ] ], [ [ [ "Christopher Columbus-5" ] ], [ [ "Christopher Columbus-1" ] ], [ "operation" ] ], [ [ [ "Christopher Columbus-5" ] ], [ [ "Christopher Columbus-30" ] ], [ "operation" ] ] ] }, { "qid": "794b1acb8152b006e63e", "term": "Yin and yang", "description": "philosophical concept", "question": "Are rainbows devoid of color made by mixing yin and yang colors?", "answer": true, "facts": [ "Yin and Yang are a philosophical Chinese concept represented by the color black and white.", "Black and white when mixed together create the color gray.", "The rainbow contains the colors: red, orange, yellow, green, blue, indigo and violet." ], "decomposition": [ "What colors do the 'yin and yang' concept represent?", "What are the colors of the rainbow?", "What color would be obtained by mixing #1?", "Is #3 included in #2?" ], "evidence": [ [ [ [ "Yin and yang-1" ] ], [ [ "ROYGBIV-1" ] ], [ [ "Shades of gray-7" ] ], [ "operation" ] ], [ [ [ "Yin and yang-40" ] ], [ [ "ROYGBIV-1" ] ], [ [ "Grey-1" ] ], [ "operation" ] ], [ [ [ "Yin and yang-40" ] ], [ [ "ROYGBIV-1" ] ], [ [ "Grey-21" ] ], [ "operation" ] ] ] }, { "qid": "56464972cc1e47ef8a66", "term": "Jerry Seinfeld", "description": "American comedian and actor", "question": "Does Jerry Seinfeld hang out at the Budweiser Party Deck?", "answer": false, "facts": [ "The Budweiser Party Deck is a social gathering spot in Yankee Stadium", "Yankee Stadium is home to the New York Yankees baseball team", "Jerry Seinfeld is a fan of the New York Mets" ], "decomposition": [ "Where is The Budweiser Party Deck located?", "Which sports team is #1 home to?", "Is Jerry Seinfeld a fan of #2?" ], "evidence": [ [ [ [ "Appalachian Power Park-14" ] ], [ [ "Appalachian Power Park-1" ] ], [ [ "Jerry Seinfeld-28" ], "operation" ] ], [ [ [ "Yankee Stadium-22" ] ], [ [ "Yankee Stadium-1" ] ], [ [ "Jerry Seinfeld-28" ] ] ], [ [ [ "Appalachian Power Park-14" ] ], [ [ "Appalachian Power Park-1" ] ], [ [ "Jerry Seinfeld-28" ], "operation" ] ] ] }, { "qid": "aac689a5f2da92bb35e7", "term": "Gospel", "description": "description of the life of Jesus, canonical or apocryphal", "question": "Do most fans follow Katy Perry for gospel music?", "answer": false, "facts": [ "Katy Perry's gospel album sold about 200 copies.", "Katy Perry's most recent pop albums sold over 800,000 copies." ], "decomposition": [ "What type of music is Katy Perry known for?", "Is Gospel music the same as #1?" 
], "evidence": [ [ [ [ "Katy Perry-2" ] ], [ "operation" ] ], [ [ [ "Katy Perry-2" ] ], [ [ "Gospel music-1" ], "operation" ] ], [ [ [ "Katy Perry-1", "Katy Perry-2" ] ], [ "operation" ] ] ] }, { "qid": "3996ee99b488820ea4fe", "term": "Eighth Amendment to the United States Constitution", "description": "prohibits cruel and unusual punishment and excessive bail", "question": "Does the Eighth Amendment to the United States Constitution protect freedom of speech?", "answer": false, "facts": [ "The Eighth Amendment (Amendment VIII) of the United States Constitution prohibits the federal government from imposing excessive bail, excessive fines, or cruel and unusual punishments.", "The First Amendment (Amendment I) to the United States Constitution protects freedom of speech." ], "decomposition": [ "What changes were made by the Eighth Amendment to the United States Constitution?", "Is the protection of freedom of speech among #1?" ], "evidence": [ [ [ [ "Eighth Amendment to the United States Constitution-1" ] ], [ "operation" ] ], [ [ [ "Eighth Amendment to the United States Constitution-1" ] ], [ "operation" ] ], [ [ [ "Eighth Amendment to the United States Constitution-5" ] ], [ "operation" ] ] ] }, { "qid": "8e03ed9fce31618b095a", "term": "Apollo", "description": "God in Greek mythology", "question": "Could all of the famous Apollo's hypothetically defeat all of the famous D'Artagnan's?", "answer": true, "facts": [ "The famous D'artagnan was a musketeer based on a count that served Louis XIV", "There are at least three famous Apollo's: Apollo Creed, Apollo (Greek mythology), and Apollo Crews.", "Apollo, the Greek god of the sun and healing, is immortal." ], "decomposition": [ "Who were the famous D'artagnan?", "Who were the famous Apollos?", "What special power did one of the #2's have?", "Can #1 be defeated by someone who is #3?" ], "evidence": [ [ [ [ "Charles de Batz de Castelmore d'Artagnan-1" ] ], [ [ "Apollo-1" ] ], [ [ "Apollo-155" ] ], [ [ "Apollo-155", "Charles de Batz de Castelmore d'Artagnan-3" ] ] ], [ [ [ "Charles de Batz de Castelmore d'Artagnan-1" ], "no_evidence" ], [ [ "Apollo (band)-1", "Apollo program-2", "Apollo-1" ], "no_evidence" ], [ [ "Apollo-183", "Coronis (lover of Apollo)-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Charles de Batz de Castelmore d'Artagnan-1" ] ], [ [ "Apollo Creed-1", "Apollo-1" ] ], [ [ "Apollo-208" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "43e781a9076ad3f1d415", "term": "Family of Barack Obama", "description": "List of members of the family of Barack Obama", "question": "Can Family of Barack Obama ride comfortably in 2020 Jaguar F Type?", "answer": false, "facts": [ "Barack Obama has a wife and two children.", "The 2020 Jaguar F Type is a car that seats two people." ], "decomposition": [ "How many people are in Barack Obama's immediate family?", "How many people can sit in a 2020 Jaguar F Type?", "Is #2 greater than #1?" 
], "evidence": [ [ [ [ "Family of Barack Obama-2" ], "no_evidence" ], [ [ "Jaguar F-Type-13" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Family of Barack Obama-5" ] ], [ [ "Car-42" ] ], [ "operation" ] ], [ [ [ "Barack Obama-15" ] ], [ [ "Jaguar F-Type-1" ] ], [ "operation" ] ] ] }, { "qid": "9cba30635f5cfd2ce0d4", "term": "Student", "description": "learner, or someone who attends an educational institution", "question": "Do Elementary School students typically need graphing calculators?", "answer": false, "facts": [ "Elementary schools typically teach K-5th grade.", "5th Graders are reaching a point in their education where they are just beginning to understand decimals and fractions.", "Graphing calculators are used for higher level math work including complex equations and functions.", "Students are usually introduced to functions in late middle or high school math." ], "decomposition": [ "What grades are part of elementary schools?", "Out of all the grades in #1, what do students in the highest grade learn in math?", "What topics in math require students to use graphing calculators?", "Is #2 the same s #3?" ], "evidence": [ [ [ [ "Primary school-1" ] ], [ [ "Arithmetic-54" ] ], [ [ "Graphing calculator-10" ] ], [ "operation" ] ], [ [ [ "Primary school-17" ] ], [ [ "Primary education-2" ], "no_evidence" ], [ [ "Graphing calculator-10" ] ], [ "operation" ] ], [ [ [ "Primary school-17" ] ], [ [ "Mathematics education-15" ], "no_evidence" ], [ [ "Graphing calculator-10" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8be4778ed338c7308e19", "term": "Composer", "description": "person who creates music, either by musical notation or oral tradition", "question": "Would King Leonidas have succeeded with an army the size of Mozart's compositions?", "answer": false, "facts": [ "King Leonidas led 300 Spartans and 700 Thespians against the Persian army at the Battle of Thermopylae.", "The Persian army had around 150,000 soldiers at the Battle of Thermopylae.", "Mozart composed 600 works." ], "decomposition": [ "How many compositions did Mozart write?", "How many soldiers did King Leonidas lead?", "Is #1 larger than #2?" ], "evidence": [ [ [ [ "Wolfgang Amadeus Mozart-3" ] ], [ [ "Leonidas I-10" ] ], [ "operation" ] ], [ [ [ "Wolfgang Amadeus Mozart-3" ] ], [ [ "Leonidas I-9" ] ], [ "operation" ] ], [ [ [ "Wolfgang Amadeus Mozart-3" ] ], [ [ "Leonidas I-1" ] ], [ "operation" ] ] ] }, { "qid": "162458ca6672c642a00f", "term": "Prophet", "description": "person claiming to speak for divine beings", "question": "Did the leader of Heaven's Gate consider himself a prophet?", "answer": true, "facts": [ "The leader of Heaven's Gate was Marshall Applewhite.", "Marshall Applewhite said he was called to be a messenger of the divine." ], "decomposition": [ "Who was the leader of Heaven's Gate?", "What did #1 say he was called upon to do?", "What is the definition of a prophet?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Marshall Applewhite-1" ] ], [ [ "Marshall Applewhite-10", "Marshall Applewhite-2" ] ], [ [ "Prophet-1" ] ], [ "operation" ] ], [ [ [ "Heaven's Gate (religious group)-1" ] ], [ [ "Marshall Applewhite-13" ] ], [ [ "Prophet-1" ] ], [ "operation" ] ], [ [ [ "Heaven's Gate (religious group)-1" ] ], [ [ "Marshall Applewhite-20" ] ], [ [ "Prophet-1" ] ], [ "operation" ] ] ] }, { "qid": "46fc399a48a40e78dc60", "term": "The Powerpuff Girls", "description": "American animated television series", "question": "Are the names of The Powerpuff Girls alliterative? 
", "answer": true, "facts": [ "To be alliterative, words must have the same initial consonant sound.", "The names of The Powerpuff Girls are Blossom, Buttercup, and Bubbles." ], "decomposition": [ "What are the names of The Powerpuff Girls?", "What features are necessary for a group of words to be considered alliterative?", "Are #2 present in #1?" ], "evidence": [ [ [ [ "The Powerpuff Girls-7" ] ], [ [ "Alliteration-1" ] ], [ "operation" ] ], [ [ [ "The Powerpuff Girls-16" ] ], [ [ "Alliteration-1" ] ], [ "operation" ] ], [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Alliteration-1" ] ], [ "operation" ] ] ] }, { "qid": "b1c014fc0205f28e8101", "term": "Soy milk", "description": "Beverage made from soybeans", "question": "Would Cardi B. benefit from soy milk?", "answer": true, "facts": [ "Cardi B became lactose intolerant in her early twenties.", "People who are lactose intolerant cannot have dairy.", "Soy milk is an alternative to dairy milk." ], "decomposition": [ "What food intolerance does Cardi B. suffer from?", "What must people with #1 avoid?", "Is soy milk free from #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Lactose intolerance-1" ] ], [ [ "Soy milk-1" ], "operation" ] ], [ [ [ "Cardi B-1" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "9fe0a73c0db0f034f859", "term": "Pope John Paul I", "description": "263rd Pope of the Catholic Church", "question": "Phileas Fogg's around the world would be difficult to achieve during Pope John Paul I's reign?", "answer": true, "facts": [ "Phileas Fogg is a character in Jules Verne's Around the World in Eighty Days.", "Phileas Fogg attempts to circumnavigate the globe in 80 days.", "Pope John Paul I reigned for only 33 days." ], "decomposition": [ "How long did it take Phileas Fogg to go around the world?", "How long did Pope John Paul I reign?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Phileas Fogg-1" ] ], [ [ "Pope John Paul I-1" ] ], [ "operation" ] ], [ [ [ "Around the World in Eighty Days-1" ] ], [ [ "Pope John Paul I-1" ] ], [ "operation" ] ], [ [ [ "Phileas Fogg-2" ] ], [ [ "Pope John Paul I-1" ] ], [ "operation" ] ] ] }, { "qid": "748b072c995cf5147ac9", "term": "Music", "description": "form of art using sound and silence", "question": "Are deaf people left out of enjoying music?", "answer": false, "facts": [ "Deafness exists on a spectrum of total hearing loss to partial hearing loss.", "Individuals with total hearing loss can still enjoy the bass and beat of music through vibration.", "Deaf people with cochlear implants can hear music, albeit in a different way than hearing people." ], "decomposition": [ "In what different ways can music be perceived?", "Does partial or total hearing loss make one unable to detect any of #1?" ], "evidence": [ [ [ [ "Hearing loss-38" ], "no_evidence" ], [ [ "Vibration-4" ], "no_evidence", "operation" ] ], [ [ [ "Music-1" ], "no_evidence" ], [ [ "Dance-1" ], "no_evidence", "operation" ] ], [ [ [ "Sound-6" ] ], [ [ "Deaf hearing-2" ], "operation" ] ] ] }, { "qid": "3413c919d24c59eadde3", "term": "Potato", "description": "plant species producing the tuber used as a staple food", "question": "Can someone with celiac disease have potato vodka?", "answer": true, "facts": [ "Celiac disease makes it unsafe for someone to eat gluten.", "Potato vodka is a gluten free product." ], "decomposition": [ "For people with celiac disease, what must they avoid?", "Does Potato Vodka contain #1?" 
], "evidence": [ [ [ [ "Gluten-15" ] ], [ [ "Grey Goose (vodka)-7" ] ] ], [ [ [ "Coeliac disease-2" ] ], [ [ "Vodka-24" ], "no_evidence", "operation" ] ], [ [ [ "Coeliac disease-2" ] ], [ [ "Potato-1" ], "operation" ] ] ] }, { "qid": "d1dbb41001ab4ad3b924", "term": "Blue", "description": "A primary colour between purple and green", "question": "Are blue lips normal?", "answer": false, "facts": [ "Lips and skin turn blue in response to a lack of oxygen.", "When oxygen flow is inadequate, someone's health can be in danger.", "Lips can turn blue due to extreme cold." ], "decomposition": [ "What are the possible causes of blue lips?", "Is any of #1 a natural or normal process?" ], "evidence": [ [ [ [ "Cyanosis-1" ] ], [ "operation" ] ], [ [ [ "Cyanosis-5", "Cyanosis-6" ] ], [ [ "Cyanosis-6" ] ] ], [ [ [ "Cyanosis-5" ] ], [ [ "Oxygen saturation (medicine)-1" ] ] ] ] }, { "qid": "f1a3fc559616155c5931", "term": "Cucumber", "description": "species of plant", "question": "Are cucumbers often found in desert?", "answer": false, "facts": [ "Cucumbers are a kind of vegetable.", "With the exception of carrot cake, deserts are not typically made with vegetables.", "Cucumbers are not the vegetable used in carrot cake." ], "decomposition": [ "What food group is a cucumber?", "Does #1 grow in the desert?" ], "evidence": [ [ [ [ "Cucumber-1" ] ], [ "operation" ] ], [ [ [ "Cucumber-1" ] ], [ [ "Desert-1" ] ] ], [ [ [ "Cucumber-25" ] ], [ [ "Cucumber-17" ], "no_evidence" ] ] ] }, { "qid": "57f6562167d81856052d", "term": "Lactic acid", "description": "group of stereoisomers", "question": "Is it bad to have lactic acid in your body?", "answer": false, "facts": [ "The body naturally produces and uses lactic acid to convert glucose into energy", "Lactic acid bacteria are particularly good for digestive health." ], "decomposition": [ "What are the functions of lactic acid in the human body?", "Is a majority of #1 harmful to the body?" ], "evidence": [ [ [ [ "Lactic acid-14" ] ], [ "operation" ] ], [ [ [ "Lactic acid-4" ] ], [ [ "Exercise-1" ], "operation" ] ], [ [ [ "Lactic acid-4" ], "no_evidence" ], [ [ "Lactic acid-14" ], "no_evidence" ] ] ] }, { "qid": "8479a1dd193b75d6cd26", "term": "Dermatitis", "description": "skin disease", "question": "Can someone with dermatitis be a hand model?", "answer": false, "facts": [ "Dermatitis causes rashes, redness, blisters, cracking of the skin, and scarring. ", "Hand models tend to have flawless skin and hands." ], "decomposition": [ "What does dermatitis cauas a person's skin to look like?", "What must a hand model's skin look like?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Dermatitis-1" ] ], [ [ "Hand model-1" ] ], [ "operation" ] ], [ [ [ "Dermatitis-5" ] ], [ [ "Hand model-1" ] ], [ [ "Dermatitis-5", "Hand model-1" ] ] ], [ [ [ "Dermatitis-1" ] ], [ [ "Hand model-1" ] ], [ "operation" ] ] ] }, { "qid": "68e7ad683f1090b19a61", "term": "Tonsure", "description": "hairstyle related to religious devotion", "question": "Does ancient Olympics crown fail to hide tonsure?", "answer": true, "facts": [ "Tonsure refers to the religious hairstyle in which hair from the top of the head is shaved off.", "Winners of the ancient Olympics were given a laurel crown to wear.", "A laurel crown involves leaves arranged in a circle that cover only the edges of the head." ], "decomposition": [ "Which part of the head is affected by tonsure?", "What kind of crown was given to winners in ancient Olympics?", "Does #2 cover #1 when worn?" 
], "evidence": [ [ [ [ "Tonsure-1" ] ], [ [ "Ancient Olympic Games-2" ] ], [ [ "Olive wreath-1" ], "operation" ] ], [ [ [ "Tonsure-1" ] ], [ [ "Olive wreath-1" ] ], [ "operation" ] ], [ [ [ "Tonsure-9" ] ], [ [ "Olive wreath-1" ] ], [ "operation" ] ] ] }, { "qid": "05e69c19a536222d90db", "term": "Anorexia nervosa", "description": "Eating disorder characterized by refusal to maintain a healthy body weight, and fear of gaining weight due to a distorted self image", "question": "Did Jon Brower Minnoch suffer from anorexia nervosa?", "answer": false, "facts": [ "Jon Brower Minnoch was an American man who, at his peak weight, was the heaviest human being ever recorded, weighing 1,400 lb.", "Anorexia nervosa,Anorexia nervosa is an eating disorder, characterized by low weight, food restriction, fear of gaining weight, and a strong desire to be thin. Many people with anorexia see themselves as overweight even though they are, in fact, underweight." ], "decomposition": [ "What are characteristics of anorexia nervosa?", "How much did Jon Brower Minnoch weigh?", "Is #2 a weight that would be considered #1?" ], "evidence": [ [ [ [ "Anorexia nervosa-1" ] ], [ [ "Jon Brower Minnoch-1" ] ], [ [ "Anorexia nervosa-2", "Jon Brower Minnoch-5" ] ] ], [ [ [ "Anorexia nervosa-1" ] ], [ [ "Jon Brower Minnoch-1" ] ], [ "operation" ] ], [ [ [ "Anorexia nervosa-4" ] ], [ [ "Jon Brower Minnoch-1" ] ], [ [ "Jon Brower Minnoch-1" ], "operation" ] ] ] }, { "qid": "e12a00a2c45fcb4f38e7", "term": "Othello", "description": "play by Shakespeare", "question": "Would Othello be Shakespeare's play to buy Scheherazade most time with king?", "answer": false, "facts": [ "Scheherazade was a character in Middle Eastern folklore that delayed her execution by telling the king long stories.", "Shakespeare's play Othello contained 26,450 words.", "Hamlet is Shakespeare's longest play consisting of 4000 lines and 30,000 words." ], "decomposition": [ "How long is Othello?", "Are all of Shakespeare's other plays shorter than #1?" ], "evidence": [ [ [ [ "Othello-41" ], "no_evidence" ], [ [ "Shakespeare's plays-1" ], "no_evidence", "operation" ] ], [ [ [ "Othello-20" ], "no_evidence" ], [ [ "Hamlet-2", "The Comedy of Errors-1" ], "no_evidence", "operation" ] ], [ [ [ "Othello-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7065ff7b7641acd0330b", "term": "Dessert", "description": "A course that concludes a meal; usually sweet", "question": "Can dessert be made with vegetables?", "answer": true, "facts": [ "A popular desert is carrot cake.", "Carrot cake is made with carrots.", "Carrots are a kind of vegetable." ], "decomposition": [ "What are some popularly known desserts?", "Do any of #1 contain vegetables?" ], "evidence": [ [ [ [ "Cake-14" ] ], [ [ "Carrot cake-1" ] ] ], [ [ [ "Dessert-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Carrot cake-1" ] ], [ [ "Carrot-1" ] ] ] ] }, { "qid": "350182c4e4a832fc4bc5", "term": "Baptism", "description": "Christian rite of admission and adoption, almost invariably with the use of water", "question": "Was Hillary Clinton's deputy chief of staff in 2009 baptised?", "answer": false, "facts": [ "Huma Abedin was Hillary Clinton's deputy chief of staff in 2009", "Huma Abedin is an American Muslim", "Baptism is a Christian tradition" ], "decomposition": [ "Who was Hilary Clinton's deputy chief of staff in 2009?", "What religion does #1 practice?", "What religion practices baptism?", "Is #2 the same as #3?" 
], "evidence": [ [ [ [ "Huma Abedin-8" ] ], [ [ "Huma Abedin-10" ] ], [ [ "Baptism-1" ] ], [ "operation" ] ], [ [ [ "Huma Abedin-1" ] ], [ "operation" ], [ [ "Baptism-1" ] ], [ "operation" ] ], [ [ [ "Huma Abedin-1" ] ], [ [ "Huma Abedin-5" ] ], [ [ "Baptism-1" ] ], [ "operation" ] ] ] }, { "qid": "3e5437a7c20da69c6778", "term": "Edgar Allan Poe", "description": "19th-century American author, poet, editor and literary critic", "question": "Was proofreading Edgar Allan Poe works lucrative?", "answer": false, "facts": [ "Proofreaders get paid a set rate based on the number of words in a document.", "Edgar Allan Poe wrote many short stories including the Oval Portrait which is two pages in length.", "Edgar Allan Poe's only complete novel: The Narrative of Arthur Gordon Pym of Nantucket was a mere 166 pages.", "A book like Jeyamohan's Venmurasu is 11,159 pages." ], "decomposition": [ "What is the typical length of each of Edgar Allan Poe's works?", "Is #1 relatively long?" ], "evidence": [ [ [ [ "Edgar Allan Poe-1" ] ], [ [ "Artamène-1", "Short story-7" ], "operation" ] ], [ [ [ "Edgar Allan Poe-1" ] ], [ "operation" ] ], [ [ [ "Edgar Allan Poe-1" ] ], [ "operation" ] ] ] }, { "qid": "d8fd81e475df8c2f6972", "term": "Crane (bird)", "description": "family of birds", "question": "Is a Cassowary safer pet than a crane?", "answer": false, "facts": [ "Crane's that are fed by humans can exhibit domestic tendencies and rarely peck at humans.", "The Cassowary, known as the world's most dangerous bird, becomes very aggressive and has even killed humans and dogs." ], "decomposition": [ "What behaviors do cranes have with humans?", "What behaviors do cassowaries have with humans?", "Are the behaviors of #2 less violent than #1?" ], "evidence": [ [ [ [ "Crane (bird)-22" ], "no_evidence" ], [ [ "Cassowary-36", "Cassowary-41" ] ], [ "operation" ] ], [ [ [ "Crane (bird)-24" ], "no_evidence" ], [ [ "Cassowary-36" ] ], [ "no_evidence" ] ], [ [ [ "Crane (bird)-5" ], "no_evidence" ], [ [ "Cassowary-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "012786196d9ab6df3425", "term": "Sudoku", "description": "Logic-based number-placement puzzle", "question": "Can Roman numerals fill the normal number of Sudoku box options?", "answer": false, "facts": [ "Sudoku boxes can be filled with one of 9 numbers.", "There are only seven Roman numerals: I, V, X, L, C, D and M" ], "decomposition": [ "How many symbols are used in the Roman numeral system?", "How many numbers are employed in Sudoku?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Roman numerals-1" ] ], [ [ "Sudoku-1" ] ], [ "operation" ] ], [ [ [ "Roman numerals-1" ] ], [ [ "Sudoku-1" ] ], [ "operation" ] ], [ [ [ "Roman numerals-1" ] ], [ [ "Sudoku-1" ] ], [ "operation" ] ] ] }, { "qid": "668c900cd1405a15d60b", "term": "Guitarist", "description": "person who plays the guitar", "question": "Does being good at guitar hero make you a good guitarist?", "answer": false, "facts": [ "Guitar Hero is a game that features a guitar-shaped controller with buttons that the player must hit in time with a song.", "Guitars as instruments do not have any buttons, but have strings that must be strummed in a particular way to create sound." ], "decomposition": [ "How is a guitar played?", "How is Guitar Hero played?", "Do the steps in #1 match those of #2?" 
], "evidence": [ [ [ [ "Guitar-1" ] ], [ [ "Guitar Hero-44" ] ], [ "operation" ] ], [ [ [ "Guitar-1" ] ], [ [ "Guitar controller-1" ] ], [ "operation" ] ], [ [ [ "Guitar-1" ] ], [ [ "Guitar Hero-1" ] ], [ "operation" ] ] ] }, { "qid": "4496d5cac14132b6e7ea", "term": "San Antonio", "description": "City in Texas, United States", "question": "Did any citizen of San Antonio vote for Boris Johnson?", "answer": false, "facts": [ "San Antonio is a city in Texas in the United States of America", "Boris Johnson is the Prime Minister of the UK", "Only UK and commonwealth citizens may vote in UK elections" ], "decomposition": [ "Is San Antonio a city in the UK?", "Is Boris Johnson the Prime Minister of the UK?", "Are American citizens allowed to vote in the UK elections?", "Are #1 and #3 the same answer as #2?" ], "evidence": [ [ [ [ "San Antonio-18" ] ], [ [ "Boris Johnson-100" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "San Antonio-41" ] ], [ [ "Boris Johnson-100" ] ], [ [ "Elections in the United Kingdom-7" ] ], [ "operation" ] ], [ [ [ "San Antonio-1" ] ], [ [ "Boris Johnson-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "89e1a3d04dc74dc36dac", "term": "Justin Bieber", "description": "Canadian singer-songwriter and actor", "question": "Will Justin Bieber take over Mike Pence's position in 2020?", "answer": false, "facts": [ "Mike Pence is Vice President of the United States.", "The Vice President must be a US citizen.", "The Vice President must be at least 35 years of age.", "Justin Bieber is a Canadian citizen.", "Justin Bieber is 26 years old in 2020." ], "decomposition": [ "What is Mike Pence's present position?", "What is the age/nationality requirement to be a #1?", "What is Justin Bieber's age/nationality by 2020?", "Does #3 match #2?" ], "evidence": [ [ [ [ "Mike Pence-4" ] ], [ [ "Age of candidacy-8" ] ], [ [ "Justin Bieber-1" ] ], [ "operation" ] ], [ [ [ "Mike Pence-4" ] ], [ "no_evidence" ], [ [ "Justin Bieber-1" ] ], [ [ "Justin Bieber-1" ], "operation" ] ], [ [ [ "Natural-born-citizen clause-1", "Vice President of the United States-24" ] ], [ [ "Vice President of the United States-25" ] ], [ [ "Justin Bieber-1" ] ], [ "operation" ] ] ] }, { "qid": "b6713901d33bc1020596", "term": "Richard Wagner", "description": "German composer", "question": "Was Mozart accused of stealing from Richard Wagner?", "answer": false, "facts": [ "Mozart died in 1791.", "Richard Wagner was born in 1813." ], "decomposition": [ "When did Mozart die?", "When was Richard Wagner born?", "Is #2 an earlier date than #1?" ], "evidence": [ [ [ [ "Wolfgang Amadeus Mozart-1" ] ], [ [ "Richard Wagner-1" ] ], [ "operation" ] ], [ [ [ "Wolfgang Amadeus Mozart-50" ] ], [ [ "Richard Wagner-1" ] ], [ "operation" ] ], [ [ [ "Wolfgang Amadeus Mozart-50" ] ], [ [ "Richard Wagner-1" ] ], [ "operation" ] ] ] }, { "qid": "096acbba9f3e51c0f722", "term": "Hamburger", "description": "Sandwich consisting of buns, a patty, and some other fillings", "question": "Do seven McDonald's hamburgers exceed USDA recommended fat allowance?", "answer": false, "facts": [ "The fat content of one McDonald's hamburgers is 10 grams.", "The USDA recommends between 44 and 77 grams of fat a day." ], "decomposition": [ "How much fat is in a McDonald's hamburger?", "What is #1 multiplied by 7?", "How many grams of fat per day does the USDA recommend for the average person?", "Is #2 greater than the maximum value of #3?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "McDonald's France-3" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Dietary Reference Intake-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "71cb683bdb77f668dce0", "term": "Sainsbury's", "description": "chain of supermarkets in the United Kingdom", "question": "Could you drive from New England to a Sainsbury's?", "answer": false, "facts": [ "New England is located in the United States of America.", "The U.K and New England are separated by a large ocean." ], "decomposition": [ "What country is Sainsbury located on?", "What country is New England in?", "What separates #1 and #2?", "Can a car drive over #3?" ], "evidence": [ [ [ [ "Sainsbury's-1" ] ], [ [ "New England-1" ] ], [ [ "Atlantic Ocean-2" ] ], [ "operation" ] ], [ [ [ "Sainsbury's-1" ] ], [ [ "Eastern United States-8" ] ], [ [ "Atlantic Ocean-1" ] ], [ "operation" ] ], [ [ [ "Sainsbury's-1" ] ], [ [ "New England-1" ] ], [ [ "Atlantic Ocean-2" ] ], [ [ "Amphibious automobile-1", "Amphibious vehicle-22" ] ] ] ] }, { "qid": "8d06743969021bb22a3d", "term": "Lieutenant", "description": "junior commissioned officer in many nations' armed forces", "question": "Can children become lieutenants?", "answer": true, "facts": [ "Lieutenant the second junior-most or in some cases the junior-most commissioned officer in the armed forces, fire services, police, and other organizations of many nations.", "Many gangs use military rankings to describe their internal heirarchy.", "Many young children are inducted into gangs with heirarchies.", "Children can grow up to be whatever they want to be. " ], "decomposition": [ "What position in a hierarchical system does lieutenant imply?", "Would a gang use the system and assign children to #1 positions?" ], "evidence": [ [ [ [ "Lieutenant-11" ] ], [ [ "Ghetto Boys-7" ] ] ], [ [ [ "Lieutenant-1" ] ], [ [ "Gang-24", "Gang-47" ], "no_evidence", "operation" ] ], [ [ [ "Lieutenant-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "15586c73003c301ef18f", "term": "Cousin", "description": "any descendant of an ancestor's sibling", "question": "Could SNL be why Jenny McCarthy does not get along with her cousin?", "answer": true, "facts": [ "Jenny McCarthy is cousin's with Melissa McCarthy.", "Melissa McCarthy and Jenny McCarthy are not close and Melissa did not even attend Jenny's wedding.", "Jenny McCarthy was spoofed in n episode of SNL (Saturday Night Live) for a rant she did on The View.", "Melissa McCarthy has been a frequent guest on SNL (Saturday Night Live) from 2011-2017.", "Melissa McCarthy was nominated five times for a Primetime Emmy Award for Outstanding Guest Actress in a Comedy Series for her appearances on SNL." ], "decomposition": [ "Who is Jenny McCarthy's cousin?", "What show has #1 been a frequent guest of?", "Was Jenny McCarthy made fun of on #2?" 
], "evidence": [ [ [ [ "Jenny McCarthy-3" ] ], [ [ "Melissa McCarthy-2" ] ], [ "no_evidence" ] ], [ [ [ "Melissa McCarthy-1" ] ], [ [ "Melissa McCarthy-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Melissa McCarthy-5" ] ], [ [ "Melissa McCarthy-8" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "90ac2bf04314d357f0e5", "term": "Winter", "description": "one of the Earth's four temperate seasons, occurring between autumn and spring", "question": "Would Phineas and Ferb enjoy winter?", "answer": false, "facts": [ "Phineas and Ferb is a tv show that takes place in the summer time.", "Phineas and Ferb are students and they get summer break in the summer.", "They enjoy summer break because of the freetime they have.", "If it were winter, they would not have summer break." ], "decomposition": [ "What is Phineas and Ferb?", "What season does #1 take place during?", "Why do Phineas and Ferb enjoy #2?", "If it were winter, would Phineas and Ferb still have #3?" ], "evidence": [ [ [ [ "Phineas and Ferb-1" ] ], [ [ "Phineas and Ferb-1" ] ], [ [ "Phineas and Ferb-3" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Phineas and Ferb-1" ] ], [ [ "Summer vacation-108" ] ], [ [ "Phineas and Ferb-1" ] ], [ "operation" ] ], [ [ [ "Phineas and Ferb-1" ] ], [ [ "Phineas and Ferb-1" ] ], [ [ "Phineas and Ferb-16", "Phineas and Ferb-3" ] ], [ "operation" ] ] ] }, { "qid": "e684937aefb2df3eebcf", "term": "H", "description": "letter in the Latin alphabet", "question": "Is H's most common two letter pair partner a freebie in Wheel of Fortune bonus round?", "answer": true, "facts": [ "H forms the most common two letter pair in the English language along with the letter T.", "The Wheel of Fortune bonus round gives the player six free letters: R, S, T, L, N, E." ], "decomposition": [ "What letter forms the most common two letter pair in English along with the letter H?", "What free letters does the Wheel of Fortune bonus round give players?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Th (digraph)-1" ] ], [ [ "Wheel of Fortune (American game show)-13" ] ], [ "operation" ] ], [ [ [ "Letter frequency-11" ] ], [ [ "Wheel of Fortune (Australian game show)-33" ] ], [ "operation" ] ], [ [ [ "Most common words in English-5" ], "no_evidence" ], [ [ "Wheel of Fortune (Australian game show)-33" ] ], [ "operation" ] ] ] }, { "qid": "e475171d9c62141a6835", "term": "Curiosity (rover)", "description": "American robotic rover exploring the crater Gale on Mars", "question": "Can Curiosity take samples of rocks from Lacus Temporis?", "answer": false, "facts": [ "Curiosity is a rover exploring Mars", "Lacus Temporis is located on the moon" ], "decomposition": [ "Which planet is Curiosity on?", "Where is Lacus Temporis located?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Curiosity (rover)-1" ] ], [ [ "Lacus Temporis-1" ] ], [ "operation" ] ], [ [ [ "Curiosity (rover)-1" ] ], [ [ "Lacus Temporis-1" ] ], [ "operation" ] ], [ [ [ "Curiosity (rover)-1" ] ], [ [ "Lacus Temporis-1" ] ], [ "operation" ] ] ] }, { "qid": "51752ec2cb34e4ff1da0", "term": "P. G. Wodehouse", "description": "English author", "question": "Would P. G. Wodehouse be taught in second grade?", "answer": false, "facts": [ "Second graders are often aged seven or eight.", "The works of Wodehouse are intended for an adult audience." ], "decomposition": [ "How old are typical second graders?", "What age group is P. G. Wodehouse works intended for?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Second grade-1" ] ], [ [ "P. G. 
Wodehouse-52" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Second grade-8" ] ], [ [ "P. G. Wodehouse-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Second grade-8" ] ], [ [ "P. G. Wodehouse-26" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8d06b619a7045ed02f51", "term": "Diamond", "description": "Allotrope of carbon often used as a gemstone and an abrasive", "question": "Is the title of Shirley Bassey's 1971 diamond song a true statement?", "answer": false, "facts": [ "Shirley Bassey recorded the song Diamonds are Forever in 1971,", "Over time, diamonds degrade and turn into graphite.", "Graphite is the same chemical composition found in pencils." ], "decomposition": [ "What is the title to Shirley Bassey's 1971 diamond song?", "Do diamonds last for the time span in #1?" ], "evidence": [ [ [ [ "Diamonds Are Forever (soundtrack)-2" ] ], [ [ "Material properties of diamond-8" ], "no_evidence", "operation" ] ], [ [ [ "Shirley Bassey-1" ] ], [ [ "Material properties of diamond-31" ], "operation" ] ], [ [ [ "Shirley Bassey-1" ], "no_evidence" ], [ [ "Diamond-48" ], "no_evidence" ] ] ] }, { "qid": "a2272d539335ff78b429", "term": "Potato", "description": "plant species producing the tuber used as a staple food", "question": "Can Tame Impala's studio band play a proper game of Hot Potato?", "answer": false, "facts": [ "Hot Potato is a game in which two or more people toss a potato until the music stops.", "Tame Impala is a band with one member, multi-instrumentalist Kevin Parker." ], "decomposition": [ "How many studio members are in Tame Impala's band?", "What is the minimum number of people that can play hot potato?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Tame Impala-1" ] ], [ [ "Hot potato-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Tame Impala-1" ] ], [ [ "Hot potato-1" ] ], [ "operation" ] ], [ [ [ "Tame Impala-1" ] ], [ [ "Hot potato-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0b577826d5e7b6a50aad", "term": "Alice's Adventures in Wonderland", "description": "book by Lewis Carroll", "question": "Is tobacco use made to seem enjoyable in Alice's Adventures in Wonderland?", "answer": true, "facts": [ "In Alice's Adventures in Wonderland, one of the characters is a caterpillar that smokes hookah.", "Hookah is a water pipe used to smoke tobacco products.", "The caterpillar speaks to Alice while making letters out of the smoke he blows." ], "decomposition": [ "In Alice's Adventures in Wonderland, what is a caterpillar seen smoking?", "What do you use #1 to do?", "Does it seem like the caterpillar enjoys #2?" ], "evidence": [ [ [ [ "Alice's Adventures in Wonderland-13" ] ], [ [ "Hookah-1" ] ], [ "no_evidence" ] ], [ [ [ "Alice's Adventures in Wonderland-13" ] ], [ [ "Hookah-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Alice's Adventures in Wonderland-13" ] ], [ [ "Hookah-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e0fb32d50fba9fe0aa32", "term": "Cell (biology)", "description": "The basic structural and functional unit of all organisms; the smallest unit of life.", "question": "Can a cell fit inside of a shoebox?", "answer": true, "facts": [ "The average shoebox is around 14 inches by 10 inches by 5 inches", "The average eukaryotic cell is between 1 and 100 micrometers in diameter" ], "decomposition": [ "How big is a cell?", "How big is a shoebox?", "Is #1 smaller than #2?" 
], "evidence": [ [ [ [ "Cell (biology)-2" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Cell (biology)-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Cell (biology)-1", "Electrochemical cell-1", "Fuel cell-1", "Monastic cell-1", "Prison cell-1", "Solar cell-1", "Storm cell-1" ], "no_evidence" ], [ [ "Shoe-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c60ed53423d39fb996e2", "term": "U.S. Route 1", "description": "highway in the United States", "question": "Is US route 1 dominated by historically red states?", "answer": false, "facts": [ "US route 1 is a highway in the US that spans 15 states.", "There are 5 historically red states along US Route 1.", "There are 10 historically blue states along US route 1." ], "decomposition": [ "What states does US Rte. 1 pass through?", "How many states in #1 are historically \"red states\"?", "How many states in #1 are historically \"blue states\"?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "U.S. Route 1-1" ] ], [ [ "Red states and blue states-1" ], "no_evidence" ], [ [ "Red states and blue states-17" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "U.S. Route 1-1" ] ], [ [ "Red states and blue states-1" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "U.S. Route 1-1" ] ], [ [ "Red states and blue states-29" ], "no_evidence" ], [ [ "Red states and blue states-29" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a0d0c2ac289c7a59911d", "term": "Lactobacillus", "description": "genus of bacteria", "question": "Is overfeeding Lactobacillus unwise for people without dental insurance?", "answer": true, "facts": [ "Lactobacillus species convert sugars they digest to lactic acid ", "The lactic acid of some Lactobacillus species is associated with tooth decay", "Dental procedures can be expensive without insurance" ], "decomposition": [ "What are the products of Lactobacillus?", "What conditions are caused by #1?", "What medical procedures would be required to fix #2?", "Would #3 be more affordable with dental insurance?" ], "evidence": [ [ [ [ "Lactobacillus-1" ] ], [ [ "Lactic acid-5" ] ], [ [ "Tooth decay-77" ] ], [ [ "Dental insurance-1" ] ] ], [ [ [ "Lactobacillus-1", "Lactobacillus-2" ] ], [ [ "Lactic acid bacteria-14" ] ], [ [ "Tooth decay-77" ] ], [ [ "Dental insurance-1" ] ] ], [ [ [ "Lactobacillus-10" ] ], [ [ "Lactobacillus-10" ] ], [ [ "Tooth decay-76" ] ], [ [ "Dental insurance-1" ] ] ] ] }, { "qid": "79af6e281b295c2a2e85", "term": "Simon Cowell", "description": "English reality television judge, television producer and music executive", "question": "Can Simon Cowell vote for the next Supreme Court judge?", "answer": false, "facts": [ "The Supreme Court is the highest court in the USA.", "Simon Cowell is a British talent competition judge.", "Members of the Supreme Court are appointed, rather than elected." ], "decomposition": [ "Who appoints US Supreme Court judges?", "Is Simon Cowell currently serving as #1?" 
], "evidence": [ [ [ [ "Appointments Clause-1" ] ], [ [ "Simon Cowell-27", "Simon Cowell-43" ], "operation" ] ], [ [ [ "Appointment and confirmation to the Supreme Court of the United States-3" ] ], [ [ "Simon Cowell-1" ] ] ], [ [ [ "Supreme Court of the United States-2" ] ], [ [ "Donald Trump-1" ] ] ] ] }, { "qid": "71e9bed5efda19d9c4ce", "term": "Pikachu", "description": "Pokémon species and the mascot of the Pokémon franchise", "question": "Does Pikachu like Charles Darwin?", "answer": true, "facts": [ "Charles Darwin developed the theory of evolution", "Pikachu is a Pokemon", "Pokemon rely on evolution in order to transform into stronger forms" ], "decomposition": [ "How does a Pokemon improve upon itself? ", "What theory is Charles Darwin best known for?", "Is #1 and #2 related?" ], "evidence": [ [ [ [ "Pokémon-6" ] ], [ [ "Charles Darwin-1" ] ], [ "operation" ] ], [ [ [ "Pokémon-6" ] ], [ [ "Charles Darwin-2" ] ], [ "operation" ] ], [ [ [ "Gameplay of Pokémon-57" ] ], [ [ "Charles Darwin-1" ] ], [ "operation" ] ] ] }, { "qid": "00d3e54a34c0d33d8407", "term": "Nickel", "description": "Chemical element with atomic number 28", "question": "Is nickel a better payout than mercury if given a dollar per atomic number?", "answer": false, "facts": [ "Nickel is a metallic substance with the chemical atomic number of 28.", "Mercury is a silvery liquid substance with a chemical number of 80." ], "decomposition": [ "What is nickel's atomic number?", "What is Mercury's atomic number?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Nickel-1" ] ], [ [ "Mercury (element)-1" ] ], [ "operation" ] ], [ [ [ "Nickel-1" ] ], [ [ "Mercury (element)-1" ] ], [ "operation" ] ], [ [ [ "Nickel-1" ] ], [ [ "Mercury (element)-1" ] ], [ "operation" ] ] ] }, { "qid": "cb19ebb5cbaa71bc32d6", "term": "Duck", "description": "common name for many species in the bird family Anatidae", "question": "Would a duck ever need a Caesarean section?", "answer": false, "facts": [ "A Caesarean section is a medical procedure in which surgery is performed to remove the baby from inside the mother.", "Ducks do not give live birth, they lay eggs." ], "decomposition": [ "Cesarean sections are only performed on animals that produce offspring via what method?", "What method do ducks use to produce offspring?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Caesarean section-1" ] ], [ [ "Duck-15" ] ], [ "operation" ] ], [ [ [ "Caesarean section-1" ] ], [ [ "Mallard-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Caesarean section-1" ] ], [ [ "Duck-15" ] ], [ [ "Egg-10" ], "operation" ] ] ] }, { "qid": "45db806b7177c71f659f", "term": "Great Depression", "description": "20th-century worldwide economic depression", "question": "Could all the unemployed people due to 1933 Great Depression fit in Tiger Stadium?", "answer": false, "facts": [ "There were approximately 15 million people unemployed in 1933 due to the Great Depression.", "In the 1930s Tiger Stadium had a capacity around 50,000." ], "decomposition": [ "How many people became unemployed due to 1933 Great Depression?", "What is the seating capacity of Tiger Stadium?", "Is #1 less than or equal to #2?" 
], "evidence": [ [ [ [ "Recession of 1937–38-2", "United States-1" ], "no_evidence" ], [ [ "Tiger Stadium (LSU)-18" ] ], [ "operation" ] ], [ [ [ "Great Depression-65" ], "no_evidence" ], [ [ "Tiger Stadium (LSU)-11" ] ], [ "operation" ] ], [ [ [ "Unemployment-139" ], "no_evidence" ], [ [ "Tiger Stadium (Detroit)-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e39e7996cb59f6c932e0", "term": "Mitsubishi", "description": "group of autonomous, Japanese multinational companies", "question": "Can Aerosmith fit in a 2020 Mitsubishi Outlander?", "answer": true, "facts": [ "Aerosmith is an American rock band that has five active members.", "The 2020 Mitsubishi Outlander has flexible seating that allows for seven seat capacity." ], "decomposition": [ "How many people are members of the band Aerosmith?", "What is the searing capacity of the 2020 Mitsubishi Outlander?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Aerosmith-1" ] ], [ [ "Mitsubishi Outlander-10" ] ], [ [ "Aerosmith-1", "Mitsubishi Outlander-10" ], "operation" ] ], [ [ [ "Aerosmith-1" ] ], [ [ "Mitsubishi Outlander-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Aerosmith-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d692a072dbe7acefbc6b", "term": "30th Street Station", "description": "United States historic place", "question": "Could all of the people who pass through 30th Street Station every day fit in Dorton Arena?", "answer": false, "facts": [ "J. S. Dorton Arena is a 7,610-seat multi-purpose arena located in Raleigh, North Carolina.", "On an average day in fiscal 2013, about 12,000 people boarded or left trains at 30th Street." ], "decomposition": [ "How many people can sit in J. S. Dorton Arena?", "How many people passed through the 30th Street Station daily in 2013?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Dorton Arena-1" ] ], [ [ "30th Street Station-5" ] ], [ "operation" ] ], [ [ [ "Dorton Arena-1" ] ], [ [ "30th Street Station-5" ] ], [ "operation" ] ], [ [ [ "Dorton Arena-1" ] ], [ [ "30th Street Station-5" ] ], [ [ "30th Street Station-5", "Dorton Arena-1" ], "operation" ] ] ] }, { "qid": "afa32406c205674efb7f", "term": "Amazons", "description": "warrior women from Greek mythology", "question": "Did any of the amazons on Xena: Warrior Princess star on later shows?", "answer": true, "facts": [ "Xena\" Warrior Princess was a fantasy TV series based on Greek mythology.", "Amazons on Xena: Warrior Princess were played by numerous actresses including: Danielle Cormack and Melinda Clarke.", "Melinda Clarke starred in numerous TV shows after Xena: Warrior Princess including The O.C. and Nikita." ], "decomposition": [ "Who played the roles of Amazons on Xena: Warrior Princess?", "Did Melinda Clarke do any other shows after Xena\" Warrior Princess?", "Is #2 listed in #1?" 
], "evidence": [ [ [ [ "Melinda Clarke-3" ] ], [ [ "Melinda Clarke-4" ] ], [ "operation" ] ], [ [ [ "Xena: Warrior Princess-14" ], "no_evidence" ], [ [ "Melinda Clarke-3" ] ], [ "operation" ] ], [ [ [ "Melinda Clarke-3" ] ], [ [ "Melinda Clarke-1" ] ], [ "operation" ] ] ] }, { "qid": "260f4a917a88cc48335a", "term": "General Motors", "description": "American automotive manufacturing company", "question": "Can you purchase General Motors products at a movie theater?", "answer": false, "facts": [ "General Motors sells automobiles, automobile parts, and financial services", "Movie theaters sell movie tickets, snacks, and beverages" ], "decomposition": [ "What kinds of products does General Motors sell?", "What kinds of products are sold in movie theaters?", "Are #1 the same as #2?" ], "evidence": [ [ [ [ "General Motors-1" ] ], [ [ "Movie theater-23" ] ], [ "operation" ] ], [ [ [ "General Motors-6" ] ], [ [ "Movie theater-51" ] ], [ "operation" ] ], [ [ [ "General Motors-1" ] ], [ [ "Movie theater-3" ] ], [ "operation" ] ] ] }, { "qid": "3b1834a6dd331eb1d791", "term": "Chief executive officer", "description": "Highest-ranking corporate officer or administrator", "question": "Would a CEO typically clean the toilets in a company's building?", "answer": false, "facts": [ "The CEO is the highest-ranking corporate position in an organization.", "Cleaning toilets is a job typically done by janitors or facility workers in a company." ], "decomposition": [ "What are the general duties of the CEO of an organization?", "Is cleaning the toilets of the company's building one of #1?" ], "evidence": [ [ [ [ "Chief executive officer-3" ] ], [ "operation" ] ], [ [ [ "Chief executive officer-3" ] ], [ "operation" ] ], [ [ [ "Founder CEO-7" ] ], [ "operation" ] ] ] }, { "qid": "a68cfdfc49fdc73dbd2e", "term": "Lapidary", "description": "gemstone cutter", "question": "Was Dioskourides a lapidary?", "answer": true, "facts": [ "A lapidary is a person or machine who cuts gemstones; classically, it refers to a person who engraves gemstones.", "Dioskourides put his signature on a Roman amethyst ringstone with a portrait of Demosthenes circa late 1st century BC.", "Artists sign their work." ], "decomposition": [ "What is a lapidary?", "What do #1's do once they finish their work?", "Did Dioskourides do #1 and #2?" ], "evidence": [ [ [ [ "Lapidary-1" ] ], [ "no_evidence" ], [ [ "Pedanius Dioscorides-1" ], "operation" ] ], [ [ [ "Lapidary-1" ] ], [ [ "Lapidary-12" ], "no_evidence" ], [ [ "Pedanius Dioscorides-1" ], "no_evidence", "operation" ] ], [ [ [ "Lapidary-1" ] ], [ [ "Lapidary-11" ] ], [ [ "Pedanius Dioscorides-1" ], "operation" ] ] ] }, { "qid": "67c026fe6a026c575755", "term": "Donatello", "description": "Italian painter and sculptor", "question": "Can an adult male stand on top Donatello's bronze David and touch the Sistine Chapel ceiling?", "answer": false, "facts": [ "Donatello's created a bronze David statue that stands over five feet tall.", "The average height of an adult male is five feet and nine inches.", "The tallest adult male in history was eight feet and eleven inches.", "The Sistine Chapel ceiling is sixty eight feet high." ], "decomposition": [ "How tall is Donatello's bronze David statue?", "How tall is the tallest person to ever live?", "How tall is the Sistine Chapel ceiling?", "Is #1 plus #2 greater than or equal to #3?" 
], "evidence": [ [ [ "no_evidence" ], [ [ "Robert Wadlow-1", "Robert Wadlow-2" ] ], [ [ "Sistine Chapel-15" ] ], [ "no_evidence", "operation" ] ], [ [ [ "David (Donatello)-5" ], "no_evidence" ], [ [ "Robert Wadlow-2" ] ], [ [ "Sistine Chapel-15" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Bronze Horseman-8" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "ff8cbfd0e19d8c30b1c1", "term": "Arab–Israeli conflict", "description": "geopolitical conflict in the Middle East and North Africa", "question": "Was England directly involved in the Arab-Israeli conflict?", "answer": true, "facts": [ "The Arab-Israeli conflict began hundreds of years ago.", "England occupied what is now Israel until 1945 when Israel became independent.", "During Israel's war for independence England allied with their Arab neighbors to fight the Israeli rebels." ], "decomposition": [ "Which nation did Israel gain independence from?", "Which other nation did #1 ally with to resist Israeli rebels during the war for independence?", "Is #1 England and #2 Arab?" ], "evidence": [ [ [ [ "Israel-5" ] ], [ [ "Arab–Israeli conflict-3" ] ], [ "no_evidence" ] ], [ [ [ "United Nations Partition Plan for Palestine-1", "United Nations Partition Plan for Palestine-2" ], "no_evidence" ], [ [ "1948 Arab–Israeli War-14", "1948 Arab–Israeli War-9", "Israel-30" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Israel-32" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2e508efbf0b72f1af2c2", "term": "Quartz", "description": "mineral composed of silicon and oxygen atoms in a continuous framework of SiO₄ silicon–oxygen tetrahedra, with each oxygen being shared between two tetrahedra, giving an overall chemical formula of SiO₂", "question": "Could Quartz be useful to humans if plants died off and there was no oxygen?", "answer": true, "facts": [ "Plants produce oxygen which is needed by humans to survive.", "Quartz is a hard mineral substance made of several elements.", "Quartz is composed of silicon and oxygen.", "Quartz can be melted at high temperatures." ], "decomposition": [ "What are the constituents elements of quartz?", "Is oxygen included in #1?" ], "evidence": [ [ [ [ "Quartz-1" ] ], [ "operation" ] ], [ [ [ "Quartz-1" ] ], [ "operation" ] ], [ [ [ "Quartz-1" ] ], [ [ "Oxygen-1" ] ] ] ] }, { "qid": "c03e06230ef966bacf58", "term": "Alice's Adventures in Wonderland", "description": "book by Lewis Carroll", "question": "Did Alice's Adventures in Wonderland inspire Macbeth?", "answer": false, "facts": [ "Alice's Adventures in Wonderland was published in 1865", "Macbeth was first performed in 1606" ], "decomposition": [ "When was Alice's Adventures in Wonderland first published?", "When was Macbeth first performed?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Alice's Adventures in Wonderland-1" ] ], [ [ "Macbeth-1" ] ], [ "operation" ] ], [ [ [ "Alice's Adventures in Wonderland-3" ] ], [ [ "Macbeth-32" ] ], [ "operation" ] ], [ [ [ "Alice's Adventures in Wonderland-1" ] ], [ [ "Macbeth-1" ] ], [ "operation" ] ] ] }, { "qid": "9d279701351e38e0400f", "term": "United States Secretary of State", "description": "U.S. cabinet member and head of the U.S. State Department", "question": "Can United States Secretary of State do crimes in U.K. 
without being arrested?", "answer": true, "facts": [ "Diplomatic Immunity allows for diplomats in other countries to not be tried for their transgressions.", "Countries that signed the Vienna Convention on Diplomatic Relations allow for Diplomatic Immunity.", "All UN member states besides Palau, The Solomon Islands, and South Sudan have signed the Vienna Convention on Diplomatic Relations treaty.", "The U.K. is one of the original UN member nations." ], "decomposition": [ "Under which agreement is modern diplomatic immunity applicable?", "Which countries have signed #1?", "Is the U.K. included in #2?" ], "evidence": [ [ [ [ "Diplomatic immunity-14" ] ], [ [ "Vienna Convention on Consular Relations-7" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Diplomatic immunity-11", "Diplomatic immunity-14", "Diplomatic immunity-17" ], "no_evidence" ], [ [ "Vienna Convention on Diplomatic Relations-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Diplomatic immunity-14" ] ], [ [ "Vienna Convention on Diplomatic Relations-8" ], "no_evidence" ], [ [ "Diplomatic immunity-10", "Diplomatic immunity-53" ], "operation" ] ] ] }, { "qid": "a593481c1ddb91e9f96b", "term": "Asian black bear", "description": "species of mammal", "question": "Is the Asian black bear multicolored?", "answer": true, "facts": [ "The Asian black bear is an animal that lives in habitats with trees.", "Multicolored refers to anything that is composed of more than one color.", "The Sian black bear has a black coat with a white V-shaped patch." ], "decomposition": [ "How many colors of fur does the asian black bear have?", "Is #1 greater than 1?" ], "evidence": [ [ [ [ "Asian black bear-2" ] ], [ "operation" ] ], [ [ [ "Asian black bear-2" ] ], [ "operation" ] ], [ [ [ "Asian black bear-2" ] ], [ "operation" ] ] ] }, { "qid": "7965756a4cb26817954b", "term": "Wembley Arena", "description": "An indoor arena in Wembley, London", "question": "Can you see the moon in Wembley Arena?", "answer": false, "facts": [ "Wembley Arena is an indoor arena.", "The moon is located in the sky.", " You cannot see the sky if you are indoors." ], "decomposition": [ "What is Wembley Arena?", "Where is the moon located?", "Can you see #2 from #1?" ], "evidence": [ [ [ [ "Wembley Arena-1" ] ], [ [ "Moon-1" ] ], [ "operation" ] ], [ [ [ "Wembley Arena-1" ] ], [ [ "Moon-3" ] ], [ "operation" ] ], [ [ [ "Wembley Arena-59" ] ], [ [ "Moon-1" ] ], [ [ "Moon-1" ], "operation" ] ] ] }, { "qid": "97f9a0a30c9dc2e77d42", "term": "5", "description": "Natural number", "question": "Does Homer Simpson need two hands worth of fingers to count to 5?", "answer": true, "facts": [ "Homer Simpson is a character of the long running comedy animated series \"The Simpsons\".", "All characters in \"The Simpsons\" have 4 fingers on each hand." ], "decomposition": [ "How many fingers does Homer Simpson have on each hand?", "Is #1 less than 5?" 
], "evidence": [ [ [ [ "Trilogy of Error-1" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Homer Simpson-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cd1a9cb828c1cbc7b2d1", "term": "Swan Lake", "description": "Ballet by Pyotr Ilyich Tchaikovsky", "question": "Does open heart surgery finish before entirety of American Ballet Theatre's Swan Lake?", "answer": false, "facts": [ "The American Ballet theatre's Swan Lake has a run time of 145 minutes.", "The National Heart, Lung, and Blood Institute states that a coronary artery bypass takes 3 to 6 hours" ], "decomposition": [ "How long is a performance of Swan Lake?", "How long does it take to perform a coronary artery bypass?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Swan Lake-2" ], "no_evidence" ], [ [ "Coronary artery bypass surgery-2" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "80e1855030cd5c8f136a", "term": "Lieutenant", "description": "junior commissioned officer in many nations' armed forces", "question": "Are pirate lieutenants like navy lieutenants?", "answer": true, "facts": [ "Lieutenant Richards was a pirate that sailed alongside Blackbeard.", "Lieutenant RIchards had many duties and was placed in charge of the ship, Bonnet's Revenge.", "In the Royal Navy and Commonwealth navies, the second-in-command of a vessel is known as the first lieutenant.", "Royal Navy lieutenants are in charge of other crew members and have many duties." ], "decomposition": [ "What are the duties of a pirate lieutenant?", "What are the duties of a navy lieutenant?", "Is there significant overlap between #1 and #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Lieutenant (navy)-10", "Lieutenant (navy)-11" ] ], [ "operation" ] ], [ [ [ "Piracy-87" ], "no_evidence" ], [ [ "Lieutenant (navy)-1", "Lieutenant (navy)-11" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Governance in 18th-century piracy-4" ], "no_evidence" ], [ [ "Lieutenant (navy)-12" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "249673d73deb8d6c28f9", "term": "Rand Paul", "description": "American politician, ophthalmologist, and United States Senator from Kentucky", "question": "Is Rand Paul guilty of catch-phrase used to attack John Kerry in 2004?", "answer": true, "facts": [ "John Kerry was attacked by his opponents in the 2004 Presidential Campaign by being called a Flip-Flopper.", "A Flip-Flopper is someone that makes a complete change in policy from one thing to another.", "In May 2010 Rand Paul called for an electronic fence to keep out immigrants and deplored amnesty in any form.", "In 2013 Rand Paul said he was in favor of undocumented immigrants being granted legal status." ], "decomposition": [ "What catch-phrase was used against John Kerry in 2004?", "What policy changes has Rand Paul expressed?", "Would #2 be considered #1?" 
], "evidence": [ [ [ [ "Flip-flop (politics)-5" ] ], [ [ "Political positions of Rand Paul-63" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Rand Paul-85" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Flip-flop (politics)-5" ] ], [ [ "Rand Paul-67" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "5a83f0148ac78b495f2f", "term": "Robert De Niro", "description": "American actor, director and producer", "question": "Does Robert De Niro use a microscope at work?", "answer": false, "facts": [ "A microscope is a tool used by scientists.", "Robert De Niro works on movies and television shows, and he is not a scientist." ], "decomposition": [ "What general profession uses a microscope?", "What is Robert De Niro's profession?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Microscope-1" ], "no_evidence" ], [ [ "Robert De Niro-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Microscope-1" ] ], [ [ "Robert De Niro-1" ] ], [ "operation" ] ], [ [ [ "Microscope-1", "Scientist-1" ] ], [ [ "Robert De Niro-4" ] ], [ "operation" ] ] ] }, { "qid": "5179b1021cc1b2fe1aa6", "term": "Uranium", "description": "Chemical element with atomic number 92", "question": "Would Gordon Ramsey use uranium as a seasoning?", "answer": false, "facts": [ "Gordon Ramsey is a chef known for producing high quality food ", "Uranium is a toxic and weakly radioactive metal" ], "decomposition": [ "What was Gordon Ramsay's major occupation?", "Is Uranium commonly used as seasoning by a #1?" ], "evidence": [ [ [ [ "Gordon Ramsay-1" ] ], [ [ "Uranium-13" ], "operation" ] ], [ [ [ "Gordon Ramsay-1" ] ], [ [ "Depleted uranium-50", "Seasoning-2", "Uranium-3" ] ] ], [ [ [ "Gordon Ramsay-1" ] ], [ [ "Uranium-3" ], "operation" ] ] ] }, { "qid": "904898291b7b33f260e7", "term": "Richard Dawkins", "description": "English ethologist, evolutionary biologist and author", "question": "Would Jacques Duèze have been friends with Richard Dawkins?", "answer": false, "facts": [ "Jacques Duèze was later Pope John XXII.", "The Pope is the head of the Catholic Church, a Christian organization.", "Christianity is a religion.", "Richard Dawkins is a prominent critic of religion." ], "decomposition": [ "What is the occupation of Jacques Duèze?", "In what field or industry is #1?", "Is #2 a field or industry which Richard Dawkins supports?" ], "evidence": [ [ [ [ "Pope John XXII-1" ] ], [ [ "Pope-1" ] ], [ [ "Richard Dawkins-4" ], "operation" ] ], [ [ [ "Pope John XXII-2" ] ], [ [ "Pope John XXII-1" ] ], [ [ "Richard Dawkins-1", "Richard Dawkins-3" ], "operation" ] ], [ [ [ "Pope John XXII-1" ] ], [ [ "Catholic Church-1", "Christianity-1" ] ], [ [ "Atheism-1", "Richard Dawkins-3" ] ] ] ] }, { "qid": "08271060e7c75ea1e8b8", "term": "Lorem ipsum", "description": "Placeholder text used in publishing and graphic design", "question": "Should a finished website have lorem ipsum paragraphs?", "answer": false, "facts": [ "Lorem Ipsum paragraphs are meant to be temporary.", "Web designers always remove lorem ipsum paragraphs before launch." ], "decomposition": [ "What is a lorem ipsum paragraph? ", "Is #1 good to have on a website?" ], "evidence": [ [ [ [ "Lorem ipsum-1" ] ], [ [ "Lorem ipsum-1", "Lorem ipsum-2" ] ] ], [ [ [ "Lorem ipsum-1" ] ], [ "operation" ] ], [ [ [ "Lorem ipsum-1" ] ], [ "operation" ] ] ] }, { "qid": "fccaf0383b7315827b07", "term": "Robert Downey Jr.", "description": "American actor", "question": "Was Robert Downey Jr. a good role model as a young man?", "answer": false, "facts": [ "As a young man, Robert Downey Jr. 
struggled with drug and alcohol addiction.", "Robert Downey Jr. shot a gun out of a car window while doing drugs as a young adult." ], "decomposition": [ "Did Robert Downey Jr. completely avoid dangerous, irresponsible behavior as a young adult?" ], "evidence": [ [ [ [ "Robert Downey Jr.-12", "Robert Downey Jr.-13" ], "operation" ] ], [ [ [ "Robert Downey Jr.-12", "Robert Downey Jr.-5" ] ] ], [ [ [ "Robert Downey Jr.-12", "Robert Downey Jr.-5" ] ] ] ] }, { "qid": "3d7050c0e9bfbeef27c1", "term": "Melania Trump", "description": "First Lady of the United States", "question": "Did Melania Trump have the same profession as Olga Kurylenko?", "answer": true, "facts": [ "Melania Trump is the first lady of the United States and was previously a model.", "Olga Kurylenko is a professional actress that also works as a model." ], "decomposition": [ "What professions has Melania Trump had?", "What professions has Olga Kurylenko had?", "Is at least one profession listed in #1 also found in #2?" ], "evidence": [ [ [ [ "Melania Trump-1" ] ], [ [ "Olga Storozhenko-1" ] ], [ "operation" ] ], [ [ [ "Melania Trump-1" ] ], [ [ "Olga Kurylenko-2" ] ], [ "operation" ] ], [ [ [ "Melania Trump-1" ] ], [ [ "Olga Kurylenko-2" ] ], [ "operation" ] ] ] }, { "qid": "964da699d1ada747b266", "term": "BBC World Service", "description": "The BBC's international radio station", "question": "Is the BBC World Service hosted in Europe?", "answer": true, "facts": [ "The BBC World Service is part of the BBC network.", "The BBC operates in England.", "England is part of Europe." ], "decomposition": [ "Where is the BBC World Service located?", "Is #1 located in Europe?" ], "evidence": [ [ [ [ "BBC World Service-15" ] ], [ [ "London-1", "Outline of the United Kingdom-1" ] ] ], [ [ [ "BBC World Service-15" ] ], [ "operation" ] ], [ [ [ "BBC World Service-2" ] ], [ [ "United Kingdom-25" ] ] ] ] }, { "qid": "83e79d9b291f5619b6ed", "term": "Atheism", "description": "Absence of belief in the existence of deities", "question": "Was Mother Theresa a follower of atheism?", "answer": false, "facts": [ "Mother Theresa was a Catholic nun.", "Atheism is the absence of belief in a deity.", "Catholics believe in the Holy Trinity, which is a representation of God." ], "decomposition": [ "What was Mother Teresa's religion?", "Can an adherent of #1 be regarded as a follower of atheism?" ], "evidence": [ [ [ [ "Mother Teresa-1" ] ], [ [ "Atheism-1", "Catholic Church-1", "Christianity-1" ] ] ], [ [ [ "Missionaries of Charity-1" ] ], [ [ "Atheism-1" ], "operation" ] ], [ [ [ "Mother Teresa-19" ] ], [ [ "Atheism-7" ] ] ] ] }, { "qid": "9576ab36999e196b87bb", "term": "EastEnders", "description": "British soap opera", "question": "Is it possible to binge the entire EastEnders series without water?", "answer": false, "facts": [ "British TV series EastEnders has over 6,000 episodes as of 2020.", "It would take approximately 125 days to binge watch the entire EastEnders TV series.", "A human can last only 4 days without water." ], "decomposition": [ "How many days can a human last without water?", "How many episodes are there in the EastEnders series?", "How many days would it take to binge watch #2 average-length episodes?", "Is #3 less than or equal to #1?" 
], "evidence": [ [ [ [ "Survival skills-13" ] ], [ "no_evidence" ], [ [ "EastEnders-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Dehydration-2" ], "no_evidence" ], [ [ "EastEnders-1" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Dehydration-2" ], "no_evidence" ], [ [ "EastEnders-86" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "384f2454d749208556d8", "term": "Telescope", "description": "Optical instrument that makes distant objects appear magnified", "question": "Would stargazers prefer binoculars over a telescope?", "answer": false, "facts": [ "Depending on a stargazer's goal, the scope of view necessary can change. ", "Companies produce both telescopes and binoculars for stargazing. " ], "decomposition": [ "How does the scope of a stargazer's observation vary?", "Does #1 stay the same?" ], "evidence": [ [ [ [ "Telescope-1" ] ], [ [ "Binoculars-1" ] ] ], [ [ [ "Observational astronomy-1" ], "no_evidence" ], [ [ "Binoculars-1" ], "operation" ] ], [ [ [ "Amateur astronomy-1" ] ], [ "operation" ] ] ] }, { "qid": "9aceffc7f768c0ecf400", "term": "Psychic", "description": "person who claims to use extrasensory perception to identify information hidden from the normal senses", "question": "Would Carrie Poppy be likely to trust a psychic?", "answer": false, "facts": [ "Carrie Poppy is an American podcaster who is on a show called 'Oh No! Ross and Carrie.\"", "\"Oh No Ross and Carrie\" is a show that critically examines religious claims and those of the supernatural.", "Carrie Poppy considers herself a skeptic and an investigative reporter." ], "decomposition": [ "What is Carrie Poppy's profession?", "What show is Carrie Poppy the #1 of?", "What is the main of #2?", "On #3, what stance does Carrie Poppy take as a reporter?", "Would someone who is #4 likely trust psychics?" ], "evidence": [ [ [ [ "Oh No, Ross and Carrie!-14" ] ], [ [ "Oh No, Ross and Carrie!-1" ] ], [ [ "Oh No, Ross and Carrie!-4" ] ], [ [ "Oh No, Ross and Carrie!-14" ] ], [ "operation" ] ], [ [ [ "Oh No, Ross and Carrie!-2" ] ], [ [ "Oh No, Ross and Carrie!-1" ] ], [ [ "Oh No, Ross and Carrie!-1" ], "no_evidence" ], [ [ "Oh No, Ross and Carrie!-8" ] ], [ "no_evidence" ] ], [ [ [ "Oh No, Ross and Carrie!-14" ] ], [ [ "Oh No, Ross and Carrie!-1" ] ], [ "no_evidence" ], [ [ "Oh No, Ross and Carrie!-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "61a3344640c947f7ce81", "term": "François Mitterrand", "description": "21st President of the French Republic", "question": "Did Francois Mitterrand ever meet Barak Obama while they both held the position of President?", "answer": false, "facts": [ "Mitterand was President of France from 1981 through 1995.", "Obama was President of the United States from 2009 to 2017" ], "decomposition": [ "When was Mitterand's final year as President of France?", "When was Obama's first year as President of the United States?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "François Mitterrand-2" ] ], [ [ "Presidency of Barack Obama-1" ] ], [ "operation" ] ], [ [ [ "François Mitterrand-1" ] ], [ [ "Barack Obama-1" ] ], [ "operation" ] ], [ [ [ "François Mitterrand-1" ] ], [ [ "Barack Obama-1" ] ], [ "operation" ] ] ] }, { "qid": "e39243fb96d47aba82a2", "term": "Jackson Pollock", "description": "American painter", "question": "Was Jackson Pollock trained by Leonardo da Vinci?", "answer": false, "facts": [ "Leonardo lived during the Italian Renaissance in the 17th century.", "Jackson Pollock lived during the 20th century." ], "decomposition": [ "When did Leonardo da Vinci die?", "When was Jackson Pollock born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Leonardo da Vinci-1" ] ], [ [ "Jackson Pollock-1" ] ], [ "operation" ] ], [ [ [ "Leonardo da Vinci-1" ] ], [ [ "Jackson Pollock-1" ] ], [ "operation" ] ], [ [ [ "Leonardo da Vinci-27" ], "no_evidence" ], [ [ "Jackson Pollock-4" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "3b0c98640caf02aa66e8", "term": "Beaver", "description": "Genus of mammals", "question": "Does the land in close proximity to beaver dams suffer?", "answer": true, "facts": [ "Beaver dams often lead to flooding in the areas around them.", "Flooding can lead to loosening of the soil.", "Loosened soil can cause trees to fall over. ", "Flooding can lead to soil erosion." ], "decomposition": [ "What are the effects of beaver dams on surrounding lands?", "Are any of #1 significantly negative?" ], "evidence": [ [ [ [ "Beaver eradication in Tierra del Fuego-4", "North American beaver-9" ] ], [ "operation" ] ], [ [ [ "Beaver dam-11" ] ], [ [ "Beaver dam-26" ] ] ], [ [ [ "Beaver dam-26" ] ], [ "no_evidence" ] ] ] }, { "qid": "05046d2981f68118f047", "term": "Mona Lisa", "description": "Painting by Leonardo da Vinci", "question": "Is the Mona Lisa in the same museum as the Venus de Milo?", "answer": true, "facts": [ "The Mona Lisa is in the Louvre.", "The Venus de Milo is in the Louvre." ], "decomposition": [ "What museum stores the Mona Lisa?", "What museum stores the Venus de Milo?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Mona Lisa-54" ] ], [ [ "Venus de Milo-2" ] ], [ "operation" ] ], [ [ [ "Mona Lisa-18" ] ], [ [ "Venus de Milo-17" ] ], [ "operation" ] ], [ [ [ "Mona Lisa-2" ] ], [ [ "Venus de Milo-2" ] ], [ "operation" ] ] ] }, { "qid": "ef8cd9d65ecb85d74a19", "term": "Will Ferrell", "description": "American actor, comedian, producer, writer and businessman", "question": "Would it be difficult for Will Ferrell to win Empire Award for Best Newcomer?", "answer": true, "facts": [ "The Empire Award for Best Newcomer was awarded for an actor in their debut role.", "Will Ferrell debuted in 1995." ], "decomposition": [ "When do actors get to win the Empire Award for Best Newcomer?", "When did Will Ferrell participate in #1?", "Is #2 a long time ago?" 
], "evidence": [ [ [ [ "Empire Award for Best Newcomer-1" ] ], [ [ "Will Ferrell-1" ] ], [ "operation" ] ], [ [ [ "23rd Empire Awards-1", "Empire Award for Best Male Newcomer-1" ] ], [ [ "Will Ferrell-1" ] ], [ "operation" ] ], [ [ [ "Empire Award for Best Newcomer-1" ] ], [ [ "On Our Own (1994 TV series)-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "15ea72669f16beecac5a", "term": "Confederate States Army", "description": "Southern army in American Civil War", "question": "Did Confederate States Army influence West Point fashion?", "answer": true, "facts": [ "The Confederate States Army was clad in cadet gray uniforms.", "West Point uniforms are cadet gray and white.", "Confederate States Army uniforms contained Generally, the uniform jacket of the Confederate soldier was single breasted, made of gray or brown fabric, with a six to nine button front and hat.", " West Point uniforms contain a standing collar, white trousers, and black shakos (known as a \"tarbucket hat\" in U.S. Army nomenclature)." ], "decomposition": [ "What were the main features of the Confederate States Army uniforms?", "What are the most notable features of West Point uniforms?", "Is there a significant overlap between #1 and #2?" ], "evidence": [ [ [ [ "Uniforms of the Confederate States Armed Forces-17" ] ], [ [ "United States Military Academy-62" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Confederate States Army-49" ], "no_evidence" ], [ [ "United States Military Academy-74" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Uniforms of the Confederate States Armed Forces-12" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "6db1f3951b0c7bf21acd", "term": "Biologist", "description": "Scientist studying living organisms", "question": "Is it possible for biologist Rachel Carson to have flown to the moon?", "answer": false, "facts": [ "Rachel Carlson died in 1964", "The first humans landed on the moon in 1969" ], "decomposition": [ "When did Rachel Carson die?", "When did humans first land on the moon?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Rachel Carson-44" ] ], [ [ "Moon landing-12" ] ], [ "operation" ] ], [ [ [ "Rachel Carson-1" ] ], [ [ "Apollo 11-1" ] ], [ "operation" ] ], [ [ [ "Rachel Carson-44" ] ], [ [ "Apollo 11-1" ] ], [ "operation" ] ] ] }, { "qid": "b76f30e808b1bfdbee2b", "term": "John Lennon", "description": "English singer and songwriter, founding member of the Beatles", "question": "Did Cynthia Powell celebrate a silver anniversary with John Lennon?", "answer": false, "facts": [ "A silver anniversary takes place during the 25th year of marriage.", "Cynthia Powell married John Lennon in 1962.", "Cynthia Powell and John Lennon got divorced in 1968." ], "decomposition": [ "People have to be married for how many years for them to celebrate a silver anniversary?", "When did Cynthia Powell marry John Lennon?", "When Cynthia Powell divorce John Lennon?", "What is #3 minus #2?", "Is #4 greater than or equal to #1?" 
], "evidence": [ [ [ [ "Silver jubilee-1" ] ], [ [ "Cynthia Lennon-12" ] ], [ [ "Cynthia Lennon-31" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Silver jubilee-1" ] ], [ [ "Cynthia Lennon-12" ] ], [ [ "Cynthia Lennon-31" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Silver jubilee-1" ] ], [ [ "Cynthia Lennon-12" ] ], [ [ "John Lennon-41" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "f087d393e2ab1f8783f5", "term": "1980 United States presidential election", "description": "49th quadrennial presidential election in the United States", "question": "Can 1980 United States presidential election result be considered a photo finish?", "answer": false, "facts": [ "A photo finish refers to a race or finish in which contestants are so close that a photograph of them as they cross the finish line has to be examined.", "Ronald Reagan had 489 electoral votes while Jimmy Carter had 49 in the 1980 United States presidential election.", "Ronald Reagan won 44 states while Jimmy Carter won 6 in the 1980 United States presidential election." ], "decomposition": [ "Who ran for the Democrats in the 1980 US presidential election?", "Who ran for the Republicans in the 1980 US presidential election?", "How many electoral votes did #1 receive?", "How many electoral votes did #2 receive?", "Is #3 close to #4?" ], "evidence": [ [ [ [ "1980 United States presidential election-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "1980 United States presidential election-4" ] ], [ [ "1980 United States presidential election-4" ] ], [ [ "History of the United States (1980–1991)-22" ] ] ], [ [ [ "1980 United States presidential election-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "History of the United States (1980–1991)-22" ] ], [ [ "History of the United States (1980–1991)-22" ] ], [ [ "History of the United States (1980–1991)-22" ], "operation" ] ], [ [ [ "1980 United States presidential election-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "1980 United States presidential election-50" ] ], [ [ "1980 United States presidential election-50" ] ], [ "operation" ] ] ] }, { "qid": "e5b8fb5f2431ac1396ec", "term": "Lenovo", "description": "Chinese multinational technology company", "question": "Could a monolingual American read Lenovo's native name?", "answer": false, "facts": [ "Lenovo's native name is 联想集团有限公司.", "Someone who is monolingual only speaks one language.", "The typical monolingual American would only be able to read English.", "Someone who can only read English is therefore unable to read Chinese." ], "decomposition": [ "What is Lenovo's native name?", "What language is #1 in?", "What language does a monolingual American speak?", "Is #3 the same as #2?" ], "evidence": [ [ [ [ "Lenovo-51" ] ], [ [ "Lenovo-1" ] ], [ [ "American English-2" ] ], [ "operation" ] ], [ [ [ "Lenovo-1" ], "no_evidence" ], [ [ "Chinese language-1" ], "no_evidence" ], [ [ "American English-2" ] ], [ "operation" ] ], [ [ [ "Lenovo-51" ] ], [ [ "Lenovo-51" ] ], [ [ "Official language-39" ] ], [ "operation" ] ] ] }, { "qid": "ac3de52d3bc2e3b5d8a4", "term": "Reproduction", "description": "Biological process by which new organisms are generated from one or more parent organisms", "question": "Would an environmentalist advocate for preventing domestic canine reproduction?", "answer": true, "facts": [ "Domestic dogs are a large contributor to species depopulation and displacement.", "Domestic dogs have a diet that largely contributes to a harmful environmental impact. 
" ], "decomposition": [ "What do environmentalists try to protect?", "Do domestic dogs harm #1?" ], "evidence": [ [ [ [ "Environmentalism-1" ] ], [ [ "Dog-61" ], "no_evidence", "operation" ] ], [ [ [ "Environmentalist-1" ] ], [ [ "Dog-42" ], "no_evidence", "operation" ] ], [ [ [ "Environmentalist-1" ] ], [ [ "Dog-30", "Overpopulation in domestic pets-4" ], "no_evidence" ] ] ] }, { "qid": "b679b7210ebf7fec17b8", "term": "Othello", "description": "play by Shakespeare", "question": "Are there options for students who struggle to understand the writing style of Othello?", "answer": true, "facts": [ "\"No Fear Shakespeare\" is a line of books that translate the language of original Shakespeare plays into modern English.", "Scripts can be understood more easily when read alongside a production of the play itself.", "\"No Fear Shakespeare\" features Othello in their book lineup." ], "decomposition": [ "What is the name of a line of books that translate the language of Shakespeare plays into modern English?", "Does #1 feature Othello in their book lineup?" ], "evidence": [ [ [ [ "Hamlet-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "SparkNotes-5" ] ], [ "no_evidence", "operation" ] ], [ [ [ "SparkNotes-5" ] ], [ "no_evidence" ] ] ] }, { "qid": "7162ff9d73e524fef983", "term": "French Defence", "description": "Chess opening", "question": "Would most grand masters know what the French Defense is?", "answer": true, "facts": [ "Grand master is the highest title a chess player can get.", "The French Defense is a well known chess opening that is in many books." ], "decomposition": [ "What is the French Defence?", "What is a Grandmaster?", "Would #2 likely know what #1 is?" ], "evidence": [ [ [ [ "French Defence-1" ] ], [ [ "Grandmaster (chess)-1" ] ], [ "operation" ] ], [ [ [ "French Defence-46" ] ], [ [ "Grandmaster (chess)-1" ] ], [ "operation" ] ], [ [ [ "French Defence-2" ] ], [ [ "Grandmaster (chess)-1" ] ], [ "operation" ] ] ] }, { "qid": "67079beff747978ed4c2", "term": "MF Doom", "description": "US-based English rapper and producer", "question": "Is MF Doom a Fantastic Four villain?", "answer": false, "facts": [ "MF Doom is a British rapper raised on Long Island.", "Doctor Victor Von Doom is a fictional supervillain that made his debut in the Fantastic Four.", "The MF in MF Doom stands for Metal Face." ], "decomposition": [ "Which villains are featured in Fantastic Four?", "Is MF doom one of #1?" ], "evidence": [ [ [ [ "MF Doom-1" ] ], [ "operation" ] ], [ [ [ "Fantastic Four-54" ] ], [ "operation" ] ], [ [ [ "Fantastic Four-54" ] ], [ "operation" ] ] ] }, { "qid": "df1ec5fee0dec5410ab0", "term": "Snow White", "description": "fairy tale", "question": "Is Snow White an example of good consent?", "answer": false, "facts": [ "Snow White is unknowingly poisoned by a witch.", "Snow White is unconscious when a man kisses her without her knowledge.", "Consent involves knowingly allowing something to happen to oneself. " ], "decomposition": [ "What conditions can prevent someone from giving consent?", "Was Snow White free of any of #1?" 
], "evidence": [ [ [ [ "Sexual consent-25" ] ], [ [ "Snow White and the Seven Dwarfs (1937 film)-9" ], "operation" ] ], [ [ [ "Consent-18" ] ], [ [ "Snow White and the Seven Dwarfs (1937 film)-9" ], "operation" ] ], [ [ [ "Consent-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "2780b837af5373ff2cb4", "term": "The Matrix", "description": "1999 science fiction action film directed by the Wachowskis", "question": "Will Gremlins sequels tie number of Matrix sequels?", "answer": true, "facts": [ "The Matrix films had two sequels.", "Gremlins has one sequel, Gremlins 2: The New Batch.", "The script for a Gremlins 3 is being written by Carl Ellsworth." ], "decomposition": [ "How many sequels did The Matrix have?", "How many sequels did Gremlins have?", "How many Gremlins movies are currently being worked on?", "What is the sum of #2 and #3?", "Is #4 equal to #1?" ], "evidence": [ [ [ [ "The Matrix (franchise)-1" ] ], [ [ "Gremlins-48" ] ], [ [ "Gremlins-51" ] ], [ [ "The Matrix (franchise)-1" ] ], [ [ "The Matrix (franchise)-1" ] ] ], [ [ [ "The Matrix (franchise)-1" ] ], [ [ "Gremlins-2" ] ], [ [ "Gremlins: Secrets of the Mogwai-2" ], "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "The Matrix (franchise)-1" ] ], [ [ "Gremlins-2" ] ], [ [ "Gremlins 2: The New Batch-46" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "02fe9cc089adf6f19d22", "term": "Leipzig", "description": "Place in Saxony, Germany", "question": "Is the tree species that the name Leipzig refers to an evergeen tree?", "answer": false, "facts": [ "Leipzig is derived from the Slavic word Lipsk", "Lipsk means \"settlement where the linden trees stand\"", "Linden trees are deciduous trees" ], "decomposition": [ "Which species of tree is mentioned in the meaning of the name (of a city) Leipzig?", "Classifying by seasonal traits, what kind of tree is #1?", "Is being evergreen a characteristic of #2?" ], "evidence": [ [ [ [ "Leipzig-6" ] ], [ [ "Tilia-1", "Tilia-2" ] ], [ "operation" ] ], [ [ [ "Leipzig-6" ] ], [ [ "Tilia-2" ] ], [ [ "Evergreen-6" ] ] ], [ [ [ "Leipzig-6" ] ], [ [ "Tilia-1" ] ], [ "operation" ] ] ] }, { "qid": "16d7c42444ef13ef4643", "term": "Bandy", "description": "ballgame on ice played using skates and sticks", "question": "Can Kate Gosselin's household fill out a Bandy team?", "answer": false, "facts": [ "The game of Bandy includes 11 players on each team.", "Reality star Kate Gosselin has eight children." ], "decomposition": [ "How many players make up a Bandy team?", "How many people are in Kate Gosselin's household?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Bandy-2" ] ], [ [ "Kate Gosselin-12", "Kate Gosselin-4" ] ], [ "operation" ] ], [ [ [ "Bandy-2" ] ], [ [ "Kate Plus 8-1" ] ], [ "operation" ] ], [ [ [ "Bandy-2" ] ], [ [ "Kate Gosselin-1" ] ], [ "operation" ] ] ] }, { "qid": "e87b63e92165b417d37f", "term": "People for the Ethical Treatment of Animals", "description": "American animal rights organization", "question": "Is Michael Vick on People for the Ethical Treatment of Animals's hypothetical blacklist?", "answer": true, "facts": [ "People for the Ethical Treatment of Animals (PETA) is an animal rights organization that fights for the welfare of animals.", "A blacklist is a list of people that are unacceptable and should be avoided or excluded.", "Michael Vick is an ex-football player that spent 21 months in prison for his vicious dog fighting enterprise." 
], "decomposition": [ "What do the People for the Ethical Treatment of Animals advocate for?", "What crime has Michael Vick done time for?", "Does #2 strongly violate #1?" ], "evidence": [ [ [ [ "People for the Ethical Treatment of Animals-1" ] ], [ [ "Michael Vick-2" ] ], [ [ "Dog fighting in the United States-1" ] ] ], [ [ [ "People for the Ethical Treatment of Animals-11" ] ], [ [ "Michael Vick-2" ] ], [ "operation" ] ], [ [ [ "People for the Ethical Treatment of Animals-1" ] ], [ [ "Michael Vick-2" ] ], [ [ "Dog fighting-2" ], "operation" ] ] ] }, { "qid": "67f4435b81df96894ef8", "term": "Middle Ages", "description": "Period of European history from the 5th to the 15th century", "question": "Was dynamite used during Middle Ages warfare?", "answer": false, "facts": [ "The Middle Ages ended with the Fall of Constantinople in 1453.", "Dynamite was invented by Swedish chemist Alfred Nobel in the 1870s." ], "decomposition": [ "When was dynamite invented?", "When did the Middle Ages warfare take place?", "Is #1 within or before #2?" ], "evidence": [ [ [ [ "Dynamite-3" ] ], [ [ "Middle Ages-7" ] ], [ "operation" ] ], [ [ [ "Dynamite-1" ] ], [ [ "Middle Ages-1" ] ], [ "operation" ] ], [ [ [ "Dynamite-1" ] ], [ [ "Medieval warfare-25" ] ], [ "operation" ] ] ] }, { "qid": "ff2f4c5a037ca826fe2a", "term": "Downton Abbey", "description": "British historical drama television series", "question": "Would Downton Abbey finale viewership defeat every Kazakhstan citizen in tug of war?", "answer": false, "facts": [ "Downton Abbey's finale had a total of 9.6 million viewers.", "Kazakhstan has 18.7 million citizens as of 2020." ], "decomposition": [ "How many people watched Downton Abbey finale?", "How many people are Kazakh citizens?", "Is #1 greater than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Kazakhstan-2" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Kazakhstan-167" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Demographics of Kazakhstan-2" ] ], [ "operation" ] ] ] }, { "qid": "d57b4438cf31f3ca05fe", "term": "Roman numerals", "description": "Numbers in the Roman numeral system", "question": "Is MIX a word and a roman numeral?", "answer": true, "facts": [ "\"Mix\" means to combine in english.", "M equals one thousand in roman numerals", "I equals one in roman numerals ", "I before X in roman numerals equals nine.", "MIX equals one thousand nine in roman numerals. " ], "decomposition": [ "What does Mix mean in english language?", "Is Mix a number in Roman numerals?", "Based on #1 and #2, is mix both a word and a roman numeral?" ], "evidence": [ [ [ [ "Audio mixing (recorded music)-1" ] ], [ [ "Roman numerals-5" ] ], [ [ "Audio mixing (recorded music)-1", "Roman numerals-5" ] ] ], [ [ [ "Mix (magazine)-1", "Mixing (process engineering)-38" ], "no_evidence" ], [ [ "1009-1", "Roman numerals-1" ] ], [ "operation" ] ], [ [ [ "DJ mix-1" ] ], [ [ "Roman numerals-5" ] ], [ "operation" ] ] ] }, { "qid": "6047d085ff9246481d12", "term": "Little Women", "description": "1860s novel by Louisa May Alcott", "question": "Could Little Women have been a book read by veterans of the civil war?", "answer": true, "facts": [ "Little Women was published in 1868.", "The civil war ended in 1865." ], "decomposition": [ "When was the book Little Women written?", "When did the civil war take place?", "Could adults as of #2 still be alive by #1?" 
], "evidence": [ [ [ [ "Little Women-1" ] ], [ [ "American Civil War-1" ] ], [ "operation" ] ], [ [ [ "Little Women-1" ] ], [ [ "American Civil War-1" ] ], [ "operation" ] ], [ [ [ "Little Women-1" ] ], [ [ "American Civil War-1" ] ], [ "operation" ] ] ] }, { "qid": "b94f96243e515dba1dac", "term": "Stoning", "description": "execution method", "question": "Will a celibate cleric likely suffer a stoning in Somalia?", "answer": false, "facts": [ "A cleric is the term for a Muslim priest.", "Celibate people remain chaste and do not engage in relations with others.", "Stoning is a penalty in Somalia used to punish adulterers.", "Many Islamic militants have been in control of various parts of Somalia." ], "decomposition": [ "Which crime is punishable by stoning in Somalia?", "What relationship must a person guilty of #1 be in in order to be deemed guilty?", "Would a celibate cleric be involved in #2?" ], "evidence": [ [ [ [ "Sharia-16", "Sharia-4", "Somalia-158" ] ], [ [ "Adultery-1" ] ], [ [ "Celibacy-1" ], "operation" ] ], [ [ [ "Stoning-65" ] ], [ [ "Adultery-1" ] ], [ [ "Celibacy-1" ], "operation" ] ], [ [ [ "Stoning-83" ] ], [ [ "Stoning-83" ] ], [ [ "Stoning-83" ] ] ] ] }, { "qid": "9450ed6bc48b134956c5", "term": "Gray whale", "description": "species of mammal", "question": "Would a Gray Whale fit easily in an above ground pool?", "answer": false, "facts": [ "Gray whales are, on average, 39ft long.", "The average above ground pool is 10-33ft in diameter. " ], "decomposition": [ "What is the average size of an above ground pool?", "What is the size of average gray whale?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "Swimming pool-18" ] ], [ [ "Gray whale-7" ] ], [ "operation" ] ], [ [ [ "Swimming pool-1", "Swimming pool-23" ], "no_evidence" ], [ [ "Gray whale-1" ] ], [ "operation" ] ], [ [ [ "Swimming pool-23" ], "no_evidence" ], [ [ "Gray whale-1" ] ], [ "operation" ] ] ] }, { "qid": "eb4f40e76cc557e81cf0", "term": "Lactobacillus", "description": "genus of bacteria", "question": "Do you need lactobacillus to make pickles?", "answer": false, "facts": [ "Lactobacillus is used in the production of fermented-style pickles. ", "\"Quick\" pickles are made with a mixture of brine and vinegar." ], "decomposition": [ "What are some common methods of making pickles?", "Is lactobacillus required for all of #1?" ], "evidence": [ [ [ [ "Pickled cucumber-12", "Pickled cucumber-3", "Pickled cucumber-7" ], "no_evidence" ], [ [ "Lactobacillus-1" ], "operation" ] ], [ [ [ "Pickling-35", "Pickling-36", "Pickling-38" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Pickling-1" ] ], [ [ "Lactobacillus-1" ] ] ] ] }, { "qid": "9642838ebb5c1c382ade", "term": "Holy Saturday", "description": "Saturday before Easter Sunday", "question": "Did Holy Saturday 2019 have special significance to pot smokers?", "answer": true, "facts": [ "Holy Saturday 2019 took place on April 20th.", "April 20th, known as 4/20 day, National Pot Smokers Day, Weed Day or National Weed Day, is a holiday for pot smokers." ], "decomposition": [ "What date was Holy Saturday in 2019?", "What date is an unofficial holiday for pop smokers?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Holy Saturday-3" ], "no_evidence" ], [ [ "420 (cannabis culture)-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "420 (cannabis culture)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "2019 Australian federal election-38" ], "no_evidence" ], [ [ "420 (cannabis culture)-15" ] ], [ "operation" ] ] ] }, { "qid": "1aa02d78c5add228c611", "term": "Korea under Japanese rule", "description": "Japanese occupation of Korea from 1910–1945", "question": "Did people in Korea under Japanese Rule watch a lot of Iron Chef?", "answer": false, "facts": [ "The first televisions were sold in 1946.", "Iron Chef started airing in 1993." ], "decomposition": [ "During what years was Korea under the rule of the Japanese?", "In what year did Iron Chef first appear on television?", "Did #1 occur after #2?" ], "evidence": [ [ [ [ "Korea under Japanese rule-1" ] ], [ [ "Iron Chef-1" ] ], [ "operation" ] ], [ [ [ "Korea under Japanese rule-1" ] ], [ [ "Iron Chef-1" ] ], [ "operation" ] ], [ [ [ "World War II by country-161" ] ], [ [ "Iron Chef-1" ] ], [ "operation" ] ] ] }, { "qid": "ec71dcb7d6ace3e73ef9", "term": "Soup", "description": "primarily liquid food", "question": "Is shoe soup innocuous?", "answer": true, "facts": [ "Soup is a primarily liquid food containing various meats and beans.", "Director Werner Herzog lost a bet and cooked his shoe into a soup and ate it in 1980.", "Werner Herzog turned 77 in 2019 and had a role in the hit TV series the Mandalorian." ], "decomposition": [ "What film director ate shoe soup in the year 1980?", "Is #1 still alive?" ], "evidence": [ [ [ [ "Werner Herzog Eats His Shoe-1" ] ], [ [ "Werner Herzog-1" ], "operation" ] ], [ [ [ "Werner Herzog-12" ] ], [ [ "Werner Herzog-30" ], "operation" ] ], [ [ [ "Werner Herzog Eats His Shoe-1" ] ], [ [ "Werner Herzog-1" ] ] ] ] }, { "qid": "6497da19a2bb3f0dfcbb", "term": "Swallow", "description": "family of birds", "question": "In a hypothetical race between a Swallow and an American Woodcock, would the Swallow win?", "answer": true, "facts": [ "Swallow can fly about 30-40mph. ", "The American woodcock can fly approximately 5mph. " ], "decomposition": [ "How quickly can a swallow fly?", "How quickly can an American woodcock fly?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Barn swallow-18" ] ], [ [ "American woodcock-14" ] ], [ [ "Barn swallow-18" ] ] ], [ [ "no_evidence" ], [ [ "American woodcock-14" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Swallow-16" ], "no_evidence" ], [ [ "American woodcock-14" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "069c88860654c9c5df0b", "term": "Sea turtle", "description": "superfamily of reptiles", "question": "Can a sea turtle play tennis using a tennis racket?", "answer": false, "facts": [ "to play tennis, a human-like hand is needed to properly hold the tennis racket", "sea turtles have flippers and not human-like hands" ], "decomposition": [ "What body part does one need to hold a tennis racket?", "Do turtles have #1?" 
], "evidence": [ [ [ [ "Hand-3", "Hand-6" ] ], [ [ "Turtle-26", "Turtle-27" ], "operation" ] ], [ [ [ "Racket (sports equipment)-17" ] ], [ [ "Turtle-27" ], "operation" ] ], [ [ [ "Racket (sports equipment)-5" ] ], [ [ "Turtle-26", "Turtle-27" ], "operation" ] ] ] }, { "qid": "2da278a3d315ec89e21c", "term": "Marxism", "description": "Economic and sociopolitical worldview based on the works of Karl Marx", "question": "Are right wing Amreicans opposed to marxism?", "answer": true, "facts": [ "Right Wing Americans view socialism as an enemy to civil liberties and the economy.", "Socialism is a tenant of Marxism, giving workers the means of the production." ], "decomposition": [ "What stance do most right-wing Americans take towards socialism?", "Is #1 against that which Marxists proposes on the subject?" ], "evidence": [ [ [ [ "Right-wing politics-7" ] ], [ [ "Marxism-21" ], "operation" ] ], [ [ [ "Conservatism-1", "Conservatism-7" ] ], [ [ "Means of production-5" ], "operation" ] ], [ [ [ "Right-wing politics-7" ] ], [ [ "Timeline of Karl Marx-2" ] ] ] ] }, { "qid": "7b82cd99c7c68aa1b022", "term": "Confederate States Army", "description": "Southern army in American Civil War", "question": "Are there Americans still enlisted in the Confederate States Army?", "answer": false, "facts": [ "The Confederate States Army disbanded in 1865.", "The last living confederate soldier died in 1951." ], "decomposition": [ "What is the present status of the Confederate States Army?", "Considering #1, can there still be anyone enlisted?" ], "evidence": [ [ [ [ "Confederate States of America-8" ] ], [ "operation" ] ], [ [ [ "Confederate States Army-5" ] ], [ "operation" ] ], [ [ [ "Confederate States Army-1", "Confederate States Army-5" ] ], [ "operation" ] ] ] }, { "qid": "e83400194f9cf2a3b1ef", "term": "Stone Cold Steve Austin", "description": "American professional wrestler", "question": "Can Stone Cold Steve Austin apply his finisher to a mule deer?", "answer": true, "facts": [ "Steve Austin's finisher, The Stone Cold Stunner, involves lifting the opponent and then pulling their head down.", "The largest male mule deer's are around 330 pounds.", "Steve Austin has applied his finisher to the wrestler The Big Show.", "The Big Show weighs 383 pounds." ], "decomposition": [ "What activity is involved in Stone Cold Steve Austin's finisher?", "How much does a mule deer weigh?", "What was the weight of Stone Cold Steve Austin's largest opponent that he used #1 on?", "Is #2 less than or equal to #3?" ], "evidence": [ [ [ [ "Stone Cold Steve Austin-70", "Stunner (professional wrestling)-13" ] ], [ [ "Mule deer-6" ] ], [ [ "The Undertaker-72" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-70" ] ], [ [ "Mule deer-6" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-15", "Stunner (professional wrestling)-1" ] ], [ [ "Mule deer-6" ] ], [ [ "Stone Cold Steve Austin-21" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6265249e94a68463f6fe", "term": "DC Comics", "description": "U.S. comic book publisher", "question": "Did President William Howard Taft read DC Comics?", "answer": false, "facts": [ "DC Comics were founded in 1934.", "President William Howard Taft died on March 8, 1930." ], "decomposition": [ "When was DC Comics founded?", "When did President William Howard Taft die?", "Is #1 before #2?" 
], "evidence": [ [ [ [ "DC Comics-4" ] ], [ [ "William Howard Taft-1" ] ], [ "operation" ] ], [ [ [ "DC Comics-4" ] ], [ [ "William Howard Taft-1" ] ], [ "operation" ] ], [ [ [ "DC Comics-4" ] ], [ [ "William Howard Taft-1" ] ], [ "operation" ] ] ] }, { "qid": "2484c935b239f1454f8e", "term": "Reformation", "description": "Schism within the Christian Church in the 16th century", "question": "Did Barack Obama participate in the Reformation?", "answer": false, "facts": [ "The Reformation took place in the 16th century. ", "Barack Obama was born in 1961." ], "decomposition": [ "When did the Reformation take place?", "When was Barack Obama born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Christianity in the 16th century-39" ] ], [ [ "Barack Obama-1" ] ], [ "operation" ] ], [ [ [ "Reformation-19" ] ], [ [ "Barack Obama-6" ] ], [ "operation" ] ], [ [ [ "Reformation-1" ] ], [ [ "Barack Obama-1" ] ], [ "operation" ] ] ] }, { "qid": "69ccc55206ac47a0d312", "term": "Seinfeld", "description": "American sitcom", "question": "Could you watch a new Seinfeld episode every day for a year?", "answer": false, "facts": [ "There are 365 days in a year.", "There are a total of 180 Seinfeld episodes." ], "decomposition": [ "How many days are there in a year?", "How many Seinfeld episodes are there?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Seinfeld-31" ] ], [ [ "Year-3" ] ], [ "operation" ] ], [ [ [ "Seinfeld-1" ] ], [ [ "Year-4" ] ], [ "operation" ] ], [ [ [ "Year-3" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "307c7850922085972bd1", "term": "Al Pacino", "description": "American actor", "question": "Did Al Pacino act in a movie during World War II?", "answer": false, "facts": [ "Al Pacino was born in 1940.", "World War II took place from 1939-1945.", "Al Pacino's first movie role was in 1969." ], "decomposition": [ "When did World War II end?", "When did Al Pacino first have a movie role?", "Is #2 before #1?" ], "evidence": [ [ [ [ "World War II-1" ] ], [ [ "Al Pacino-2" ] ], [ "operation" ] ], [ [ [ "World War II-1" ] ], [ [ "Al Pacino-2" ] ], [ "operation" ] ], [ [ [ "The Second World War (disambiguation)-1" ] ], [ [ "Al Pacino-2" ] ], [ "operation" ] ] ] }, { "qid": "1a015715a9ffcfb67c8e", "term": "Honey badger", "description": "species of mammal", "question": "Are honey badgers and hyenas anatomically dissimilar? ", "answer": false, "facts": [ "Honey Badgers and Hyenas both have anal sacs.", "The anal sacs of Honey Badgers and Hyenas are both able to turn inside out." ], "decomposition": [ "What are the features of the anal sacs of the Honey Badgers?", "What are the features of the Hyenas' anal sacs?", "Is #1 anatomically dissimilar from #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Honey badger-13" ] ], [ [ "Hyena-18" ] ], [ "operation" ] ], [ [ [ "Honey badger-13" ] ], [ [ "Honey badger-13" ] ], [ "operation" ] ] ] }, { "qid": "d1c8f0835896d18c99c2", "term": "Martin Luther", "description": "Saxon priest, monk and theologian, seminal figure in Protestant Reformation", "question": "If Martin Luther did one theses a day would he run out in half a year?", "answer": true, "facts": [ "Martin Luther published a list of 95 theses as his critique of the church.", "There are approximately 182 days in 6 months." ], "decomposition": [ "How many theses did Martin Luther publish in a list?", "How many days are in a year?", "What is #2 divided by 2?", "Is #1 less than #3?" 
], "evidence": [ [ [ [ "Martin Luther-1" ] ], [ [ "Year-4" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Ninety-five Theses-1" ] ], [ [ "Calendar year-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Martin Luther-1" ] ], [ [ "Year-3" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "3d726babfd864d2fd6a3", "term": "Antarctic Peninsula", "description": "peninsula", "question": "Would a 75 degree Fahrenheit day be unusual on the Antarctic Peninsula? ", "answer": true, "facts": [ "The warmest days on record in the Antarctic Peninsula are in the high 50's. ", "On average, the Antarctic Peninsula is between -4 and 36 degrees Fahrenheit. " ], "decomposition": [ "What is the average temperature in the Antarctic Peninsula?", "Does 75 degrees Fahrenheit fall outside the range of #1?" ], "evidence": [ [ [ [ "Antarctic Peninsula-21" ] ], [ "operation" ] ], [ [ [ "Antarctic Peninsula-21" ] ], [ "operation" ] ], [ [ [ "Antarctic Peninsula-21" ] ], [ "operation" ] ] ] }, { "qid": "fa6c3c6b8471d6489f43", "term": "Napoleonic Wars", "description": "Series of early 19th century European wars", "question": "Did earth complete at least one orbit around the sun during the Napoleonic Wars?", "answer": true, "facts": [ "Earth orbits around the Sun in 365 days.", "Napoleonic Wars lasted 12 years, 5 months and 4 weeks." ], "decomposition": [ "How long is the orbit of the earth around the sun?", "How long were the Napoleonic Wars?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Year-1" ] ], [ [ "Napoleonic Wars-1" ] ], [ "operation" ] ], [ [ [ "Earth's orbit-1" ] ], [ [ "Napoleonic Wars-1" ] ], [ "operation" ] ], [ [ [ "Earth's orbit-1" ] ], [ [ "Napoleonic Wars-1" ] ], [ "operation" ] ] ] }, { "qid": "a3f23831cabcf29b0cc9", "term": "Gujarati script", "description": "Indian script", "question": "Is the Gujarati script the same category of script as Kanji?", "answer": false, "facts": [ "Gujarati script is an abugida script", "Kanji is an adopted logographic script " ], "decomposition": [ "What type of script is the Gujarati script?", "What type of script is Kanji?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Gujarati script-1" ] ], [ [ "Kanji-1" ] ], [ "operation" ] ], [ [ [ "Gujarati script-1" ] ], [ [ "Kanji-1" ] ], [ "operation" ] ], [ [ [ "Gujarati script-1" ] ], [ [ "Kanji-1" ] ], [ "operation" ] ] ] }, { "qid": "9d277acbb432c3ddab16", "term": "Morris County, New Jersey", "description": "County in New Jersey", "question": "Was Morris County named after a chief justice?", "answer": true, "facts": [ "The Morris County was named after Colonel Lewis Morris.", "Colonel Lewis Morris was the chief justice of New York." ], "decomposition": [ "Who was Morris County, New Jersey named after?", "Did #1 serve as a chief justice?" ], "evidence": [ [ [ [ "Morris County, New Jersey-4" ] ], [ [ "Lewis Morris (governor)-1" ] ] ], [ [ [ "Morris County, New Jersey-4" ] ], [ [ "Lewis Morris (governor)-1" ] ] ], [ [ [ "Morris County, New Jersey-4" ] ], [ [ "Lewis Morris (governor)-1" ] ] ] ] }, { "qid": "70e7c1ad517f7f78d21d", "term": "Very Large Telescope", "description": "telescope in the Atacama Desert, Chile", "question": "Can the Very Large Telescope observe the largest mountain on Earth?", "answer": false, "facts": [ "The Very Large Telescope observes outer space.", "The largest mountain on earth is underneath the ocean." ], "decomposition": [ "What area does the Very Large Telescope observe?", "Is the answer to #1 the same as earth?" 
], "evidence": [ [ [ [ "Very Large Telescope-3" ] ], [ "operation" ] ], [ [ [ "Very Large Telescope-3" ] ], [ "operation" ] ], [ [ [ "Very Large Telescope-1" ] ], [ "operation" ] ] ] }, { "qid": "c096042cd5db3c7ed663", "term": "Dodo", "description": "Extinct large flightless pigeon from Mauritius", "question": "Would a Dodo hypothetically tower over Ma Petite?", "answer": true, "facts": [ "A Dodo was an extinct bird that was over 3 feet tall.", "Ma Petite was a character on American Horror Story played by Jyoti Amge.", "Jyoti Amge is around 2 feet tall." ], "decomposition": [ "How tall were dodos?", "Who played the role of Ma Petite?", "How tall is #2?", "Is #1 greater than #3?" ], "evidence": [ [ [ [ "Dodo-2" ] ], [ [ "Jyoti Amge-1" ] ], [ [ "Jyoti Amge-2" ] ], [ "operation" ] ], [ [ [ "Dodo-2" ] ], [ [ "Jyoti Amge-3" ] ], [ [ "Jyoti Amge-2" ] ], [ "operation" ] ], [ [ [ "Dodo-2" ] ], [ [ "Jyoti Amge-3" ] ], [ [ "Jyoti Amge-2" ] ], [ "operation" ] ] ] }, { "qid": "0c79bdba76e80f70f6f4", "term": "World of Warcraft", "description": "video game by Blizzard Entertainment", "question": "Can you find Depala's race in World of Warcraft?", "answer": true, "facts": [ "World of Warcraft has several races including humans, night elves, and dwarves.", "Depala is a character in the Magic the Gathering card game that is a dwarf." ], "decomposition": [ "What are the different races found in World of Warcraft?", "What race is Depala?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "World of Warcraft-8" ] ], [ [ "Magic: The Gathering-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Gameplay of World of Warcraft-5" ], "no_evidence" ], [ [ "Depala Vas-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Gameplay of World of Warcraft-5" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e9d41346aa0123ca54a7", "term": "Klingon", "description": "Fictional species in Star Trek", "question": "Did Klingons appear in the movie The Last Jedi?", "answer": false, "facts": [ "Klingons are a race in the fictional universe of Star Trek.", "The Last Jedi is a movie set in the fictional universe of Star Wars." ], "decomposition": [ "Which fictional universe do the Klingons exist in?", "Which fictional universe is The Last Jedi movie set in?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Klingon-2" ] ], [ [ "Star Wars: The Last Jedi-1" ] ], [ "operation" ] ], [ [ [ "Klingon-1" ] ], [ [ "Star Wars: The Last Jedi-1" ] ], [ "operation" ] ], [ [ [ "Klingon-1" ] ], [ [ "Star Wars: The Last Jedi-1" ] ], [ "operation" ] ] ] }, { "qid": "34827a7fd17411042f2c", "term": "Sandal", "description": "Type of footwear with an open upper", "question": "If one of your feet is in a leg cast, should the other be in a sandal?", "answer": false, "facts": [ "If you are using crutches, it is advised to have non-slip shoes with a closed toe.", "Most sandals do not have non-slip traction and, by definition, don't have closed toes. ", "If you are in a leg cast you are likely to be using crutches." ], "decomposition": [ "If you are in a leg cast, what are you likely using to help yourself walk?", "What kind of shoes do doctors recommend for you to wear if you are using #1?", "Do sandals fit the requirements of #2?" 
], "evidence": [ [ [ [ "Crutch-1" ], "no_evidence" ], [ [ "Orthotics-1" ], "no_evidence" ], [ [ "Sandal-1" ], "operation" ] ], [ [ [ "Crutch-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Crutch-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "ff93998995b7b2a8f125", "term": "Apollo 15", "description": "Fourth crewed mission to land on the Moon", "question": "Would a triples tandem bike support Apollo 15 crew?", "answer": true, "facts": [ "A tandem bike has seats for more than one rider.", "A triples tandem bike has three seats and can support three riders.", "The crew of Apollo 15 consisted of three people." ], "decomposition": [ "How many people were on the Apollo 15 mission?", "How many people can ride a triple tandem bike?", "Is #2 at least #1?" ], "evidence": [ [ [ [ "Alfred Worden-1", "Apollo 15-2" ] ], [ [ "Tandem bicycle-11" ] ], [ "operation" ] ], [ [ [ "Apollo 15-8" ] ], [ [ "Tandem bicycle-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Apollo 15-6" ] ], [ [ "Tandem bicycle-11" ] ], [ "operation" ] ] ] }, { "qid": "a071a8b56496258746d6", "term": "Spider wasp", "description": "family of insects", "question": "Would a spider wasp be more effective than a bullet ant to stop a criminal?", "answer": false, "facts": [ "Tasers are used by police to jolt criminals and temporarily paralyze them.", "Spider wasps sting their prey and cause intense pain.", "The Schmidt sting pain index rates the sting of spider wasps as a 4.", "The sting of a bullet ant earns the highest rank on the Schmidt sting pain index with a rating of 4+." ], "decomposition": [ "What is the Schmidt sting pain index of the spider wasp's sting?", "What is the Schmidt sting pain index of the bullet ant's sting?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Spider wasp-18" ] ], [ [ "Spider wasp-18" ] ], [ "operation" ] ], [ [ [ "Spider wasp-18" ] ], [ [ "Ant-31" ] ], [ "operation" ] ], [ [ [ "Spider wasp-18" ] ], [ [ "Schmidt sting pain index-14" ] ], [ "operation" ] ] ] }, { "qid": "3d63f800400fd9505ea5", "term": "Library of Alexandria", "description": "one of the largest libraries in the ancient world, located in Alexandria, Egypt", "question": "Would Library of Alexandria need less shelf space than Library of Congress?", "answer": true, "facts": [ "The Library of Alexandria was an ancient library that was destroyed.", "The Library of Alexandria is estimated to have had around 100,000 books.", "The Library of Congress has over 170 million items." ], "decomposition": [ "What is the number of books (and other materials) housed in the Library of Congress?", "What was the number of books (and other materials) housed in the Library of Alexandria?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Library of Congress-33" ] ], [ [ "Library of Alexandria-45" ] ], [ [ "Library of Alexandria-45", "Library of Congress-33" ] ] ], [ [ [ "Library of Congress-33" ] ], [ [ "Library of Alexandria-1" ] ], [ "operation" ] ], [ [ [ "Library of Congress-33" ] ], [ [ "Library of Alexandria-45" ] ], [ "operation" ] ] ] }, { "qid": "64b4d43fa8671c798921", "term": "Cucumber", "description": "species of plant", "question": "Are all cucumbers the same texture?", "answer": false, "facts": [ "Kirby cucumbers are known for being covered in bumps.", "English cucumbers are usually covered in ridges." ], "decomposition": [ "What texture do kirby cucumbers have?", "What texture do English cucumbers have?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Cucumber-11" ] ], [ [ "Cucumber-14", "European cucumber-1", "European cucumber-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "European cucumber-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cucumber-9" ], "no_evidence" ], [ [ "Cucumber-14" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2206eb47fc70f106a91d", "term": "Drain fly", "description": "family of insects", "question": "Do calico cat patterns cover every drain fly color variety?", "answer": false, "facts": [ "Drain flies come in two color varieties, black and grey.", "Calico cats have a traditional color pattern that includes white, orange, and black." ], "decomposition": [ "What colors do drain flies come in?", "What colors are seen on calico cats?", "Is every element of #1 also in #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Calico cat-1" ] ], [ "operation" ] ], [ [ [ "Drain fly-1" ], "no_evidence" ], [ [ "Calico cat-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Drain fly-1" ], "no_evidence" ], [ [ "Calico cat-1" ] ], [ "operation" ] ] ] }, { "qid": "cc6e607ec6b68023be26", "term": "Auburn, New York", "description": "City in New York, United States", "question": "Can you fit every resident of Auburn, New York, in Tropicana Field?", "answer": true, "facts": [ "The capacity of Tropicana Field is 36,973", "The population of Auburn, NY is 27,687" ], "decomposition": [ "What is the capacity of Tropicana Field?", "What is the population of Auburn, NY?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Tropicana Field-31" ] ], [ [ "Auburn, New York-1" ] ], [ [ "Tropicana Field-31" ], "operation" ] ], [ [ [ "Tropicana Field-31" ] ], [ [ "Auburn, New York-1" ] ], [ "operation" ] ], [ [ [ "Tropicana Field-31" ] ], [ [ "Auburn, New York-1" ] ], [ "operation" ] ] ] }, { "qid": "9cd06c9dc94ad737d0a6", "term": "Beaver", "description": "Genus of mammals", "question": "Would a Beaver's teeth rival that of a Smilodon?", "answer": false, "facts": [ "A beaver has teeth measuring 25 mm on average.", "The Smilodon was a prehistoric saber-toothed tiger.", "The Smilodon's teeth were around 11 inches long." ], "decomposition": [ "How long is a beaver's tooth?", "How long was a Smilodon's tooth?", "Is #1 larger than #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Beaver-1", "Beaver-7" ], "no_evidence" ], [ [ "Smilodon-10" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Beaver-6" ], "no_evidence" ], [ [ "Smilodon-10" ] ], [ "operation" ] ] ] }, { "qid": "b09deccfa0bca4973aff", "term": "Albanian Declaration of Independence", "description": "declaration of independence", "question": "Can an Arvanite Greek understand some of the Albanian Declaration of Independence?", "answer": true, "facts": [ "The Albanian Declaration of Independence is written in Albanian, Gheg, Tosk, and Ottoman Turkish.", "The Arvanite Greek's are a major Tosk speaking group of southern Albania." ], "decomposition": [ "What languages is the Albanian Declaration of Independence written in?", "What language do Arvanite Greeks speak?", "Is #2 in #1?" 
], "evidence": [ [ [ "no_evidence" ], [ [ "Arvanites-1" ] ], [ "operation" ] ], [ [ [ "Albanian Declaration of Independence-3" ] ], [ [ "Arvanites-1" ] ], [ "operation" ] ], [ [ [ "Albanian Declaration of Independence-1", "Albanian Declaration of Independence-7", "Albanian language-1" ], "no_evidence" ], [ [ "Arvanites-1" ] ], [ "operation" ] ] ] }, { "qid": "748150aac686d4aac256", "term": "Panthéon", "description": "mausoleum in Paris", "question": "Does Pantheon in Paris have a unique name?", "answer": false, "facts": [ "The Pantheon in Paris is a historical monument.", "The Pantheon was a former Roman temple in antiquity.", "The Pantheon is a mythical or imaginary creature used in heraldry, particularly in Britain often depicted as white deer with the tail of a fox with purple stars along their back." ], "decomposition": [ "What is referred to as the Pantheon in Paris?", "What other concepts are named Pantheon?", "Is #1 differently-named from #2?" ], "evidence": [ [ [ [ "Panthéon-1" ] ], [ [ "Pantheon, Rome-1" ] ], [ "operation" ] ], [ [ [ "Panthéon-1" ] ], [ [ "Pantheon (mythical creature)-1", "Pantheon (religion)-1", "Pantheon (software)-1" ] ], [ "operation" ] ], [ [ [ "Panthéon-1" ] ], [ [ "Pantheon (religion)-1", "Pantheon, Rome-1" ] ], [ "operation" ] ] ] }, { "qid": "21305b3d437a146b0b2d", "term": "1960", "description": "Year", "question": "Were there footprints on the moon in 1960?", "answer": false, "facts": [ "The first man to walk on the moon was aboard Apollo 11.", "Apollo 11 took off in 1969." ], "decomposition": [ "When did humans first land on the moon?", "Is #1 before or in 1960?" ], "evidence": [ [ [ [ "Moon landing-2" ] ], [ "operation" ] ], [ [ [ "Moon landing-2" ] ], [ "operation" ] ], [ [ [ "Apollo 11-1" ] ], [ "operation" ] ] ] }, { "qid": "73ca2ef1da65b2a2ebe6", "term": "Darth Vader", "description": "fictional character in the Star Wars franchise", "question": "Was Darth Vader monogamous?", "answer": true, "facts": [ "Monogamy refers to when a person is married or committed to only one person.", "Darth Vader was only married to Padme Amidala and had two children with her." ], "decomposition": [ "If someone is monogamous, how many people are they committed to?", "How many people was Darth Vader committed to?", "Does #2 equal #1?" ], "evidence": [ [ [ [ "Monogamy-1" ] ], [ [ "Padmé Amidala-1" ] ], [ "operation" ] ], [ [ [ "Monogamy-1" ] ], [ [ "Padmé Amidala-1" ] ], [ "operation" ] ], [ [ [ "Monogamy-1" ] ], [ [ "Darth Vader-2" ] ], [ [ "Darth Vader-2" ] ] ] ] }, { "qid": "088215475460521a5688", "term": "Methane", "description": "Simplest organic molecule with one carbon atom and four hydrogen", "question": "Is cow methane safer for environment than cars?", "answer": false, "facts": [ "Methane is a gas that pollutes the environment and leads to shifts in temperature.", "Cars produce 2.7 tons of methane per year.", "Cows produce 4 tons of methane gas per year." ], "decomposition": [ "How much methane is produced by cars annually?", "How much methane is produced by cows annually?", "Is #2 less than #1?" 
], "evidence": [ [ [ [ "Methane-2" ], "no_evidence" ], [ [ "Cattle-88" ] ], [ "operation" ] ], [ [ [ "Natural gas vehicle-2" ], "no_evidence" ], [ [ "Methane emissions-2" ] ], [ "operation" ] ], [ [ [ "Methane-21" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "91917c5e30dbf8552b34", "term": "Paella", "description": "Valencian rice dish", "question": "Would a vegan eat a traditional Paella dish?", "answer": false, "facts": [ "Vegans do not consume animals or products derived from animals.", "The traditional Paella recipe includes rabbit and chicken as two of the ingredients." ], "decomposition": [ "What kind of products are forbidden in a vegan diet?", "What are the main ingredients of Paella?", "Are all of #2 totally excluded from #1?" ], "evidence": [ [ [ [ "Veganism-1" ] ], [ [ "Paella-5" ] ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Paella-5" ] ], [ "operation" ] ], [ [ [ "Vegan Outreach-3" ] ], [ [ "Paella-19" ] ], [ "operation" ] ] ] }, { "qid": "5633fa480c01d39119ee", "term": "B", "description": "letter in the Latin alphabet", "question": "Is B's place in alphabet same as Prince Harry's birth order?", "answer": true, "facts": [ "B is the second letter of the alphabet.", "Prince Harry was the second son of Charles, Prince of Wales and Diana, Princess of Wales." ], "decomposition": [ "What position in the alphabet does \"B\" hold?", "What is the nominal number associated with #1?", "Does Prince Harry have exactly #2 minus 1 older siblings?" ], "evidence": [ [ [ [ "B-1" ] ], [ [ "Nominal number-1", "Nominal number-2" ], "operation" ], [ [ "Prince Harry, Duke of Sussex-1", "Prince Harry, Duke of Sussex-2" ], "operation" ] ], [ [ [ "B-1" ] ], [ [ "Ordinal numeral-9" ] ], [ [ "Prince Harry, Duke of Sussex-4" ] ] ], [ [ [ "B-1" ] ], [ "operation" ], [ [ "Prince Harry, Duke of Sussex-4" ], "operation" ] ] ] }, { "qid": "465c5d8486aa87851072", "term": "Mark Twain", "description": "American author and humorist", "question": "Was Mark Twain a struggling inventor?", "answer": false, "facts": [ "Twain patented three inventions.", "Twain created and sold over 25,000 self-pasting scrapbook creations.", "Twain invented an improvement in adjustable and detachable straps for garments to replace suspenders." ], "decomposition": [ "What did Mark Twain invent?", "How much of #1 did Mark Twain sell?", "Were the sales of #2 not enough to turn a profit?" ], "evidence": [ [ [ [ "Mark Twain-23" ] ], [ [ "Mark Twain-23" ] ], [ [ "Mark Twain-23" ] ] ], [ [ [ "Mark Twain-23" ] ], [ [ "Mark Twain-23" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Mark Twain-23" ] ], [ [ "Mark Twain-23" ] ], [ "operation" ] ] ] }, { "qid": "a649eb84fd37b11f9a01", "term": "Al Capone", "description": "American gangster and businessman", "question": "Did Al Capone carry a smartphone?", "answer": false, "facts": [ "Al Capone died in 1947.", "Smartphones were invented in 1992." ], "decomposition": [ "In what year did Al Capone die?", "What year was the first smartphone invented?", "Is #1 after #2?" 
], "evidence": [ [ [ [ "Al Capone-1", "Al Capone-34" ] ], [ [ "Smartphone-16" ] ], [ "operation" ] ], [ [ [ "Al Capone-1" ] ], [ [ "Smartphone-5" ] ], [ "operation" ] ], [ [ [ "Al Capone-34" ] ], [ [ "Smartphone-6" ] ], [ "operation" ] ] ] }, { "qid": "3c106ba490a3706f3931", "term": "Water skiing", "description": "surface water sport", "question": "Is Morocco an ideal location for water skiing?", "answer": false, "facts": [ "Water skiing is a sport that involves gliding over the surface of large bodies of water.", "Morocco is one of the leading countries plagued by drought." ], "decomposition": [ "What are the minimum requirements to engage in water skiing?", "Does Morocco have #1?" ], "evidence": [ [ [ [ "Water skiing-1" ] ], [ [ "Morocco-1", "Morocco-39" ], "no_evidence", "operation" ] ], [ [ [ "Water skiing-1" ] ], [ [ "Morocco-41" ], "operation" ] ], [ [ [ "Water skiing-5" ] ], [ [ "Morocco-51" ] ] ] ] }, { "qid": "71711173efbb350885b3", "term": "New Brunswick", "description": "province in Canada", "question": "Can Burundi's communicate with citizens of New Brunswick?", "answer": true, "facts": [ "French and English are the official languages of New Brunswick.", "French is one of the official languages of Burundi." ], "decomposition": [ "What are the official languages of New Brunswick, Canada?", "What are the official languages of Burundi?", "Are some elements of #2 also in #1?" ], "evidence": [ [ [ [ "New Brunswick-36" ] ], [ [ "Burundi-87" ] ], [ "operation" ] ], [ [ [ "Official language-14" ] ], [ [ "Burundi-6" ] ], [ "operation" ] ], [ [ [ "Languages of Canada-58" ], "operation" ], [ [ "Languages of Burundi-1" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "9e78c01675a198781cca", "term": "Michael Bloomberg", "description": "American billionaire businessman and politician, former mayor of New York City", "question": "Can Michael Bloomberg fund the debt of Micronesia for a decade?", "answer": true, "facts": [ "Michael Bloomberg is worth an estimated 60 billion dollars as of 2020.", "Micronesia has annual expenses of nearly 200 million dollars." ], "decomposition": [ "What is Micheal Bloomberg's worth?", "What is the annual expense for Micronesia?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Michael Bloomberg-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Michael Bloomberg-2" ] ], [ [ "Micronesia-31" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Michael Bloomberg-2" ] ], [ [ "Economy of the Federated States of Micronesia-7" ] ], [ "operation" ] ] ] }, { "qid": "c69b07a0cff831ce6791", "term": "Reiki", "description": "Pseudoscientific healing technique", "question": "Would a physician be unlikely to recommend Reiki?", "answer": true, "facts": [ "Physicians typically only recommend treatments that have a significant amount of clinical evidence behind them.", "Reiki has no clinical evidence of effectiveness, and there is no scientific basis for its utility." ], "decomposition": [ "What is the basis for physician's recommendations?", "Does Reiki have #1?" 
], "evidence": [ [ [ [ "Evidence-based medicine-1" ] ], [ [ "Reiki-1", "Reiki-2" ], "operation" ] ], [ [ [ "Physician-1" ] ], [ [ "Reiki-2" ], "no_evidence", "operation" ] ], [ [ [ "Physician-1" ] ], [ "operation" ] ] ] }, { "qid": "2251a4d6e090572a63d7", "term": "Bengal fox", "description": "species of mammal", "question": "Could Ryan Crouser throw a bengal fox with ease?", "answer": true, "facts": [ "Ryan Crouser is a professional shot putter who won the gold medal at the 2016 Olympics.", "The men's shot weighs 16.01 pounds.", "The typical weight of a Bengal fox is between 5 to 9 pounds." ], "decomposition": [ "What sport is Ryan Crouser a professional in?", "How much does the equipment for #1 weigh?", "How much does a Bengal fox weigh?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Ryan Crouser-1" ] ], [ [ "Shot put-11" ] ], [ [ "Bengal fox-2" ] ], [ "operation" ] ], [ [ [ "Ryan Crouser-1" ] ], [ [ "Shot put-11" ] ], [ [ "Bengal fox-2" ] ], [ "operation" ] ], [ [ [ "Ryan Crouser-1" ] ], [ [ "Shot put-11" ], "no_evidence" ], [ [ "Bengal fox-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c2c6e32ccdd81e5df7f6", "term": "Paralympic Games", "description": "Major international sport event for people with disabilities", "question": "Would Jimmy Vee be eligible to compete in the Paralympic Games?", "answer": true, "facts": [ "Jimmy Vee is a dwarf.", "Dwarfism is defined as someone who is medically short-statured.", "Short stature due to a bone deficiency is one of the categories for paralympic athletes." ], "decomposition": [ "What disability does Jimmy Vee suffer from?", "What is the medical definition of #1?", "Is #2 one of the categories for the paralympic athletes?" ], "evidence": [ [ [ [ "Jimmy Vee-5" ] ], [ [ "Dwarfism-1" ] ], [ [ "Paralympic Games-1" ] ] ], [ [ [ "Jimmy Vee-5" ] ], [ [ "Dwarfism-2" ] ], [ [ "Paralympic Games-42" ], "operation" ] ], [ [ [ "Jimmy Vee-5" ] ], [ [ "Dwarfism-1" ] ], [ [ "Paralympic Games-4" ] ] ] ] }, { "qid": "40eb773857f1933b4b36", "term": "Society", "description": "Social group involved in persistent social interaction", "question": "Can a jet plane be made without society?", "answer": false, "facts": [ "A jet plane requires many materials to build.", "A jet plane requires much prior knowledge to build.", "The specialized knowledge and materials is not obtainable without other people." ], "decomposition": [ "What materials do jet planes require to be built?", "Is #1 obtainable without people?" ], "evidence": [ [ [ [ "Jet aircraft-8" ] ], [ [ "Jet engine-5" ], "operation" ] ], [ [ [ "Aircraft-43" ] ], [ [ "Aircraft-41" ], "no_evidence" ] ], [ [ [ "Components of jet engines-3", "Jet engine-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "826cdebd34c07f92fca5", "term": "Tsar", "description": "title given to a male monarch in Russia, Bulgaria and Serbia", "question": "Would a duke hypothetically be subservient to a Tsar?", "answer": true, "facts": [ "The Tsar was the highest ruler in several eastern countries.", "A duke was a title given to important european nobles.", "Dukes ranked below princes, kings, and queens.", "Tsars were the equivalents of English Kings." ], "decomposition": [ "What is the equivalent of a Tsar in English hierarchy/royalty?", "Do dukes rank below #1?" 
], "evidence": [ [ [ [ "Tsar-1" ] ], [ [ "Duke-1" ] ] ], [ [ [ "Tsar-1" ] ], [ [ "Duke-1" ], "operation" ] ], [ [ [ "Tsar-6" ] ], [ [ "Duke-1" ], "operation" ] ] ] }, { "qid": "e3481f169664aa561368", "term": "Louvre", "description": "Art museum and Historic site in Paris, France", "question": "Is the Louvre in billionaire George Soros's price range?", "answer": false, "facts": [ "The Louvre including all of its paintings has a value of around 45 billion.", "George Soros has a net worth around 8 billion as of 2020." ], "decomposition": [ "What is the estimated value of the Louvre?", "What is George Soros' estimated 2020 net worth?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Louvre-1" ], "no_evidence" ], [ [ "George Soros-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Louvre-16" ], "no_evidence" ], [ [ "George Soros-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "George Soros-113" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "bda0fd7cd1f5a90b72ec", "term": "Chinese calendar", "description": "Lunisolar calendar from China", "question": "Are quadrupeds represented on Chinese calendar?", "answer": true, "facts": [ "Quadrupeds are animals that walk on four legs.", "The Chinese calendar has a number of symbols including monkeys, goats, and tigers.", "Tigers have four paws and balance themselves by walking on their toes." ], "decomposition": [ "What are the symbols of the Chinese calendar?", "What is the defining characteristic of quadrapeds?", "Do any of the animals in #1 have #2?" ], "evidence": [ [ [ [ "Chinese calendar-1", "Earthly Branches-6" ] ], [ [ "Quadrupedalism-1" ] ], [ [ "Ox-10" ], "operation" ] ], [ [ [ "Chinese zodiac-20" ] ], [ [ "Quadrupedalism-1" ] ], [ "operation" ] ], [ [ [ "Chinese zodiac-5" ] ], [ [ "Quadrupedalism-1" ] ], [ "operation" ] ] ] }, { "qid": "089b0eb6cdf0fe53a863", "term": "Scottish people", "description": "ethnic inhabitants of Scotland", "question": "Are Scottish people descended from Mary, Queen of Scots part French?", "answer": true, "facts": [ "Mary, Queen of Scots was Queen of Scotland in the 1500s.", "Mary, Queen of Scots was the daughter of Mary of Guise.", "Mary of Guise was born to a French nobleman, and her mother was French as well." ], "decomposition": [ "Who was the mother of Mary, Queen of Scots?", "Who were the parents of #1?", "Were #2 French?" ], "evidence": [ [ [ [ "Mary of Guise-1" ] ], [ [ "Antoinette de Bourbon-1", "Claude, Duke of Guise-1" ] ], [ "operation" ] ], [ [ [ "Mary, Queen of Scots-5" ] ], [ [ "Mary of Guise-2" ] ], [ [ "Claude, Duke of Guise-1" ], "operation" ] ], [ [ [ "Mary, Queen of Scots-5" ] ], [ [ "Mary of Guise-2" ] ], [ [ "Antoinette de Bourbon-1", "Claude, Duke of Guise-1" ] ] ] ] }, { "qid": "8cab175fdcffeba3ec52", "term": "Eurovision Song Contest", "description": "Annual song competition held among the member countries of the European Broadcasting Union", "question": "Can actress Dafne Keen win the Eurovision Song Contest finals in 2020?", "answer": false, "facts": [ "Contestants must be at least 16 years of age to compete in the finals of Eurovision Song Contest.", "Dafne Keen is 15 years old in 2020." ], "decomposition": [ "What is the minimum age for constests on \"Eurovision Song Contest\"?", "How old is Dafne Keen?", "Is #2 greater than or equal to #1?" 
], "evidence": [ [ [ [ "Eurovision Song Contest-68" ], "no_evidence" ], [ [ "Dafne Keen-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Rules of the Eurovision Song Contest-11" ] ], [ [ "Dafne Keen-1" ] ], [ "operation" ] ], [ [ [ "Eurovision Song Contest-68" ] ], [ [ "Dafne Keen-1" ] ], [ "operation" ] ] ] }, { "qid": "0121baf3240fcbf7a64f", "term": "Amy Winehouse", "description": "English singer and songwriter", "question": "Was Amy Winehouse familiar with Brexit?", "answer": false, "facts": [ "Amy Winehouse died in 2011.", "Brexit began in 2017." ], "decomposition": [ "When did Amy Winehouse die?", "When did Brexit begin?", "Did #2 occur before #1?" ], "evidence": [ [ [ [ "Amy Winehouse-1" ] ], [ [ "Brexit-1" ] ], [ "operation" ] ], [ [ [ "Amy Winehouse-1" ] ], [ [ "Brexit-1" ] ], [ "operation" ] ], [ [ [ "Amy Winehouse-1" ], "operation" ], [ [ "Brexit-3" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "140258f12a2ed0221ea5", "term": "Eleventh grade", "description": "educational year", "question": "Would a student in eleventh grade be unable to run for president of the United States?", "answer": true, "facts": [ "Students in the eleventh grade are typically 16–17 years of age.", "To serve as president, one must be at least 35 years old." ], "decomposition": [ "What is the minimum age one must be to run for president?", "Would a typical eleventh grader be way younger than #1?" ], "evidence": [ [ [ [ "President of the United States-5" ] ], [ [ "Eleventh grade-1" ], "operation" ] ], [ [ [ "Age of candidacy-60" ] ], [ [ "Education in Alberta-13" ], "operation" ] ], [ [ [ "President of the United States-5" ] ], [ "operation" ] ] ] }, { "qid": "bac94f0b8587bf0e8f5e", "term": "Cauliflower", "description": "cauliflower plants (for the vegetable see Q23900272)", "question": "Would a cauliflower farmer prosper at a latitude of 75° N?", "answer": false, "facts": [ "Cauliflower grows best between temperatures of 70 to 85 degrees", "The latitude of 75° N is in the Arctic" ], "decomposition": [ "What country is at the latitude of 75° N?", "What is the average temperature of #1?", "What is the best temperature to grow cauliflower?", "Is there any overlap between #2 and #3?" ], "evidence": [ [ [ [ "75th parallel north-1" ] ], [ [ "Antarctica-42" ] ], [ [ "Cauliflower-6" ] ], [ "operation" ] ], [ [ [ "75th parallel north-1" ], "no_evidence" ], [ [ "New Siberian Islands-16" ] ], [ [ "Cauliflower-6" ] ], [ "operation" ] ], [ [ [ "75th parallel north-1", "75th parallel north-2" ] ], [ [ "Arctic-5" ] ], [ [ "Cauliflower-6" ] ], [ "operation" ] ] ] }, { "qid": "690a36d5fed5b17969aa", "term": "Jack Dempsey", "description": "American boxer", "question": "Did Jack Dempsey fight the current WBC heavyweight champion?", "answer": false, "facts": [ "Jack Dempsey died in 1983", "The current WBC heavyweight champion is Tyson Fury", "Tyson Fury was born in 1988" ], "decomposition": [ "When did Jack Dempsey die?", "When was the current WBC heavyweight champion born?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Jack Dempsey-1" ] ], [ [ "Tyson Fury-1" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-1" ] ], [ [ "Tyson Fury-1" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-53" ] ], [ [ "Deontay Wilder-1" ] ], [ "operation" ] ] ] }, { "qid": "e530682239bb5e59ebf0", "term": "Tank", "description": "Tracked heavy armored fighting vehicle", "question": "Could a cat ride Panzer VIII Maus tank missile from Barcelona to Madrid?", "answer": false, "facts": [ "The Panzer VIII Maus tank missile had a range of around 2.2 miles.", "The distance from Barcelona to Madrid is 385 miles.", "Motion sickness is a common problem in cats." ], "decomposition": [ "What is the maximum range of a Panzer VIII Maus tank missile?", "What is the distance between Barcelona and Madrid?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Panzer VIII Maus-2" ] ], [ [ "AVE-17" ] ], [ "operation" ] ], [ [ [ "Panzer VIII Maus-1" ], "no_evidence" ], [ [ "Barcelona-1", "Madrid-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Panzer VIII Maus-2" ] ], [ [ "Madrid–Barcelona high-speed rail line-1" ] ], [ "operation" ] ] ] }, { "qid": "bc8b9bd5c95738769076", "term": "Elizabeth I of England", "description": "Queen regnant of England and Ireland from 17 November 1558 until 24 March 1603", "question": "Did Elizabeth I of England own any viscose fiber?", "answer": false, "facts": [ "Viscose is a rayon fiber invented in 1892.", "Elizabeth I of England died in the 1600s." ], "decomposition": [ "When did Elizabeth I die?", "When was viscose fiber invented?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Elizabeth I (disambiguation)-1" ] ], [ [ "Viscose-10" ] ], [ "operation" ] ], [ [ [ "Elizabeth I of England-68" ] ], [ [ "Viscose-10" ] ], [ "operation" ] ], [ [ [ "Elizabeth I of England-1" ] ], [ [ "Rayon-16" ] ], [ "operation" ] ] ] }, { "qid": "c1fe5e55bb99845e364a", "term": "Sternum", "description": "flat bone in the middle front part of the rib cage", "question": "Is sternum connected to any of the bones mentioned in James Weldon Johnson's Dem Bones?", "answer": true, "facts": [ "Dem Bones is a spiritual song composed by James Weldon Johnson.", "The lyrics to Dem Bones mention the hip bone, back bone, and knee bone among others.", "The back bone is the spine, which is connected to the first 7 rib sets.", "The sternum, or breastbone, is a flat bone at the front center of the chest.", "The sternum and ribs are connected and make up the ribcage." ], "decomposition": [ "Which bones were mentioned in the spiritual song Dem Bones by James Weldon Johnson?", "Is the sternum connected to any of #1?" ], "evidence": [ [ [ [ "Dem Bones-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Dem Bones-1" ], "no_evidence" ], [ [ "Sternum-5" ], "operation" ] ], [ [ [ "Dem Bones-3" ], "no_evidence" ], [ [ "Sternum-2" ], "operation" ] ] ] }, { "qid": "76821a3d4561f872b607", "term": "Futurama", "description": "American animated sitcom for the Fox Broadcasting Company and Comedy Central", "question": "Has the creator of Futurama lived in multiple centuries?", "answer": true, "facts": [ "The creator of Futurama is Matt Groening.", "Matt Groening was born in 1954.", "The 20th (twentieth) century was a century that began on January 1, 1901 and ended on December 31, 2000.", "The 21st (twenty-first) century began on January 1, 2001, and will end on December 31, 2100." 
], "decomposition": [ "Who is the creator of Futurama?", "How old is #1?", "What is 2020 minus #2?", "When did the most recent new century begin?", "Is #4 between #3 and 2020?" ], "evidence": [ [ [ [ "Futurama-1" ] ], [ [ "Matt Groening-1" ] ], [ "operation" ], [ [ "2000-1" ] ], [ "operation" ] ], [ [ [ "Futurama-1" ] ], [ [ "Matt Groening-1" ] ], [ "operation" ], [ [ "21st century-1" ] ], [ "operation" ] ], [ [ [ "Futurama-1" ] ], [ [ "Matt Groening-1" ] ], [ "operation" ], [ [ "21st century-1" ] ], [ "operation" ] ] ] }, { "qid": "e0e27f835ab064c59bb4", "term": "French Riviera", "description": "Riviera", "question": "Did Pink Floyd have a song about the French Riviera?", "answer": true, "facts": [ "Pink Floyd included the song San Tropez on the album Meddle", "San Tropez, also known as Saint Tropez, is a town located on the French Riviera" ], "decomposition": [ "What songs by Pink Floyd are named after towns in France?", "What are the names of the major towns in the French Riviera?", "Are any towns in #1 also in #2?" ], "evidence": [ [ [ [ "San Tropez (song)-1" ] ], [ [ "French Riviera-1" ] ], [ "operation" ] ], [ [ [ "San Tropez (song)-1" ] ], [ [ "Saint-Tropez-1" ] ], [ "operation" ] ], [ [ [ "San Tropez (song)-1" ] ], [ [ "French Riviera-4" ] ], [ [ "Saint-Tropez-1" ], "operation" ] ] ] }, { "qid": "506c7e618094c78b7614", "term": "Microsoft Excel", "description": "Spreadsheet editor, part of Microsoft Office", "question": "Is electricity necessary to balance an account in Microsoft Excel?", "answer": true, "facts": [ "Microsoft Excel is a computer program", "Computers require a power source" ], "decomposition": [ "Which devices can run Microsoft Excel software?", "Do all of #1 require electricity to work?" ], "evidence": [ [ [ [ "Microsoft Excel-1" ] ], [ [ "Smartphone-2" ], "operation" ] ], [ [ [ "Microsoft Excel-25" ] ], [ [ "Electricity-4" ] ] ], [ [ [ "Microsoft Excel-1", "Spreadsheet-1" ] ], [ [ "Computer-43", "Digital electronics-6" ], "no_evidence", "operation" ] ] ] }, { "qid": "2295eaf3cdbecc17ca0a", "term": "Bucharest", "description": "Capital of Romania", "question": "Was historical Dracula from a town in Bucharest?", "answer": false, "facts": [ "Vlad III Prince of Wallachia, also called Vlad the Impaler, is believed to be the historical inspiration for Dracula.", "Vlad III was born in Sighișoara, Romania, which is located in the historic region of Transylvania.", "Bucharest is located 276 km away from Transylvania." ], "decomposition": [ "What was Dracula's real name?", "Where was #1 born?", "What is the distance from #2 to Bucharest?" ], "evidence": [ [ [ [ "Count Dracula-1" ] ], [ [ "Vlad the Impaler-8" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Vlad the Impaler-1" ] ], [ [ "Vlad the Impaler-8" ] ], [ "no_evidence" ] ], [ [ [ "Vlad the Impaler-1" ] ], [ [ "Vlad the Impaler-8" ] ], [ [ "Bucharest-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "ed0ae39d6a7399f4a5a3", "term": "Richard Wagner", "description": "German composer", "question": "Did Richard Wagner support the Nazis?", "answer": false, "facts": [ "Richard Wagner died in 1883.", "The Nazi Party was established in 1919." ], "decomposition": [ "When did Richard Wagner die?", "When was the Nazi Party formed?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Richard Wagner-1" ] ], [ [ "Nazi Party-1" ] ], [ "operation" ] ], [ [ [ "Richard Wagner-1" ] ], [ [ "Nazi Party-1" ] ], [ "operation" ] ], [ [ [ "Richard Wagner-1" ] ], [ [ "Nazi Party-1" ] ], [ "operation" ] ] ] }, { "qid": "363a55b705110a878be1", "term": "Goofy", "description": "Disney cartoon character", "question": "If Goofy were a pet, would he need heartworm prevention?", "answer": true, "facts": [ "Goofy is an anthropomorphic dog character. ", "Dogs require regular heartworm prevention. " ], "decomposition": [ "What kind of animal is Goofy?", "Does a #1 require regular heartworm prevention?" ], "evidence": [ [ [ [ "Goofy-1" ] ], [ [ "Dog-18" ] ] ], [ [ [ "Goofy-1" ] ], [ [ "Dog health-50" ] ] ], [ [ [ "Goofy-1" ] ], [ "operation" ] ] ] }, { "qid": "95bd44fab517139182f2", "term": "Alan Greenspan", "description": "13th Chairman of the Federal Reserve in the United States", "question": "Has Alan Greenspan lived through at least a baker's dozen of president's?", "answer": true, "facts": [ "A baker's dozen refers to the number 13.", "Alan Greenspan was born in 1926 and turned 94 years old in March of 2020.", "There have been 16 different president's from 1926-2020." ], "decomposition": [ "When was Alan Greenspan born?", "What number is represented by a baker's dozen?", "How many US presidents have served since #1?", "Is #3 greater than or equal to #2?" ], "evidence": [ [ [ [ "Alan Greenspan-1" ] ], [ [ "Dozen-7" ] ], [ [ "Calvin Coolidge-1", "Donald Trump-1" ] ], [ "operation" ] ], [ [ [ "Alan Greenspan-1" ] ], [ [ "Dozen-7" ] ], [ [ "Calvin Coolidge-1", "Donald Trump-1" ] ], [ "operation" ] ], [ [ [ "Alan Greenspan-1" ] ], [ [ "Dozen-7" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "9b5a993ed4b5f121f935", "term": "Bill Gates", "description": "American business magnate and philanthropist", "question": "Did Bill Gates achieve Latin honors?", "answer": false, "facts": [ "Bill Gates left Harvard after two years.", "Latin honors are Latin phrases used in some colleges and universities to indicate the level of distinction with which an academic degree has been earned.", "You cannot earn an academic degree if you drop out of the program." ], "decomposition": [ "What are Latin honors used to describe in colleges and universitiies?", "What program must one complete to be awarded #1?", "Did Bill gates finish #2?" ], "evidence": [ [ [ [ "Latin honors-3" ] ], [ [ "Latin honors-3" ] ], [ [ "Bill Gates-10", "Latin honors-3" ] ] ], [ [ [ "Latin honors-2" ] ], [ [ "Bill Gates-10" ] ], [ "operation" ] ], [ [ [ "Latin honors-1" ] ], [ [ "Academic degree-1" ] ], [ [ "Bill Gates-10" ] ] ] ] }, { "qid": "265d1ccd0e1d6b813f9b", "term": "Tsar", "description": "title given to a male monarch in Russia, Bulgaria and Serbia", "question": "Was the son of Tsar Nicholas a daredevil?", "answer": false, "facts": [ "Tsar Nicholas had only one son, Alexei. ", "Alexei had hemophilia and had to be carefully guarded.", "Hemophilia is the inability for blood to form clots, making any small cut dangerous." ], "decomposition": [ "Who was the son of Tsar Nicholas?", "What did #1 suffer from medically?", "With #2, is one able to be injured?" 
], "evidence": [ [ [ [ "Alexei Nikolaevich, Tsarevich of Russia-1" ] ], [ [ "Alexei Nikolaevich, Tsarevich of Russia-1" ] ], [ [ "Haemophilia-1" ] ] ], [ [ [ "Alexei Nikolaevich, Tsarevich of Russia-1" ] ], [ [ "Alexei Nikolaevich, Tsarevich of Russia-8" ] ], [ [ "Alexei Nikolaevich, Tsarevich of Russia-8" ] ] ], [ [ [ "Nicholas II of Russia-63" ] ], [ [ "Alexei Nikolaevich, Tsarevich of Russia-1" ] ], [ [ "Contaminated blood scandal in the United Kingdom-5" ], "operation" ] ] ] }, { "qid": "9467c294f175d06473c3", "term": "Judo", "description": "modern martial art, combat and Olympic sport", "question": "Does the judo rank system reach the triple digits?", "answer": false, "facts": [ "A triple digit number would be equal to at least 100.", "The judo dan-rank system was capped at 10th dan after the death of judo's founder, Kanō Jigorō." ], "decomposition": [ "Was is the lowest three digit number?", "What is the highest rank a person can reach in Judo?", "Is #2 a higher number than #1?" ], "evidence": [ [ [ [ "100-1", "Numerical digit-1" ] ], [ [ "Judo-55" ] ], [ "operation" ] ], [ [ [ "Numerical digit-1" ], "no_evidence" ], [ [ "Rank in Judo-6" ] ], [ "operation" ] ], [ [ [ "100-1" ] ], [ [ "Judo-55" ] ], [ "operation" ] ] ] }, { "qid": "f79592a376f1793f29cb", "term": "Greeks", "description": "people of southeastern Europe", "question": "Did Cleopatra have ethnicity closer to Egyptians than Greeks?", "answer": false, "facts": [ "Cleopatra was the last ruler of the Kingdom of Egypt.", "Cleopatra was the descendant of Ptolemy I Soter, a Greek general from Macedonia.", "Cleopatra's father, Ptolemy XII Auletes, was most likely descended from an Alexandrian Greek mother.", "Cleopatra is believed to be mostly Greek with some Persian and Syrian ancestry as well." ], "decomposition": [ "Who was Cleopatra the daughter of?", "Was #1 descended from Egyptians?" ], "evidence": [ [ [ [ "Cleopatra-8" ] ], [ [ "Ptolemy XII Auletes-3" ] ] ], [ [ [ "Cleopatra-8" ] ], [ [ "Ptolemy-6" ] ] ], [ [ [ "Cleopatra-1" ] ], [ "operation" ] ] ] }, { "qid": "cd38f5c76c18493b158b", "term": "Soup", "description": "primarily liquid food", "question": "Can soup be eaten with the hands?", "answer": false, "facts": [ "Soup is mostly liquid.", "Hands cannot hold liquid." ], "decomposition": [ "What state of matter is soup?", "Can your hands hold #1?" ], "evidence": [ [ [ [ "Soup-1" ] ], [ "operation" ] ], [ [ [ "Soup-1" ] ], [ "operation" ] ], [ [ [ "Soup-1" ] ], [ "operation" ] ] ] }, { "qid": "b3522ea1e8d0a95dd0b3", "term": "Cooking", "description": "Preparing food for consumption with the use of heat", "question": "If your electric stove has a glass top, should you use cast iron skillets?", "answer": false, "facts": [ "Cast iron skillets can scratch or crack flat top stoves.", "Glass top stoves are considered 'flat tops'." ], "decomposition": [ "What would cast iron skillets do to flat top serves?", "What kind of stove are glass top stoves?", "Would someone want their #2 to be #1?" 
], "evidence": [ [ [ [ "Cooktop-8" ], "no_evidence" ], [ [ "Cooktop-5" ] ], [ "operation" ] ], [ [ [ "Cast-iron cookware-1" ], "no_evidence" ], [ [ "Kitchen stove-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Electric stove-13" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "4773d577873ff9e04a88", "term": "Woodrow Wilson", "description": "28th president of the United States", "question": "Did Woodrow Wilson consider Blacks to be equal members of society?", "answer": false, "facts": [ "Woodrow Wilson supported the Ku Klux Klan.", "The Ku Klux Klan consider Blacks to be inferior. " ], "decomposition": [ "What group did Woodrow Wilson support?", "Did #1 consider Blacks to be equal members of society?" ], "evidence": [ [ [ [ "Woodrow Wilson-79" ] ], [ [ "Ku Klux Klan-1", "Ku Klux Klan-2" ] ] ], [ [ [ "Woodrow Wilson-79" ] ], [ [ "Ku Klux Klan-104" ] ] ], [ [ [ "Woodrow Wilson-79" ], "no_evidence" ], [ [ "Woodrow Wilson-76" ] ] ] ] }, { "qid": "6042b48035952d0e1a61", "term": "Friday", "description": "day of the week", "question": "Would an astrologer focus on the densest terrestrial planet for a Friday horoscope?", "answer": true, "facts": [ "Friday is associated with Venus in astrology", "Venus is the densest of the terrestrial planets " ], "decomposition": [ "What astrological body is associated with Friday?", "Which is the densest terrestrial planet?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Planetary hours-4" ] ], [ [ "Venus-19" ] ], [ [ "Planetary hours-4", "Venus-19" ] ] ], [ [ [ "Friday-3" ] ], [ [ "Outline of Venus-2" ] ], [ "operation" ] ], [ [ [ "Friday-12" ] ], [ [ "Venus-2" ] ], [ "operation" ] ] ] }, { "qid": "36edeeb054e722511fcd", "term": "Elizabeth II", "description": "Queen of the United Kingdom and the other Commonwealth realms", "question": "Was Elizabeth II the Queen during the Persian Gulf War?", "answer": true, "facts": [ "Elizabeth II became Queen in 1952.", "The Persian Gulf War occurred 1990-1991." ], "decomposition": [ "When did Elizabeth II become the Queen?", "When was the Persian Gulf War?", "Was Elizabeth II alive in #2?", "Is #2 after #1?", "Are the answers to #3 and #4 both yes?" ], "evidence": [ [ [ [ "Elizabeth II-3" ] ], [ [ "Persian Gulf-3" ] ], [ [ "Elizabeth II-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Head of the Commonwealth-8" ] ], [ [ "Gulf War-1" ] ], [ [ "Elizabeth II-41" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Elizabeth II-3" ] ], [ [ "Gulf War-1" ] ], [ [ "Elizabeth II-42" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "1a11133f240556f239de", "term": "Ocelot", "description": "Small wild cat", "question": "Could an ocelot outrun a kindergartner? ", "answer": true, "facts": [ "An ocelot can run up to 61 kilometers per hour.", "Kindergartners are human children usually aged five to six years old.", "Human children under 10 are not likely to exceed 20 kilometers per hour." ], "decomposition": [ "How quickly can an ocelot run?", "How old is a kindergartner?", "How quickly can someone run at the age of #2?", "Is #1 greater than #3?" 
], "evidence": [ [ [ [ "Ocelot-25", "Tiger-54" ], "no_evidence" ], [ [ "Kindergarten-1" ] ], [ [ "Running-43" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Kindergarten-89" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Ocelot-25" ], "no_evidence" ], [ [ "Kindergarten-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a843854a5eabf03cf56f", "term": "Music", "description": "form of art using sound and silence", "question": "Can music be used as a weapon?", "answer": true, "facts": [ "Music is an art form whose medium is sound.", "Music can help elevate or subdue emotions.", "People connect to music through the sound.", "The military uses loud music to cause psychological disorientation and confusion ", "The military calls the use of loud disorienting music part of psychological operations. " ], "decomposition": [ "In what ways does the military used music in operations?", "Is any of #1 as a weapon?" ], "evidence": [ [ [ [ "Music in psychological operations-1" ] ], [ [ "Music in psychological operations-2" ], "operation" ] ], [ [ [ "Manuel Noriega-44" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Music in psychological operations-2" ] ], [ [ "Weapon-1" ], "operation" ] ] ] }, { "qid": "d0145341694e7034555d", "term": "United Nations Framework Convention on Climate Change", "description": "international treaty", "question": "Can the United Nations Framework Convention on Climate Change be held at the Javits Center?", "answer": false, "facts": [ "The United Nations Framework Convention on Climate Change is an international treaty, not a physical meeting", "The Javits Center is a convention center in New York City" ], "decomposition": [ "What kind of event is the United Nations Framework Convention on Climate Change?", "Does #1 have to meet at a convention centre?" ], "evidence": [ [ [ [ "United Nations Framework Convention on Climate Change-6" ] ], [ [ "Javits Center-2" ], "no_evidence", "operation" ] ], [ [ [ "United Nations Framework Convention on Climate Change-39" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "United Nations Framework Convention on Climate Change-6" ] ], [ [ "United Nations Framework Convention on Climate Change-39" ], "no_evidence" ] ] ] }, { "qid": "9e6de7de5577b59708ed", "term": "San Antonio", "description": "City in Texas, United States", "question": "Was San Antonio the site of a major battle in the 19th century?", "answer": true, "facts": [ "The Alamo is located in San Antonio.", "The Alamo was the site of a major battle during the Texan Revolution against Mexico in 1836." ], "decomposition": [ "Where did the most notable battle during the Texas Revolution take place?", "Is #1 located in San Antonio in present day US?", "Did the Texas revolution happen during the 19th century?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "Battle of the Alamo-1" ] ], [ [ "Battle of the Alamo-1" ] ], [ [ "19th century-1" ] ], [ "operation" ] ], [ [ [ "Battle of the Alamo-1" ] ], [ [ "Battle of the Alamo-1" ] ], [ [ "19th century-1", "Battle of the Alamo-1" ] ], [ "operation" ] ], [ [ [ "Battle of the Alamo-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "30eb3ccd28d647d504bb", "term": "Higher education", "description": "Academic tertiary education, such as from colleges and universities", "question": "Did Emma Stone pursue a higher education?", "answer": false, "facts": [ "Higher education, also called post-secondary education, third-level or tertiary education, is an optional final stage of formal learning that occurs after completion of secondary education.", "Stone attended Xavier College Preparatory‍—‌an all-girl Catholic high school‍—‌as a freshman, but dropped out after one semester to become an actress.", "Xavier College Preparatory is a Catholic, all-female private high school.", "High school is a secondary education school." ], "decomposition": [ "What is the highest institution Emma Stone attended?", "Is #1 considered a \"higher education\"?" ], "evidence": [ [ [ [ "Emma Stone-7" ] ], [ [ "Higher education-1" ] ] ], [ [ [ "Emma Stone-7" ] ], [ "operation" ] ], [ [ [ "Emma Stone-7" ] ], [ "operation" ] ] ] }, { "qid": "7daa137a9509cb6c211a", "term": "Billionaire", "description": "person who has a net worth of at least one billion (1,000,000,000) units of a given currency", "question": "Is Cambodia too expensive for 2020 richest billionaire to own?", "answer": false, "facts": [ "The richest billionaire in 2020 is Jeff Bezos.", "Jeff Bezos has an estimated worth of 145 billion dollars.", "GDP is a measure of how much the economy of a country is worth.", "Cambodia has an estimated GDP of 28 billion in 2020." ], "decomposition": [ "Who is currently the richest person alive?", "What is the net worth of #1?", "What is the GDP of Cambodia?", "Is #2 less than #3?" ], "evidence": [ [ [ [ "Jeff Bezos-1" ] ], [ [ "Jeff Bezos-1" ] ], [ [ "Cambodia-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Jeff Bezos-1" ] ], [ [ "Jeff Bezos-28" ] ], [ [ "Thailand and the International Monetary Fund-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Jeff Bezos-1" ] ], [ [ "Jeff Bezos-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "14806a45b707fc56e4dd", "term": "Phobos (moon)", "description": "natural satellite of Mars", "question": "Would you have to wear a coat when on Phobos?", "answer": true, "facts": [ "A coat is a garment used to keep a person warm.", "The surface temperatures on Phobos range from about −4 °C (25 °F) on the sunlit side to −112 °C (−170 °F) on the shadowed side." ], "decomposition": [ "What is the average temperature on Phobos?", "At what temperature would people need to start wearing coats to stay warm?", "Is #1 below #2?" 
], "evidence": [ [ [ [ "Phobos (moon)-4" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Phobos (moon)-4" ] ], [ [ "Coat-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Phobos (moon)-4" ] ], [ [ "Coat-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "06724dc213e0dae715f5", "term": "Amazon (company)", "description": "American electronic commerce and cloud computing company", "question": "Could one Amazon share ever buy twenty year Netflix subscription?", "answer": true, "facts": [ "Amazon stock has reached as high as $2,500 a share as of June 2020.", "The basic Netflix subscription package costs $8.99 a month as of 2020." ], "decomposition": [ "What is the cost of a monthly Netflix subscription?", "How many months are there in a year?", "What is #2 multiplied by 20 and then multiplied by #1?", "What is the highest price Amazon stock has ever reached?", "Is #4 greater than #3?" ], "evidence": [ [ [ [ "Netflix-49" ] ], [ [ "Month-38" ] ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Netflix-55" ] ], [ [ "Year-47" ] ], [ "operation" ], [ [ "Amazon (company)-78" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Netflix-49" ] ], [ [ "Fiscal year-73" ] ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "22c9aaeaf0c412ba2201", "term": "Mary, mother of Jesus", "description": "religious figure and mother of Jesus of Nazareth", "question": "Would Mary, mother of Jesus have hypothetically prayed to Artemis if she was Greek?", "answer": true, "facts": [ "Mary, Mother of Jesus is known for being a virgin and giving birth to Christ.", "Artemis was the Greek goddess of the hunt, childbirth, and virgins." ], "decomposition": [ "What is Greek Artemis god of?", "What were the things Mary, mother of Jesus was well known for?", "Is any of #2 included in #1?" ], "evidence": [ [ [ [ "Artemis-1" ] ], [ [ "Mary, mother of Jesus-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Artemis-1" ] ], [ [ "Mary, mother of Jesus-2" ] ], [ "operation" ] ], [ [ [ "Artemis-16" ] ], [ [ "Mary, mother of Jesus-2" ] ], [ "operation" ] ] ] }, { "qid": "2c1620d67936c9229270", "term": "Nikola Tesla", "description": "Serbian American inventor", "question": "Is there radiation where Nikola Tesla once worked?", "answer": true, "facts": [ "Nikola Tesla built a facility called the Wardenclyffe Tower in Shoreham, New York", "Shoreham was the site of a nuclear power plant in the '70s and '80s" ], "decomposition": [ "What facility did Nikola Tesla build?", "Where is #1 located?", "Did #2 use to be the site of a nuclear power plant?" ], "evidence": [ [ [ [ "Wardenclyffe Tower-12" ] ], [ [ "Wardenclyffe Tower-1" ] ], [ [ "Shoreham Nuclear Power Plant-1" ], "operation" ] ], [ [ [ "Wardenclyffe Tower-1" ] ], [ [ "Wardenclyffe Tower-1" ] ], [ [ "Shoreham Nuclear Power Plant-4", "Shoreham, New York-9" ] ] ], [ [ [ "Wardenclyffe Tower-1" ] ], [ [ "Wardenclyffe Tower-1" ] ], [ [ "Shoreham Nuclear Power Plant-1" ] ] ] ] }, { "qid": "69be580cbd6d2605dc59", "term": "Moss", "description": "Division of plants", "question": "Is a beard is moss that grows on a human?", "answer": false, "facts": [ "Moss is a type of non-vascular and flowerless plant.", "Moss typically grows in dark green clumps in damp and shady locations.", "A beard is facial hair that grows on the chin and cheeks of a human.", "Facial hair is not a plant. " ], "decomposition": [ "What is moss?", "What is a beard?", "Is #1 the same thing as #2?" 
], "evidence": [ [ [ [ "Moss-1" ] ], [ [ "Beard-1" ] ], [ "operation" ] ], [ [ [ "Moss-1" ] ], [ [ "Beard-1" ] ], [ "operation" ] ], [ [ [ "Moss-1" ] ], [ [ "Beard-1" ] ], [ "operation" ] ] ] }, { "qid": "63e55313ae5cbb20dc0c", "term": "Pacific War", "description": "Theater of World War II fought in the Pacific and Asia", "question": "Did Archduke Franz Ferdinand of Austria participate in the Pacific War?", "answer": false, "facts": [ "Archduke Franz Ferdinand of Austria was assassinated in 1914.", "The Pacific War took place between 1941 and 1945." ], "decomposition": [ "During what years did the Pacific War occur?", "When did Archduke Franz Ferdinand of Austria die?", "Did #2 occur after #1?" ], "evidence": [ [ [ [ "Pacific War-14" ] ], [ [ "Assassination of Archduke Franz Ferdinand-46" ] ], [ "operation" ] ], [ [ [ "Pacific War-2", "Pacific War-3" ] ], [ [ "Archduke Franz Ferdinand of Austria-1" ] ], [ "operation" ] ], [ [ [ "Pacific War-14" ] ], [ [ "Archduke Franz Ferdinand of Austria-1" ] ], [ "operation" ] ] ] }, { "qid": "2fd3aa5d2672c8c5350c", "term": "The Doctor (Doctor Who)", "description": "fictional character from Doctor Who", "question": "Does The Doctor keep his ship in his childhood home?", "answer": false, "facts": [ "The Doctor grew up on a planet called Gallifrey.", "The planet Gallifrey was destroyed in a time war.", "The Doctor's ship doesn't require docking." ], "decomposition": [ "Where is The Doctor's childhood home?", "Can The Doctor still visit #1?" ], "evidence": [ [ [ [ "The Doctor (Doctor Who)-7" ] ], [ [ "Gallifrey-30" ], "operation" ] ], [ [ [ "Doctor Who-53" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Doctor (Doctor Who)-7" ] ], [ [ "Gallifrey-3" ] ] ] ] }, { "qid": "69e0b6f868ee71314fc1", "term": "Marco Rubio", "description": "United States Senator from Florida", "question": "Does Marco Rubio have a close relationship with Allah?", "answer": false, "facts": [ "Marco Rubio adheres to the religious sect of Christianity known as Catholicism.", "Catholics and other Christians worship God.", "Allah is worshiped by believers of Islam." ], "decomposition": [ "What is Marco Rubio's religion?", "Which deity does #1 worship?", "Is #2 Allah?" ], "evidence": [ [ [ [ "Marco Rubio-86" ] ], [ [ "Catholic Church-2" ] ], [ [ "God in Islam-13" ], "operation" ] ], [ [ [ "Marco Rubio-7" ] ], [ [ "God in Catholicism-38" ] ], [ "operation" ] ], [ [ [ "Marco Rubio-86" ] ], [ [ "Christianity-1" ] ], [ "operation" ] ] ] }, { "qid": "770d0ae34440013dcf8e", "term": "Ethiopian cuisine", "description": "Culinary traditions of Ethiopia", "question": "Is shrimp prevalent in Ethiopian cuisine?", "answer": false, "facts": [ "Ethiopian cuisine specializes in vegetables and spicy meat dishes.", "Ethiopia is a landlocked country without access to seas or oceans." ], "decomposition": [ "What kind of aquatic environments are shrimp caught in?", "Does the geography of Ethiopia include any of #1?" ], "evidence": [ [ [ [ "Shrimp-2" ] ], [ [ "Ethiopia-91" ] ] ], [ [ [ "Shrimp and prawn as food-1" ] ], [ [ "Ethiopia-1" ], "operation" ] ], [ [ [ "Shrimp-8" ] ], [ [ "Ethiopia-90" ], "no_evidence" ] ] ] }, { "qid": "e9b635db671e0c1be8d9", "term": "Pancreas", "description": "A glandular organ that plays a role in the digestive and endocrine systems of vertebrates.", "question": "Can pancreas removal cause bankruptcy?", "answer": true, "facts": [ "Pancreas removal is a medical procedure.", "Medical procedures are expensive in come countries. 
", "Expensive procedures can cause debt.", "Debt can cause bankruptcy. " ], "decomposition": [ "What medical procedures are involved when a pancreas be removed?", "In what places are #1 sometimes directly paid for by the patient?", "Among any of #2, what consequences exist for medical debt?", "Is bankruptcy included in #3?" ], "evidence": [ [ [ [ "General surgery-1" ] ], [ [ "Health care systems by country-55" ] ], [ [ "Medical debt-4" ] ], [ "operation" ] ], [ [ [ "Pancreatectomy-4" ] ], [ [ "Medical debt-4" ] ], [ [ "Medical debt-4" ] ], [ "operation" ] ], [ [ [ "Pancreas-37", "Pancreas-44" ], "no_evidence" ], [ [ "Health care in the United States-4" ], "no_evidence" ], [ [ "Health care in the United States-14" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "441d83eccca7714ba2a7", "term": "Harlem Renaissance", "description": "African-American cultural movement in New York City in the 1920s", "question": "Could Al Capone have read works from the Harlem Renaissance?", "answer": true, "facts": [ "The Harlem Renaissance occurred during the 1920s.", "Al Capone lived through the 1920s." ], "decomposition": [ "When was the Harlem Renaissance?", "Was Al Capone able to read during #1?" ], "evidence": [ [ [ [ "Harlem Renaissance-1" ] ], [ [ "Al Capone-1" ], "operation" ] ], [ [ [ "Harlem Renaissance-1" ] ], [ [ "Al Capone-1" ], "operation" ] ], [ [ [ "Harlem Renaissance-41" ], "no_evidence" ], [ [ "Al Capone-3" ], "no_evidence" ] ] ] }, { "qid": "5e39653ec4524dce45c9", "term": "Suburb", "description": "Human settlement that is part of or near to a larger city", "question": "Do suburbs encourage the use of cars?", "answer": true, "facts": [ "Suburbs are generally built outside of walking distance from city centers.", "City centers contain jobs and stores.", "Suburb dwellers need to access jobs and stores to survive." ], "decomposition": [ "How far are suburbs usually situated from city centres?", "Is #1 usually greater than reasonable walking distance?" ], "evidence": [ [ [ [ "Suburb-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Suburb-1" ] ], [ [ "Commuting-13" ] ] ], [ [ [ "Suburb-1" ], "no_evidence" ], [ [ "Suburb-2" ], "no_evidence", "operation" ] ] ] }, { "qid": "0fe3edf35bd1a1e662a9", "term": "Gandalf", "description": "Fictional character created by J. R. R. Tolkien", "question": "Is Gandalf hypothetically a formidable foe for Charmed's Barbas?", "answer": false, "facts": [ "Gandalf was a wizard in the Hobbit and Lord of the Rings series.", "Gandalf used his staff to cast powerful spells.", "Barbas was a demon on Charmed that had resistance to magic." ], "decomposition": [ "In what way did Gandalf attack his enemies?", "Is Barbas from Charmed susceptible to #1?" ], "evidence": [ [ [ [ "Gandalf-23" ] ], [ "no_evidence" ] ], [ [ [ "Gandalf-2" ], "no_evidence" ], [ [ "Barbas (Charmed)-1" ], "no_evidence", "operation" ] ], [ [ [ "Gandalf-2" ], "no_evidence" ], [ [ "Barbas (Charmed)-1", "Marbas-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "a77402c5a20c3a5a8676", "term": "Moose", "description": "A genus of mammals belonging to the deer, muntjac, roe deer, reindeer, and moose family of ruminants", "question": "Would a moose hypothetically be too much for a minotaur to devour whole?", "answer": true, "facts": [ "A minotaur was a mythological beast with the head of a bull and body of a human.", "Human stomachs process about three to four pounds of food a day.", "A moose can weigh up to 1500 pounds.", "Bulls can consume around 33 pounds of food a day." 
], "decomposition": [ "What is the body structure of a minotaur?", "What kind of stomach do they have due to #1?", "What is the average weight of a moose?", "How much food weight can #2 process per day?", "Is #3 far greater than #4?" ], "evidence": [ [ [ [ "Minotaur-7" ] ], [ [ "Minotaur-7" ], "no_evidence" ], [ [ "Alaska moose-3" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Minotaur-1" ] ], [ [ "Human digestive system-36" ] ], [ [ "Moose-40" ] ], [ [ "Moose-41" ] ], [ "operation" ] ], [ [ [ "Minotaur-1" ] ], [ [ "Stomach-1", "Stomach-3" ] ], [ [ "Moose-40" ] ], [ [ "Food energy-14" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "e76f4ef36aee97eafb49", "term": "Ahura Mazda", "description": "highest deity of Zoroastrianism", "question": "Can you worship Ahura Mazda at a mosque?", "answer": false, "facts": [ "Ahura Mazda is a deity in Zoroastrianism", "A mosque is a place of worship for Muslims" ], "decomposition": [ "Which religious group worships in a mosque?", "Does #1 believe in or worship Ahura Mazda?" ], "evidence": [ [ [ [ "Mosque-1" ] ], [ [ "Ahura Mazda-1" ], "operation" ] ], [ [ [ "Mosque-30" ] ], [ [ "Ahura Mazda-1", "Zoroastrianism-27" ] ] ], [ [ [ "Islam-28" ] ], [ [ "Ahura Mazda-1" ] ] ] ] }, { "qid": "3f8a1bd6bf3a967cdeb6", "term": "Spinal cord", "description": "long, thin, tubular structure made up of nervous tissue", "question": "Would a hedgehog avoid animals without a spinal cord?", "answer": false, "facts": [ "A hedgehog has a wide ranging diet including birds, toads, slugs, and snails.", "Slugs are animals known as invertebrates because they have no backbones." ], "decomposition": [ "What animals do hedgehog mainly eats?", "Out of #1, do all animals have a spinal cord?" ], "evidence": [ [ [ [ "Hedgehog-11" ] ], [ [ "Frog-54", "Vertebrate-1" ], "operation" ] ], [ [ [ "Hedgehog-11" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Hedgehog-11" ] ], [ "operation" ] ] ] }, { "qid": "d4803e3857fb8b51df5b", "term": "Fraktur", "description": "Typeface", "question": "Does Fraktur have a sordid history?", "answer": true, "facts": [ "Fraktur is a type of font that originated in Germany.", "Fraktur was used on official Nazi documents.", "Fraktur was used on the cover of Hitler's Mein Kampf." ], "decomposition": [ "What is Fraktur?", "Which group in Germany used #1 for their official documents?", "Did #2 have a sordid past?" ], "evidence": [ [ [ [ "Fraktur-1" ] ], [ [ "Fraktur-10" ] ], [ "operation" ] ], [ [ [ "Fraktur-1" ] ], [ [ "Fraktur-10" ] ], [ [ "The Holocaust-1" ] ] ], [ [ [ "Fraktur-1" ] ], [ [ "Fraktur-10" ] ], [ [ "Jewish ghettos in German-occupied Poland-3" ], "operation" ] ] ] }, { "qid": "87829d2b3eb19ca11821", "term": "Andes", "description": "Mountain range in South America", "question": "Was the Peak of the Andes hidden from the view of the Colossus of Rhodes?", "answer": true, "facts": [ "The highest point of the Andes is almost 23,000 feet high.", "The Colossus of Rhodes, a massive ancient statue, was around 108 feet tall.", "The Andes are located in South America.", "The Colossus of Rhodes was found in ancient Greece." ], "decomposition": [ "How high is the peak of the Andes?", "How high is the Colossus of Rhodes?", "Where are the Andes located?", "Where is the Colossus of Rhodes located?", "Is #1 higher than #2 and are #3 and #4 different countries?" 
], "evidence": [ [ [ [ "Andes-3" ] ], [ [ "Colossus of Rhodes-1" ] ], [ [ "Andes-1" ] ], [ [ "Colossus of Rhodes-1" ] ], [ "operation" ] ], [ [ [ "Andes-3" ] ], [ [ "Colossus of Rhodes-1" ] ], [ [ "Andes-1" ] ], [ [ "Colossus of Rhodes-1" ] ], [ "operation" ] ], [ [ [ "Andes-3" ] ], [ [ "Colossus of Rhodes-1" ] ], [ [ "Andes-1" ] ], [ [ "Colossus of Rhodes-1" ] ], [ "operation" ] ] ] }, { "qid": "9ad9cdadd2d69de7cfff", "term": "LG Electronics", "description": "South Korean multinational electronics company", "question": "Is LG Electronics located in a city with an official bird that has a purplish/blue tail?", "answer": true, "facts": [ "LG Electronics is headquarted in Seoul.", "The official bird of Seoul is the Korean magpie.", "The Korean magpie has a purplish/blue colored tail." ], "decomposition": [ "Which city is LG Electronics headquarters located in?", "Which bird is officially associated with #1", "Is the tail color of #2 purplish-blue?" ], "evidence": [ [ [ [ "LG Electronics-1" ] ], [ [ "Oriental magpie-1" ], "no_evidence" ], [ [ "Oriental magpie-5" ], "operation" ] ], [ [ [ "LG Electronics-1" ] ], [ [ "Oriental magpie-1" ] ], [ [ "Oriental magpie-5" ] ] ], [ [ [ "LG Electronics-1" ] ], [ [ "Oriental magpie-1" ], "no_evidence" ], [ [ "Oriental magpie-5" ] ] ] ] }, { "qid": "1700bc2c9529a12bd026", "term": "Naruto", "description": "Japanese manga and anime series", "question": "Could you watch Naruto and Puzzle Place on the same channel?", "answer": false, "facts": [ "Puzzle Place aired on PBS between 1995 and 1998.", "Naruto aired on Cartoon Network in 2005." ], "decomposition": [ "What channel did Puzzle Place air on?", "What channel did Naruto air on?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "The Puzzle Place-1" ] ], [ [ "Naruto-2" ] ], [ "operation" ] ], [ [ [ "The Puzzle Place-1" ] ], [ [ "Naruto-2" ] ], [ "operation" ] ], [ [ [ "The Puzzle Place-1" ] ], [ [ "Naruto-24" ] ], [ "operation" ] ] ] }, { "qid": "183a6e39a27a2432989c", "term": "Christopher Nolan", "description": "British–American film director, screenwriter, and producer", "question": "Could Christopher Nolan borrow pants from Danny Devito?", "answer": false, "facts": [ "Christopher Nolan is 6 feet tall.", "Danny Devito is 4'10\" tall.", "Pant sizes relate to height." ], "decomposition": [ "How tall is Christopher Nolan?", "What was Danny Devito's height?", "Does #1 match #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Danny DeVito-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Danny DeVito-4" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "01e796e851a77dae22bc", "term": "Joker (character)", "description": "Fictional character in the DC Universe", "question": "Was the Joker an enemy of the Avengers?", "answer": false, "facts": [ "The Joker is a DC Comics villain.", "The Avengers are a group of heroes from Marvel Comics.", "Being from different publishers, they do not meet." ], "decomposition": [ "Which world does the Joker exist in?", "The Avengers are from which universe?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Joker (2019 film)-46" ] ], [ [ "The Avengers (2012 film)-37" ] ], [ [ "The Avengers (2012 film)-45" ], "operation" ] ], [ [ [ "Joker (character)-1" ] ], [ [ "Avengers (comics)-1" ] ], [ "operation" ] ], [ [ [ "Joker (character)-1" ] ], [ [ "Avengers (comics)-1" ] ], [ "operation" ] ] ] }, { "qid": "2541f27af7753e1ec7c1", "term": "Pea", "description": "species of plant", "question": "Will twenty pea pods contents cover entire chess board?", "answer": true, "facts": [ "Pea pods on average have 5 to 6 peas inside.", "A standard chess board has 64 squares." ], "decomposition": [ "On average, how many peas do twenty pea pods contain?", "How many squares does a standard chess board have?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Pea-1" ], "no_evidence" ], [ [ "Chessboard-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Pea-4" ], "no_evidence" ], [ [ "Chessboard-3" ] ], [ "operation" ] ], [ [ [ "Pea-1" ], "no_evidence" ], [ [ "Chess-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2f20fd52567450e69e68", "term": "Grand Theft Auto III", "description": "Open world action-adventure video game", "question": "Would members of Blue Lives Matter support every element of Grand Theft Auto III?", "answer": false, "facts": [ "Blue Lives Matter is a countermovement in the United States that supports police officers and law enforcement personnel.", "Grand Theft Auto III allows for gratuitous violence against police officers in the game." ], "decomposition": [ "Which action against cops are allowed in GTA III?", "What does the Blue Lives Matter movement advocate for?", "Is all of #1 in accordance with #2?" ], "evidence": [ [ [ [ "Grand Theft Auto III-36" ] ], [ [ "Blue Lives Matter-1" ] ], [ "operation" ] ], [ [ [ "Grand Theft Auto III-36" ] ], [ [ "Blue Lives Matter-1" ] ], [ "operation" ] ], [ [ [ "Grand Theft Auto III-36" ] ], [ [ "Blue Lives Matter-1" ] ], [ "operation" ] ] ] }, { "qid": "b2c17613452eb229fa92", "term": "Phobos (moon)", "description": "natural satellite of Mars", "question": "Is Phobos (moon) name origin similar to Roman god Pavor?", "answer": true, "facts": [ "Phobos (moon) derives its name from ancient Greek mythology.", "Phobos was the god of fear.", "In Roman mythology, Pavor or Terror is known as the personification of fear." ], "decomposition": [ "What was Phobos (moon) named after?", "What is #1 referred to in Roman mythology?", "Is #2 the same as Pavor or Terror?" ], "evidence": [ [ [ [ "Phobos (moon)-2" ] ], [ [ "Phobos (mythology)-2" ] ], [ "operation" ] ], [ [ [ "Phobos (moon)-6" ] ], [ [ "Phobos (mythology)-2" ] ], [ "operation" ] ], [ [ [ "Phobos (mythology)-1" ] ], [ [ "Phobos (mythology)-2" ] ], [ "operation" ] ] ] }, { "qid": "f1895e4df6287a560a08", "term": "Binary number", "description": "system that represents numeric values using two symbols; 0 and 1", "question": "Can binary numbers and standard alphabet satisfy criteria for a strong password?", "answer": false, "facts": [ "The criteria for a strong password according to cybersecurity company Avast is: at least 15 characters. uppercase letters. lowercase letters. numbers. and symbols.", "The standard alphabet contains twenty six letters but no special characters.", "Binary numbers only contain 0 and 1." ], "decomposition": [ "Which characters make up binary numbers?", "Which characters make up the standard English alphabet", "Does #1 or #2 include special characters or symbols?" 
], "evidence": [ [ [ [ "English alphabet-1" ], "no_evidence" ], [ [ "Binary number-1" ] ], [ "operation" ] ], [ [ [ "Binary number-1" ] ], [ [ "Alphabet-1" ] ], [ [ "Password strength-13" ], "operation" ] ], [ [ [ "Binary number-1" ] ], [ [ "English alphabet-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "64354cb6ee6d987d4f0f", "term": "Ice", "description": "water frozen into the solid state", "question": "Would a diet of ice eventually kill a person?", "answer": true, "facts": [ "Humans can survive without water for four days.", "Ice can be melted into water, which consists of hydrogen and oxygen, using a simple cigarette lighter.", "Humans can survive without food for 30 to 40 days on average.", "Humans need carbohydrates, proteins, and fats that are contained in foods.", "Water does not contain fat, carbohydrates or protein." ], "decomposition": [ "Ice is the solid state of what?", "What nutrients are needed to sustain human life?", "Are most of #2 absent from #1?" ], "evidence": [ [ [ [ "Ice-7" ] ], [ [ "Table of food nutrients-1" ] ], [ [ "Table of food nutrients-1" ], "operation" ] ], [ [ [ "Ice-1" ] ], [ [ "Nutrient-1" ] ], [ [ "Water-1" ], "operation" ] ], [ [ [ "Ice-1" ] ], [ [ "Food-1" ] ], [ "operation" ] ] ] }, { "qid": "08a7de56c14143be3535", "term": "Quran", "description": "The central religious text of Islam", "question": "Would an adherent of Zoroastrianism consult the Quran for religious guidance?", "answer": false, "facts": [ "The Quran is the central religious text of Islam", "Zoroastrianism is an ancient religion predating Islam by several centuries" ], "decomposition": [ "Which religious group mainly uses the Quran for their consultation?", "Is Zoroastrianism closely related to #1?" ], "evidence": [ [ [ [ "Quran-1" ] ], [ [ "Zoroastrianism-1" ], "operation" ] ], [ [ [ "Quran-20" ] ], [ [ "Zoroastrianism-48" ], "operation" ] ], [ [ [ "Quran-1" ] ], [ [ "Zoroastrianism-1" ] ] ] ] }, { "qid": "add18119b84e567fed05", "term": "Joker (character)", "description": "Fictional character in the DC Universe", "question": "Is the Joker in a healthy romantic relationship?", "answer": false, "facts": [ "Healthy relationships are characterized by mutual trust and respect.", "The Joker is dating Harley Quinn.", "The Joker frequently abuses and talks down to Harley." ], "decomposition": [ "Who is the Joker in a relationship with?", "Does the Joker respect #1?", "Is respect necessary in a healthy romantic relationship?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Joker (character)-27" ] ], [ [ "Joker (character)-53" ] ], [ [ "Interpersonal relationship-21" ] ], [ "operation" ] ], [ [ [ "Joker (character)-3" ] ], [ [ "Joker (character)-3" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Harley Quinn-1" ] ], [ [ "Harley Quinn-2" ] ], [ "no_evidence", "operation" ], [ "operation" ] ] ] }, { "qid": "bd3caa68cb957d27b0e2", "term": "Lighthouse of Alexandria", "description": "Ancient lighthouse in Egypt", "question": "Were Greeks essential to crafting Egyptian Lighthouse of Alexandria?", "answer": true, "facts": [ "The Lighthouse of Alexandria was an impressive monument in Egypt.", "The Lighthouse of Alexandria was built by pharaoh Ptolemy II.", "Ptolemy II was the son of Ptolemy I Soter.", "Ptolemy I Soter was a Greek bodyguard of Alexander the Great and became pharaoh of Egypt." ], "decomposition": [ "Who built the Lighthouse of Alexandria?", "Who was #1's father?", "Was #2 Greek?" 
], "evidence": [ [ [ [ "Lighthouse of Alexandria-7", "Sostratus of Cnidus-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Lighthouse of Alexandria-6" ] ], [ [ "Ptolemy I Soter-2" ] ], [ [ "Philip II of Macedon-1" ], "operation" ] ], [ [ [ "Lighthouse of Alexandria-6" ] ], [ [ "Ptolemy I Soter-1" ] ], [ "operation" ] ] ] }, { "qid": "cf57e005787dd2d5f90c", "term": "J. D. Salinger", "description": "American writer", "question": "Did J. D. Salinger ever ask his father for a quinceañera?", "answer": false, "facts": [ "A quinceañera is celebration of a girl's 15th birthday.", "J. D. Salinger was male.", "A quinceañera is a Hispanic tradition.", "J. D. Salinger was Jewish." ], "decomposition": [ "What gender is a quinceañera usually held for?", "What gender is J. D. Salinger?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Quinceañera-1" ] ], [ [ "J. D. Salinger-1", "J. D. Salinger-12" ] ], [ "operation" ] ], [ [ [ "Quinceañera-1" ] ], [ [ "J. D. Salinger-18" ] ], [ "operation" ] ], [ [ [ "Quinceañera-1" ] ], [ [ "J. D. Salinger-2" ] ], [ "operation" ] ] ] }, { "qid": "14a58de5d05e976c09cb", "term": "Game engine", "description": "Software-development environment designed for building video games", "question": "Does a game engine have a fuel injector?", "answer": false, "facts": [ "A game engine is the software used to develop video games", " A fuel injector is part of an automotive engine" ], "decomposition": [ "Which kind of engine uses a fuel injector?", "Is 'game engine' a kind of #1?" ], "evidence": [ [ [ [ "Fuel injection-1" ] ], [ [ "Game engine-1" ], "operation" ] ], [ [ [ "Fuel injection-62" ] ], [ "operation" ] ], [ [ [ "Fuel injection-1" ] ], [ [ "Game engine-1" ] ] ] ] }, { "qid": "4c495564c4165b2d5d16", "term": "Vitamin C", "description": "nutrient found in citrus fruits and other foods", "question": "Would drinking a glass of lemonade provide Vitamin C?", "answer": true, "facts": [ "Lemonade is made from lemons, sugar, and water.", "Lemons are a citrus fruit.", "Citrus fruit is high in Vitamin C." ], "decomposition": [ "What are the ingredients of lemonade?", "Are any of #1 high in Vitamin C?" ], "evidence": [ [ [ [ "Lemonade-1" ] ], [ "no_evidence" ] ], [ [ [ "Lemonade-2" ] ], [ [ "Lemon-6" ], "operation" ] ], [ [ [ "Lemonade-2" ] ], [ [ "Lemon-12" ] ] ] ] }, { "qid": "0076b548f329544427eb", "term": "Chevrolet Cruze", "description": "compact car marketed by GM from 2008-2019", "question": "Is Chinese successor to Chevrolet Cruze name a town far from Milan?", "answer": false, "facts": [ "The Chinese successor to Chevrolet Cruze is the Chevrolet Monza.", "Monza is a city near the north of Milan." ], "decomposition": [ "What is the Chinese successor to Chevrolet Cruze?", "What is the city that #1 is named after?", "Is #2 located far from Milan?" ], "evidence": [ [ [ [ "Chevrolet Monza (China)-1" ] ], [ [ "Monza-1" ] ], [ "operation" ] ], [ [ [ "Chevrolet Cruze-71" ] ], [ [ "Monza-1" ] ], [ [ "Monza-1" ], "operation" ] ], [ [ [ "Chevrolet Cruze-71" ] ], [ [ "Monza-1" ] ], [ "operation" ] ] ] }, { "qid": "b9d5010aaef9115f77e7", "term": "Tom Cruise", "description": "American actor and producer", "question": "Could Tom Cruise explain mental auditing?", "answer": true, "facts": [ "Mental auditing is a practice within the church of Scientology.", "Tom Cruise is a long standing member of the church of Scientology and is high in the ranks." ], "decomposition": [ "What church practices mental auditing?", "Is Tom Cruise a member of #1?" 
], "evidence": [ [ [ [ "Auditing (Scientology)-1", "Auditing (Scientology)-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Scientology beliefs and practices-1" ] ], [ [ "Tom Cruise-36" ] ] ], [ [ [ "Auditing (Scientology)-1" ] ], [ [ "Tom Cruise-4" ] ] ] ] }, { "qid": "fe5428059eda37cc96c2", "term": "Mount Sharp", "description": "mountain on Mars", "question": "Are human footprints absent from Mount Sharp?", "answer": true, "facts": [ "Mount Sharp is located on Mars.", "Human beings have not traveled to Mars.", "Human footprints could only be present if human feet touched down on Mount Sharp." ], "decomposition": [ "Where is Mount Sharp?", "What would produce a human footprint?", "Have #2 never traveled to #1?" ], "evidence": [ [ [ [ "Mount Sharp-1" ] ], [ [ "Footprint-1" ] ], [ "operation" ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Footprint-1" ] ], [ [ "Human mission to Mars-73" ], "operation" ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Footprint-1" ] ], [ [ "Human mission to Mars-2" ] ] ] ] }, { "qid": "49154ddff1b4e85dff26", "term": "Apollo 15", "description": "Fourth crewed mission to land on the Moon", "question": "Did the crew of Apollo 15 take pictures of Mount Sharp?", "answer": false, "facts": [ "Mount Sharp is a mountain on Mars.", "The crew of Apollo 15 landed on the Moon, not Mars.", "No humans have ever landed on Mars." ], "decomposition": [ "Where is Mount Sharp located", "Did the crew of Apollo 15 travel to #1 or a place very close to #1?" ], "evidence": [ [ [ [ "Mount Sharp-1" ] ], [ [ "Apollo 15-1" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Apollo 15-1" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Apollo 15-2" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Apollo 15-1" ] ] ] ] }, { "qid": "9e7b4c746b598d1521e0", "term": "Geometry", "description": "Branch of mathematics that studies the shape, size and position of objects", "question": "Do carpenters understand geometry?", "answer": true, "facts": [ "Carpenters work in building and maintaining structures such as homes, buildings, and gazebos.", "In order to build a home, one must be able to follow the geometry in the blueprints. " ], "decomposition": [ "What kind of buildings/structures do carpenters help in constructing?", "Do #1 require knowledge of geometry to carry out?" ], "evidence": [ [ [ [ "Carpentry-1" ] ], [ [ "Geometry-38" ], "operation" ] ], [ [ [ "Carpentry-1" ] ], [ [ "Geometry-1" ], "operation" ] ], [ [ [ "Carpentry-1" ] ], [ [ "Geometry-1" ] ] ] ] }, { "qid": "7cb3bf5c1283aad54afa", "term": "Iyer", "description": "caste of Hindu Brahmin communities of Tamil origin", "question": "Do people of the Iyer caste eat meat?", "answer": false, "facts": [ "Iyer is a caste of Hindu Brahmin.", "Brahmin is the priest caste of Hinduism.", "Devout Hindus do not eat meat. ", "Priests of a religion are devout followers of that religion." ], "decomposition": [ "What caste is Iyer part of?", "What larger caste is #1 part of?", "Do people who follow #2 eat meat?" 
], "evidence": [ [ [ [ "Iyer-11" ] ], [ [ "Brahmin (disambiguation)-1" ] ], [ [ "Brahmin-15" ], "operation" ] ], [ [ [ "Iyer-35" ] ], [ [ "Brahmin-1", "History of Brahmin diet-7", "Tamil Brahmin-1" ] ], [ [ "History of Brahmin diet-7" ] ] ], [ [ [ "Iyer-1" ] ], [ [ "Brahmin-1" ] ], [ [ "Brahmin-15" ], "operation" ] ] ] }, { "qid": "b07fd13daa6e17e6273c", "term": "Pottery", "description": "Craft of making objects from clay", "question": "Is a pottery kiln inappropriate for use with glass blowing?", "answer": false, "facts": [ "Pottery kilns heat from the sides.", "Glass kilns heat from the top.", "Glass can be fused in a ceramic pottery kiln without trouble." ], "decomposition": [ "From what sides does a pottery kiln Heat?", "From what sides does a glass kiln heat?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Kiln-16" ], "no_evidence" ], [ [ "Kiln-10" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Kiln-10" ], "no_evidence" ], [ [ "Glass fusing-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Kiln-1" ], "no_evidence" ], [ [ "Kiln-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6ced5b6cea1bf1d35435", "term": "Wednesday", "description": "Day of the week", "question": "Does New Year's Day always occur on a Wednesday?", "answer": false, "facts": [ "New Year's Day occurs on January 1st each year.", "The day of the week any given date falls on rotates by one each year.", "If Leap Year wasn't breaking up the cycle, New Year's Day would be on a Wednesday every seventh year." ], "decomposition": [ "What is the date of New Year's Day?", "Does #1 occur on the same day each year?" ], "evidence": [ [ [ [ "New Year's Day-12" ] ], [ [ "New Year's Day-12" ] ] ], [ [ [ "New Year's Day-1" ] ], [ "operation" ] ], [ [ [ "New Year's Day-1" ] ], [ [ "New Year's Day-4" ], "no_evidence", "operation" ] ] ] }, { "qid": "9390df61207ef77f8ba0", "term": "Swan", "description": "large water bird", "question": "Would WWF be angrier if you killed koala instead of black swan?", "answer": true, "facts": [ "The WWF is an international organization that works for the preservation of animals.", "Black swans are designated as least concern species meaning they are not close to being endangered.", "Koalas are designated as vulnerable to extinction—just a step above endangered." ], "decomposition": [ "What is the black swan's listing on the IUCN red list?", "How is the Koala listed on the IUCN red list?", "What does WWF represent?", "Considering #3, is #2 in more dire straits than #1?" ], "evidence": [ [ [ [ "Black swan-24" ] ], [ [ "Koala-3" ] ], [ [ "World Wide Fund for Nature-1" ] ], [ "operation" ] ], [ [ [ "Black swan-24" ] ], [ [ "Koala-44" ] ], [ [ "World Wide Fund for Nature-10" ] ], [ "operation" ] ], [ [ [ "Black swan-13", "IUCN Red List-1" ] ], [ [ "Koala-50", "Vulnerable species-1" ], "no_evidence" ], [ [ "World Wide Fund for Nature-1" ] ], [ "operation" ] ] ] }, { "qid": "5416d6ace8e5bef7e467", "term": "Tick", "description": "order of arachnids", "question": "Could a nymph tick pass through a standard hole punch?", "answer": true, "facts": [ "A nymph tick is the size of a poppy seed.", "A poppy seed is around 1mm in size.", "The ISO 838 standards set a hole punch size at 6 mm." ], "decomposition": [ "What is a nymph tick comparable in size to?", "How big around is #1?", "What is the diameter of a standard hole punch?", "Is #3 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Ixodes pacificus-11" ] ], [ [ "Ixodes pacificus-11" ], "no_evidence" ], [ [ "Hole punch-6" ] ], [ "operation" ] ], [ [ [ "Tick-1" ], "no_evidence" ], [ [ "Tick-1" ] ], [ [ "Hole punch-10" ] ], [ "operation" ] ], [ [ [ "Tick-1" ], "no_evidence" ], [ [ "Tick-1" ] ], [ [ "Hole punch-6" ] ], [ "operation" ] ] ] }, { "qid": "3554b8a182555f759b9f", "term": "Twin", "description": "One of two offspring produced in the same pregnancy. Use with P31 on items for one twin", "question": "Are twins always born during the same year?", "answer": false, "facts": [ "Some twins are born right before the New Year, and right after the New Year.", "There are some twins, implanted through IVF, who are born decades apart." ], "decomposition": [ "What external fertilization processes can result in twins?", "What process can split embryo twins from #1?", "What process is used to preserve embryos from #1?", "Is it impossible to apply #2 and #3 to embryo twins created from #1?" ], "evidence": [ [ [ [ "In vitro fertilisation-32" ] ], [ [ "Twin-16" ] ], [ [ "Embryo cryopreservation-1" ] ], [ [ "Embryo cryopreservation-2" ], "operation" ] ], [ [ [ "Twin-39" ] ], [ [ "In vitro fertilisation-32" ] ], [ [ "In vitro fertilisation-71" ] ], [ "operation" ] ], [ [ [ "In vitro fertilisation-32" ], "no_evidence" ], [ "no_evidence" ], [ [ "Oocyte cryopreservation-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d435a2f25dfb1e78a841", "term": "Pain", "description": "type of unpleasant feeling", "question": "Does acupuncture cause pain in many people?", "answer": false, "facts": [ "Acupuncture doesn't usually feel painful for most people.", "The needles used in acupuncture are very thin and do not go very deep into the skin." ], "decomposition": [ "What kind of instruments are used during acupuncture?", "Are #1 likely to be painful for most people?" ], "evidence": [ [ [ [ "Acupuncture-10" ] ], [ [ "Acupuncture-11" ] ] ], [ [ [ "Acupuncture-1" ] ], [ [ "Acupuncture-1" ] ] ], [ [ [ "Acupuncture-1" ] ], [ "operation" ] ] ] }, { "qid": "f8bf74ed2a1f4005a37f", "term": "Clouded leopard", "description": "species of mammal found from the Himalayan foothills through mainland Southeast Asia into China", "question": "Would a clouded leopard encounter an awake pangolin?", "answer": true, "facts": [ "Pangolins and clouded leopards have an overlap of ranges", "Pangolins are nocturnal", "Clouded leopards are nocturnal" ], "decomposition": [ "What is the range of the clouded leopard?", "What time of day is the clouded leopard active?", "What is the range of the pangolin?", "What time of day is the pangolin active?", "Do #1 and #3 overlap while #2 and #4 overlap?" 
], "evidence": [ [ [ [ "Clouded leopard-1" ] ], [ [ "Clouded leopard-24" ] ], [ [ "Pangolin-1" ] ], [ [ "Pangolin-2" ] ], [ [ "Clouded leopard-1", "Clouded leopard-24", "Pangolin-1", "Pangolin-2" ] ] ], [ [ [ "Clouded leopard-1" ] ], [ [ "Clouded leopard-24" ] ], [ [ "Pangolin-1" ], "no_evidence" ], [ [ "Pangolin-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Clouded leopard-1" ] ], [ [ "Clouded leopard-24" ] ], [ [ "Pangolin-1" ] ], [ [ "Pangolin-2" ] ], [ "operation" ] ] ] }, { "qid": "ffa3242f6eacd0271ac2", "term": "Alligator", "description": "Genus of large reptiles", "question": "Would a crocodile survive longer in Great Salt Lake than alligator?", "answer": true, "facts": [ "The Great Salt Lake is a Utah lake composed of salt water.", "Crocodiles natural habitat is salt water and they can breathe underwater for hours.", "Alligators have a natural habitat of fresh water." ], "decomposition": [ "What kind of water habitat is the Great Salt Lake?", "Which water habitats are crocodiles adapted to survive in?", "Which water habitats are alligators adapted to survive in?", "Is #1 included in #2 and excluded from #3?" ], "evidence": [ [ [ [ "Great Salt Lake-1" ] ], [ [ "Crocodile-2", "Crocodile-3" ] ], [ [ "Alligator-8" ] ], [ "operation" ] ], [ [ [ "Great Salt Lake-22" ] ], [ [ "Crocodile-3" ] ], [ [ "Alligator-8" ] ], [ "operation" ] ], [ [ [ "Great Salt Lake-1" ] ], [ [ "Crocodile-2" ] ], [ [ "Crocodile-2" ] ], [ "operation" ] ] ] }, { "qid": "6982e0eb7df792f606c9", "term": "Art dealer", "description": "person that buys and sells works of art", "question": "Can an art dealer buy Boeing 737-800 with a Da Vinci painting?", "answer": true, "facts": [ "The Boeing 737-800 plane costs 106 million dollars in 2019.", "Salvator Mundi, a painting attributed to Leonardo Da Vinci, is the most expensive painting ever sold.", "Salvator Mundi sold for over 450 million dollars." ], "decomposition": [ "How much does a Boeing 737-800 cost?", "How much did Da Vinci's highest priced painting sell for?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Boeing 737 Next Generation-42" ], "no_evidence" ], [ [ "Leonardo da Vinci-92" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Salvator Mundi (Leonardo)-13" ] ], [ "operation" ] ], [ [ [ "Boeing 737 Next Generation-39" ] ], [ [ "Leonardo da Vinci-92" ] ], [ "operation" ] ] ] }, { "qid": "267f910d8ce95e51d64a", "term": "The Who", "description": "English rock band", "question": "Did The Who have to cancel tours due to World War II?", "answer": false, "facts": [ "The Who was formed in 1964", "World War II ended in 1945" ], "decomposition": [ "When was The Who formed?", "In what year did World War II end?", "Is #1 before #2?" 
], "evidence": [ [ [ [ "The Who-1" ] ], [ [ "World War II-1" ] ], [ "operation" ] ], [ [ [ "The Who-1" ] ], [ [ "The Second World War (disambiguation)-1" ] ], [ "operation" ] ], [ [ [ "The Who-1" ] ], [ [ "World War II-1" ] ], [ "operation" ] ] ] }, { "qid": "000275e7aa81e30885e7", "term": "Numerology", "description": "any study of the purported divine, mystical or other special relationship between a number and some coinciding observed (or perceived) events", "question": "Has numerology helped shape hotel layouts?", "answer": true, "facts": [ "Numerology is the study of numbers and how they relate to events.", "Numbers such as 3 and 7 hold biblical significance.", "Numbers such as 6 and 13 are said to be unlucky.", "The thirteenth floor is a designation of a level of a multi-level building that is often omitted in countries where the number 13 is considered unlucky.", "Many hotels do not have thirteenth floors because of the enduring superstition." ], "decomposition": [ "What numbers are often considered unlucky?", "What number is usually omitted in numbering hotel floors?", "Is #2 part of #1?" ], "evidence": [ [ [ [ "13 (number)-14" ], "no_evidence" ], [ [ "13 (number)-14" ] ], [ "operation" ] ], [ [ [ "13 (number)-14" ] ], [ [ "13 (number)-14", "Thirteenth floor-2" ] ], [ "operation" ] ], [ [ [ "13 (number)-16", "Number of the Beast-1" ], "no_evidence" ], [ [ "Thirteenth floor-7" ] ], [ "operation" ] ] ] }, { "qid": "28cb9d93e3a0f58e6350", "term": "Operation Barbarossa", "description": "1941 German invasion of the Soviet Union during the Second World War", "question": "Did Operation Barbarossa or Barbarossa's last expedition succeed?", "answer": false, "facts": [ "Operation Barbarossa was the Nazi advance on Russia during World War II.", "Operation Barbarossa was a failure that resulted in Nazi Germany being pushed back by a Soviet counter offensive.", "Operation Barbarossa was named after Holy Roman Emperor Frederick Barbarossa.", "On his final expedition, Frederick Barbarossa drowned while leading an army to help the Crusaders during the Third Crusade.", "The Crusaders failed to recapture Jerusalem during the Third Crusade without the support of Barbarossa and his troops." ], "decomposition": [ "What was the objective of Operation Barbarossa?", "What was the goal of the final expedition of Frederick Barbarossa?", "Did #1 and #2 succeed?" ], "evidence": [ [ [ [ "Operation Barbarossa-1" ] ], [ [ "Frederick I, Holy Roman Emperor-44" ] ], [ [ "Frederick I, Holy Roman Emperor-46", "Operation Barbarossa-4" ] ] ], [ [ [ "Operation Barbarossa-1" ] ], [ [ "Frederick I, Holy Roman Emperor-37" ] ], [ [ "Frederick I, Holy Roman Emperor-46", "Operation Barbarossa-4" ], "operation" ] ], [ [ [ "Operation Barbarossa-1" ] ], [ [ "Frederick I, Holy Roman Emperor-36" ] ], [ [ "Frederick I, Holy Roman Emperor-44", "Operation Barbarossa-4" ] ] ] ] }, { "qid": "4e694a71502c2e750c00", "term": "Anchovy", "description": "Family of fishes", "question": "Are anchovies associated with Italian food?", "answer": true, "facts": [ "Pizza is an Italian food.", "Anchovies are occasionally used as a pizza topping." ], "decomposition": [ "What food item are anchovies sometimes eaten with?", "Is #1 an Italian food?" 
], "evidence": [ [ [ [ "Anchovies as food-5" ] ], [ [ "Anchovies as food-5" ], "operation" ] ], [ [ [ "Anchovies as food-3" ] ], [ [ "Pizza-1" ], "operation" ] ], [ [ [ "Anchovies as food-3" ] ], [ [ "Pizza-1" ] ] ] ] }, { "qid": "9527809204a14d94be67", "term": "Onion", "description": "vegetable", "question": "Can chemicals in onion help create a thermonuclear bomb?", "answer": true, "facts": [ "A thermonuclear bomb, also called a hydrogen bomb, uses hydrogen under high temperatures to create an explosive reaction.", "While chopping onions, cells inside the onion are broken and the gas that comes out forms sulfenic acid.", "Sulfenic acid is composed of several elements including hydrogen." ], "decomposition": [ "What elements are used in a thermonuclear bomb?", "When onions are chopped what gas is released?", "What elements are found in #2?", "Is #1 a subset of #3?" ], "evidence": [ [ [ [ "Thermonuclear weapon-2" ] ], [ [ "Onion-30" ], "no_evidence" ], [ [ "Syn-Propanethial-S-oxide-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Thermonuclear weapon-1" ], "no_evidence" ], [ [ "Onion-30" ] ], [ "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Thermonuclear weapon-2" ] ], [ [ "Onion-30" ] ], [ [ "Syn-Propanethial-S-oxide-1" ] ], [ "operation" ] ] ] }, { "qid": "bad3d5551705406fe58d", "term": "Snowboarding", "description": "winter sport", "question": "Would it be difficult to snowboard on Venus?", "answer": true, "facts": [ "Snowboarding involves descending a snow-covered slope while standing on a snowboard.", "Snow is formed by the freezing of water.", "Water has a freezing point of 32°F.", "Venus has a mean surface temperature of 737 K (464 °C; 867 °F)." ], "decomposition": [ "What kind of surface is suitable for snowboarding?", "What temperature range facilitates the formation of #1?", "What is the average surface temperature on Venus?", "Is #3 within #2?" ], "evidence": [ [ [ [ "Snowboarding-1" ] ], [ [ "Snow-16" ] ], [ [ "Venus-2" ] ], [ "operation" ] ], [ [ [ "Snowboarding-1" ] ], [ [ "Freezing-5" ], "no_evidence" ], [ [ "Venus-2" ] ], [ "operation" ] ], [ [ [ "Snowboarding-1" ] ], [ [ "Snow-3" ], "no_evidence" ], [ [ "Venus-2" ] ], [ "operation" ] ] ] }, { "qid": "d7482b2dc4028be17b36", "term": "Pope Alexander VI", "description": "Pope of the Catholic Church 1492–1503", "question": "Were any of despised Pope Alexander VI's descendants canonized?", "answer": true, "facts": [ "Pope Alexander the VI was a controversial pope born as Rodrigo Borgia.", "Rodrigo Borgia had several children including the despised Juan Borgia who was murdered in 1497.", "Juan Borgia's grandson, Francis Borgia, was a Jesuit priest and the third Superior General of the Society of Jesus.", "Canonization is the process by which the Catholic Church names someone a saint.", "Francis Borgia was named a Catholic saint in June 1670." ], "decomposition": [ "What dynastic house was Pope Alexander VI a member of?", "Were any members of #1 canonized?", "Was #2 a direct descendent of Alexander VI?" 
], "evidence": [ [ [ [ "Pope Alexander VI-2" ] ], [ [ "Francis Borgia, 4th Duke of Gandía-1" ] ], [ "operation" ] ], [ [ [ "Pope Alexander VI-2" ] ], [ [ "Francis Borgia, 4th Duke of Gandía-1" ] ], [ "operation" ] ], [ [ [ "House of Borgia-2" ] ], [ [ "Francis Borgia, 4th Duke of Gandía-1" ] ], [ [ "House of Borgia-21" ], "operation" ] ] ] }, { "qid": "5799a384a9ca6cc645a0", "term": "Saturn", "description": "Sixth planet from the Sun in the Solar System", "question": "Is Saturn named after king of gods in Greek mythology?", "answer": false, "facts": [ "Saturn, the sixth planet from the sun is named after the Roman god Saturn.", "The Roman god Saturn is derived from its Greek equivalent, Kronos.", "The king of the gods in Greek mythology was Zeus.", "Kronos was Zeus's father, and was the leader of the Titans." ], "decomposition": [ "Who were the king of the gods in Greek mythology?", "Which god was the planet Saturn named after?", "Is #2 the same as any of #1?" ], "evidence": [ [ [ [ "Zeus-1" ] ], [ [ "Saturn-35" ] ], [ "operation" ] ], [ [ [ "Zeus-1" ] ], [ [ "Saturn-1" ] ], [ "operation" ] ], [ [ [ "Cronus-1", "Uranus (mythology)-1", "Zeus-1" ] ], [ [ "Saturn (mythology)-1", "Saturn-1" ] ], [ "operation" ] ] ] }, { "qid": "470c3051dc9996abc3a7", "term": "Abortion", "description": "Intentionally ending pregnancy", "question": "Is there any absolute way to prevent abortion?", "answer": false, "facts": [ "In areas where professional medical abortions are illegal, women get unsafe illegal abortions from unlicensed practitioners. ", "Women have successfully aborted their own children through physical or chemical means for centuries." ], "decomposition": [ "In places where medical abortions are illegal, are women absolutely unable to get abortions?" ], "evidence": [ [ [ [ "Unsafe abortion-2" ] ] ], [ [ [ "Abortion-41" ] ] ], [ [ [ "Unsafe abortion-1", "Unsafe abortion-2" ], "operation" ] ] ] }, { "qid": "765ef4b32efee9947e5f", "term": "Charles Manson", "description": "American criminal, cult leader", "question": "Did any killer Manson band members were named for exceed Charles Manson's kills?", "answer": true, "facts": [ "Many of the members of the band Marilyn Manson combined the names of a model or actress with a serial killer.", "Marilyn Manson band memberTwiggy Ramirez took his name from model Twiggy and serial killer Richard Ramirez.", "Richard Ramirez was charged with 13 counts of murder.", "Charles Manson was charged with 9 counts of murder." ], "decomposition": [ "What serial killers were members of the Manson band named after?", "How many counts of murder was Charles Manson charged with?", "Were the murder charges of any of #1 greater than #2?" ], "evidence": [ [ [ [ "Marilyn Manson (band)-4" ] ], [ [ "Charles Manson-1" ] ], [ [ "John Wayne Gacy-4" ] ] ], [ [ [ "Charles Manson-1" ], "no_evidence" ], [ [ "Charles Manson-1" ] ], [ "operation" ] ], [ [ [ "Marilyn Manson (band)-1", "Marilyn Manson (band)-4" ] ], [ [ "Charles Manson-1" ] ], [ [ "Ted Bundy-1" ], "operation" ] ] ] }, { "qid": "406d6897eb20e5740d3f", "term": "Brewing", "description": "production of beer", "question": "Should Peter Griffin be an expert at the craft of brewing?", "answer": true, "facts": [ "Peter Griffin is an employee of a brewery in Quahog. ", "Peter has worked at the brewery for many years and is expected to be familiar with how beer is made." ], "decomposition": [ "Where does Peter Griffin work?", "Is #1 a brewery?" 
], "evidence": [ [ [ [ "Peter Griffin-2" ] ], [ "operation" ] ], [ [ [ "Jungle Love (Family Guy)-3" ] ], [ "operation" ] ], [ [ [ "Peter Griffin-2" ] ], [ "operation" ] ] ] }, { "qid": "ee1ccc88aa46999db8da", "term": "James Brown", "description": "American singer, songwriter, producer and bandleader from South Carolina", "question": "Could James Brown's ex-wives hold a doubles game of tennis?", "answer": true, "facts": [ "James Brown had four ex-wives", "Doubles tennis requires two players per team, with two teams playing against each other" ], "decomposition": [ "How many people are required for tennis doubles?", "How many ex-wives did James Brown have?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Tennis games-23" ] ], [ [ "James Brown-62" ] ], [ "operation" ] ], [ [ [ "Types of tennis match-3" ] ], [ [ "James Brown-62" ] ], [ "operation" ] ], [ [ [ "Tennis-75" ] ], [ [ "James Brown-62" ] ], [ "operation" ] ] ] }, { "qid": "51cfdfc9c52aeed3a1cc", "term": "Mount Sharp", "description": "mountain on Mars", "question": "Do bald eagles nest on Mount Sharp?", "answer": false, "facts": [ "Bald eagles are birds found on earth", "Mount Sharp is a mountain on Mars", "To date, no life forms have been detected on Mars" ], "decomposition": [ "Where is Mount Sharp located?", "Has any form of life ever been discovered on #1?" ], "evidence": [ [ [ [ "Mount Sharp-1" ] ], [ [ "Mars-4" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Planetary habitability-64" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Life on Mars-1" ] ] ] ] }, { "qid": "a98478f4c66dea748297", "term": "Reality", "description": "Sum or aggregate of all that is real or existent", "question": "Could Plato have agreed with the beliefs of Jainism?", "answer": true, "facts": [ "One principle of reality in Jainism is karma, or asrava.", "Jainism began around 500 B.C.", "Plato was born around 428 B.C., so he was alive while Jainism existed.", "Plato believed in karma and reincarnation." ], "decomposition": [ "What are the major beliefs in Jainism?", "What were Plato's major beliefs?", "When did Jainism begin?", "When was Plato born?", "Is there an overlap between #1 and #2, and is #4 more recent than #3?" ], "evidence": [ [ [ [ "Jainism-1" ] ], [ [ "Plato-43" ] ], [ [ "Jainism-1" ] ], [ [ "Plato-1" ] ], [ "operation" ] ], [ [ [ "Jainism-4" ], "no_evidence" ], [ [ "Plato-3" ], "no_evidence" ], [ [ "Jainism-61" ] ], [ [ "Plato-1" ] ], [ "operation" ] ], [ [ [ "Jainism-2" ] ], [ [ "Plato-3", "Plato-39", "Plato-43" ], "no_evidence" ], [ [ "Jainism-1" ] ], [ [ "Plato-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "85300febfe21ecdcc5b1", "term": "Wolverine", "description": "Species of the family Mustelidae", "question": "Would a Wolverine and a Lynx be hard to tell apart?", "answer": false, "facts": [ "Wolverines have rounded ears and a bear-like appearance.", "Lynxes have a feline body with pointed ears." ], "decomposition": [ "What are the physical characteristics of wolverines?", "What are the physical characteristics of lynxes?", "Is there any significant overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Wolverine-6" ] ], [ [ "Lynx-3" ] ], [ "operation" ] ], [ [ [ "Wolverine-6" ] ], [ [ "Lynx-4" ] ], [ [ "Lynx-4", "Wolverine-6" ], "operation" ] ], [ [ [ "Wolverine-6" ] ], [ [ "Lynx-3", "Lynx-4" ] ], [ "operation" ] ] ] }, { "qid": "c565654d56d2c1bb3532", "term": "Water skiing", "description": "surface water sport", "question": "Can you go water skiing on Venus?", "answer": false, "facts": [ "Water skiing requires sufficient area on a smooth stretch of water, one or two skis, a tow boat with tow rope, two or three people, and a personal flotation device.", "Venus has a mean surface temperature of 863 °F.", "There may have been substantial quantities of liquid water on the surface of Venus at one point, but after a period of 600 million to several billion years, a runaway greenhouse effect was caused by the evaporation of that original water." ], "decomposition": [ "What is the basic requirement for water skiing?", "Is #1 present on Venus in sufficient quantities?" ], "evidence": [ [ [ [ "Water skiing-1" ] ], [ [ "Venus-20" ], "operation" ] ], [ [ [ "Water skiing-1" ] ], [ [ "Venus-2" ], "operation" ] ], [ [ [ "Water skiing-1" ] ], [ [ "Venus-2" ] ] ] ] }, { "qid": "f9fcf86196d1847b2f0b", "term": "2008 Summer Olympics", "description": "Games of the XXIX Olympiad, held in Beijing in 2008", "question": "Could you drive a Rowe 550 to the 2008 Summer Olympics?", "answer": true, "facts": [ "The Rowe 550 was a car produced by the Chinese SAIC motor company.", "The Rowe 550 debuted at the 2007 Shanghai Auto Show.", "The 2008 Beijing Summer Olympics happened in the Capital of the People's Republic of China." ], "decomposition": [ "When was the Roewe 550 launched?", "Did the 2008 Summer Olympics hold before or during #1?" ], "evidence": [ [ [ [ "Roewe 550-1" ] ], [ [ "2008 Summer Olympics-1" ], "operation" ] ], [ [ [ "Roewe 550-1" ] ], [ [ "2008 Summer Olympics-1" ], "operation" ] ], [ [ [ "Roewe 550-2" ] ], [ "operation" ] ] ] }, { "qid": "6ef0bafcb42de140de17", "term": "Eagle", "description": "large carnivore bird", "question": "Can shooting bald eagle get a person more prison time than Michael Vick?", "answer": true, "facts": [ "Michael Vick spent 21 months in prison for an illegal dog fighting ring.", "Shooting a bald eagle carries a penalty of up to two years in prison for a second conviction." ], "decomposition": [ "How long of a penalty is it for shooting a bald eagle?", "How many months did Michael Vick serve in prison?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Bald and Golden Eagle Protection Act-18" ] ], [ [ "Michael Vick-2" ] ], [ "operation" ] ], [ [ [ "Bald and Golden Eagle Protection Act-4" ] ], [ [ "Michael Vick-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Bald and Golden Eagle Protection Act-18" ] ], [ [ "Michael Vick-2" ] ], [ "operation" ] ] ] }, { "qid": "bcc6b33ca8ae85fc8784", "term": "Amy Winehouse", "description": "English singer and songwriter", "question": "Was Amy Winehouse a fan of Star Wars: Rogue One?", "answer": false, "facts": [ "Amy Winehouse died in 2011.", "Star Wars: Rogue One was released in 2016." ], "decomposition": [ "When did Amy Winehouse die?", "When was Star Wars: Rogue One released?", "Is #1 after #2?" 
], "evidence": [ [ [ [ "Amy Winehouse-1" ] ], [ [ "Rogue One-2" ] ], [ [ "Rogue One-2" ], "operation" ] ], [ [ [ "Amy Winehouse-90" ] ], [ [ "Rogue One-1" ] ], [ "operation" ] ], [ [ [ "Amy Winehouse-1" ] ], [ [ "Rogue One-3" ] ], [ "operation" ] ] ] }, { "qid": "bcfeb6bb99d969f74e48", "term": "Popular science", "description": "Interpretation of science intended for a general audience", "question": "Is \"A Tale of Two Cities\" a popular science novel?", "answer": false, "facts": [ "\"A Tale of Two Cities\" is a historical fiction novel.", "Popular science books focus on scientific facts presented to a mainstream audience.", "Fiction is not fact." ], "decomposition": [ "What genre is the novel 'A Tale of Two Cities' classified as?", "Is #1 based on scientific facts?" ], "evidence": [ [ [ [ "A Tale of Two Cities-1" ] ], [ "operation" ] ], [ [ [ "A Tale of Two Cities-1" ] ], [ "operation" ] ], [ [ [ "A Tale of Two Cities-1" ] ], [ [ "Historical fiction-3" ] ] ] ] }, { "qid": "deb934107d390914b5b0", "term": "Western honey bee", "description": "Species of insect", "question": "Would Topa Inca Yupanqui have encountered the western honey bee?", "answer": false, "facts": [ "Topa Inca Yupanqui was an Inca ruler in the 15th century", "Western honey bees were first introduced to the Americas in the 16th century" ], "decomposition": [ "When century was Topa Inca Yupanqui alive?", "When were western honey bees first introduced to The Americas", "Was #1 before #2?" ], "evidence": [ [ [ [ "Topa Inca Yupanqui-1" ] ], [ [ "Western honey bee-5" ] ], [ "operation" ] ], [ [ [ "Topa Inca Yupanqui-1" ] ], [ [ "Western honey bee-5" ] ], [ "operation" ] ], [ [ [ "Topa Inca Yupanqui-1" ] ], [ [ "Western honey bee-5" ] ], [ "operation" ] ] ] }, { "qid": "3bd276ea5db4f37dc983", "term": "Middle Ages", "description": "Period of European history from the 5th to the 15th century", "question": "Did eggs need to be kept cold in the middle ages?", "answer": false, "facts": [ "When eggs are freshly laid, they are covered in a film called a 'bloom.' ", "Eggs with their bloom intact are able to stay at room temperature for one month.", "Pasteurization destroys the bloom on eggs. ", "Pasteurization was introduced in the 1990's." ], "decomposition": [ "What naturally protects eggs from spoiling?", "What process removes #1 from eggs?", "Did #2 exist during the Middle Ages?" ], "evidence": [ [ [ [ "Egg as food-35" ] ], [ [ "Egg as food-34" ] ], [ [ "Middle Ages-1", "Refrigeration-6" ], "operation" ] ], [ [ [ "Egg as food-34" ], "no_evidence" ], [ [ "Egg as food-35" ] ], [ [ "Refrigeration-9" ], "operation" ] ], [ [ [ "Egg as food-35" ], "no_evidence" ], [ [ "Egg as food-34" ] ], [ "operation" ] ] ] }, { "qid": "760a0d26a50972656345", "term": "Ginger", "description": "Species of plant", "question": "Could the Port of Baltimore handle the entire world's cargo production of ginger each year?", "answer": true, "facts": [ "In 2018, the world production of ginger was 2.8 million tons.", "The Port of Baltimore handles about 2.8 million tons of cargo per fiscal quarter. ", "A fiscal quarter is shorter than a year." ], "decomposition": [ "How much cargo does the Port of Baltimore handle each fiscal quarter?", "How much ginger cargo is produced each year?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Port of Baltimore-19" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Port of Baltimore-18" ] ], [ [ "Ginger-8" ] ], [ "operation" ] ], [ [ [ "Port of Baltimore-19" ] ], [ [ "Ginger-8" ] ], [ "operation" ] ] ] }, { "qid": "33289d83688ed7080bd5", "term": "Cream", "description": "Dairy product", "question": "Does store bought milk have cream at the top?", "answer": false, "facts": [ "When milk is non-homogenized, the cream will separate and rise to the top.", "Most store bought milk is homogenized. " ], "decomposition": [ "What processes does store-bought milk go through?", "What are the characteristics of milk that is treated with #1?", "Is \"cream on the top\" a characteristic listed in #2?" ], "evidence": [ [ [ [ "Pasteurization-1" ] ], [ [ "Pasteurization-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Milk-55", "Milk-57", "Milk-59" ] ], [ [ "Cream-1", "Milk-61" ] ], [ [ "Cream-1" ] ] ], [ [ [ "Milk-61" ] ], [ [ "Milk-63" ] ], [ "operation" ] ] ] }, { "qid": "7f8417d42ef9ea1e5a8c", "term": "Watermelon", "description": "A large fruit with a smooth hard rind, of the gourd family", "question": "Are more watermelons grown in Brazil than Antarctica?", "answer": true, "facts": [ "Watermelons are plants grown in climates from tropical to temperate, needing temperatures higher than about 25 °C (77 °F) to thrive.", "The climate of Antarctica is the coldest on Earth.", "The climate of Brazil comprises a wide range of weather conditions across a large area and varied topography, but most of the country is tropical." ], "decomposition": [ "Which climate is suitable for the cultivation of watermelon?", "What are the prevalent climatic conditions in Brazil?", "What are the prevalent climatic conditions in Antarctica?", "Is #2 more similar to #1 than #3?" ], "evidence": [ [ [ [ "Watermelon-2" ] ], [ [ "Brazil-47" ] ], [ [ "Antarctica-42" ] ], [ "operation" ] ], [ [ [ "Watermelon-15" ] ], [ [ "Climate of Brazil-5" ] ], [ [ "Antarctica-42" ] ], [ "operation" ] ], [ [ [ "Watermelon-2" ] ], [ [ "Brazil-47" ] ], [ [ "Antarctica-42" ] ], [ "operation" ] ] ] }, { "qid": "5170cfed313d2de67942", "term": "The Tonight Show Starring Jimmy Fallon", "description": "American late-night talk show", "question": "On August 20, 2020, does The Tonight Show Starring Jimmy Fallon air after moonset EST?", "answer": true, "facts": [ "On August 20th, The Tonight Show Starring Jimmy Fallon airs at 11:35PM", "On August 20th, the moon on the east coast of the USA will set around 9PM" ], "decomposition": [ "The Tonight Show Starring Jimmy Fallon airs at 11:35 p.m. ET/PT.", "On August 20th, the moon on the east coast of the USA set 9PM", "Does #1 occur after #2?" ], "evidence": [ [ [ [ "The Tonight Show Starring Jimmy Fallon-2" ] ], [ [ "Moonlight-1", "Sunset-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Tonight Show Starring Jimmy Fallon-2" ] ], [ [ "Lunar phase-16" ] ], [ [ "Lunar phase-16", "The Tonight Show Starring Jimmy Fallon-2" ], "no_evidence" ] ], [ [ [ "The Tonight Show-30" ], "no_evidence" ], [ [ "Moonrise-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7c7c1f16d72afccb0942", "term": "Paparazzi", "description": "profession", "question": "Were paparazzi directly responsible for the death of Amy Winehouse?", "answer": false, "facts": [ "Amy Winehouse died at home and was found in her home by her bodyguard.", "Amy Winehouse's cause of death was alcohol poisoning. 
" ], "decomposition": [ "What was the cause of Amy Winehouse's death?", "Did paparazzi have a direct involvement in #1?" ], "evidence": [ [ [ [ "Amy Winehouse-92", "Amy Winehouse-94" ] ], [ [ "Amy Winehouse-90", "Amy Winehouse-94" ] ] ], [ [ [ "Amy Winehouse-92" ] ], [ [ "Amy Winehouse-92" ] ] ], [ [ [ "Amy Winehouse-92" ] ], [ "operation" ] ] ] }, { "qid": "1b747c9c67380c83b6d5", "term": "Alexander Graham Bell", "description": "scientist and inventor known for his work on the telephone", "question": "Did the phone Alexander Graham Bell use have call waiting?", "answer": false, "facts": [ "Call waiting was invented in the 1970's to allow phone users to suspend one call to accept another.", "Alexander Graham Bell's phone was used in 1876." ], "decomposition": [ "When was call waiting service introduced?", "When was Alexander Graham Bell's phone used?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Call waiting-9" ] ], [ [ "Alexander Graham Bell-29" ] ], [ "operation" ] ], [ [ [ "Call waiting-9" ] ], [ [ "Alexander Graham Bell-2" ] ], [ [ "Alexander Graham Bell-2", "Call waiting-9" ], "operation" ] ], [ [ [ "Call waiting-9" ] ], [ [ "Alexander Graham Bell-2" ] ], [ "operation" ] ] ] }, { "qid": "9d72d2a919f84ce9c597", "term": "Zoology", "description": "Study of the animal kingdom", "question": "Is zoology unconcerned with strigoi?", "answer": true, "facts": [ "Zoology is the study of the behavior and classification of animals.", "Strigoi are spirits that can transform into animals in Romanian mythology.", "Zoology is based on science and fossils." ], "decomposition": [ "What does the study of zoology entail?", "What kind of creatures are the strigoi?", "Is #2 unrelated to #1" ], "evidence": [ [ [ [ "Zoology-3" ] ], [ [ "Strigoi-5" ] ], [ [ "Strigoi-5", "Zoology-3" ], "operation" ] ], [ [ [ "Zoology-1" ] ], [ [ "Strigoi-1" ] ], [ "operation" ] ], [ [ [ "Zoology-1" ] ], [ [ "Strigoi-1" ] ], [ "operation" ] ] ] }, { "qid": "54713361a75dbc02912c", "term": "Kidney", "description": "internal organ in most animals, including vertebrates and some invertebrates", "question": "Is it safe to eat kidney?", "answer": true, "facts": [ "Traditional British cuisine includes \"Kidney Pie\", or \"Steak and Kidney Pie\".", "Kidney Pie contains kidney." ], "decomposition": [ "What are the various kinds of meat safe for human consumption?", "Is kidney included in #1?" ], "evidence": [ [ [ [ "Offal-95" ] ], [ "operation" ] ], [ [ [ "Meat-1" ] ], [ "no_evidence" ] ], [ [ [ "Meat-17" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "20744bd481b4245333fa", "term": "Gorillaz", "description": "British virtual band", "question": "Has Gorillaz creator been in more bands than Bernard Sumner?", "answer": true, "facts": [ "Gorillaz was created by Damon Albarn.", "Damon Albarn has been in five bands: Gorillaz, Blur, The Good, the Bad & the Queen, Elastica, and DRC Music.", "Bernard Sumner has been in three bands: New Order, Joy Division, and Electronic, Bad Lieutenant." ], "decomposition": [ "Who was the primary creator of Gorillaz?", "How many bands has #1 been a member of?", "How many bands has Bernard Sumner been a member of?", "Is #2 greater than #3?" 
], "evidence": [ [ [ [ "Damon Albarn-1" ] ], [ [ "Damon Albarn-1", "Damon Albarn-3" ] ], [ [ "Bernard Sumner-1", "Bernard Sumner-2", "Bernard Sumner-6" ] ], [ "operation" ] ], [ [ [ "Gorillaz-1" ] ], [ [ "Blur (band)-1", "Gorillaz-1", "Rocket Juice & the Moon-1", "The Good, the Bad & the Queen-1" ] ], [ [ "Bad Lieutenant (band)-1", "Bernard Sumner-4", "Electronic (band)-1", "Joy Division-1" ] ], [ "operation" ] ], [ [ [ "Damon Albarn-1" ] ], [ [ "Damon Albarn-1", "Damon Albarn-3" ] ], [ [ "Bernard Sumner-3", "Bernard Sumner-4", "Bernard Sumner-5", "Bernard Sumner-6" ] ], [ "operation" ] ] ] }, { "qid": "6124699b29b989a90a0c", "term": "Snoopy", "description": "cartoon dog", "question": "Is Jesse W. Moore a potential recipient of a Snoopy-themed award from NASA?", "answer": false, "facts": [ "Snoopy has been a mascot of safety in NASA", "The Silver Snoopy award is given by NASA astronauts to employees and contractors for outstanding achievements related to flight safety ", "Jesse W. Moore received warnings about the failure history of the O rings used on the Challenger shuttle, but did not act on them", "Jesse W. Moore was the associate administrator in charge of NASA's shuttle program at the time of the Challenger explosion", "The O rings were strongly implicated in the fatal explosion" ], "decomposition": [ "What NASA award is Snoopy-themed?", "What are the qualifications to receive #1?", "What were Jesse W. Moore's mission responsibilities at NASA?", "Did Moore execute #3 according to the guidelines of #2?" ], "evidence": [ [ [ [ "Silver Snoopy award-1" ] ], [ [ "Silver Snoopy award-1" ] ], [ [ "Space Shuttle Challenger disaster-76" ] ], [ [ "Space Shuttle Challenger disaster-1" ] ] ], [ [ [ "Silver Snoopy award-1" ] ], [ [ "Silver Snoopy award-1" ] ], [ [ "Space Shuttle Challenger disaster-76" ] ], [ [ "Silver Snoopy award-1", "Space Shuttle Challenger disaster-76" ] ] ], [ [ [ "Silver Snoopy award-1" ] ], [ [ "Silver Snoopy award-8" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "89a3281f33b15c7fba3f", "term": "The Hague", "description": "City and municipality in South Holland, Netherlands", "question": "Does Abdulqawi Yusuf go to the Hague on a typical work day?", "answer": true, "facts": [ "Abdulqawi Yusuf is the current president of the International Court of Justice", "The International Court of Justice is headquartered in The Hague" ], "decomposition": [ "What organization does Abdulqawi Yusuf's work for?", "Where is #1 headquartered?" ], "evidence": [ [ [ [ "Abdulqawi Yusuf-1" ] ], [ [ "International Court of Justice-3" ], "operation" ] ], [ [ [ "Abdulqawi Yusuf-1" ] ], [ [ "International Court of Justice-3" ] ] ], [ [ [ "Abdulqawi Yusuf-1" ] ], [ [ "United Nations-1" ] ] ] ] }, { "qid": "867a5425c26092b30fbf", "term": "Snow White", "description": "fairy tale", "question": "Are Disney's seven dwarves the original ones?", "answer": false, "facts": [ "In the original fairy tale, the dwarves were unnamed, but first named in a 1912 stage version: Blick, Flick, Glick, Snick, Plick, Whick, and Quee.", "In Disney's version, the dwarves are named Happy, Sleepy, Sneezy, Grumpy, Dopey, Bashful, and Doc." ], "decomposition": [ "What were the original names of the seven dwarfs?", "What are the names of the seven dwarfs in Disney films?", "Is #1 identical to #2?" 
], "evidence": [ [ [ [ "Seven Dwarfs-6" ] ], [ [ "Snow White and the Seven Dwarfs (1937 film)-7" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Snow White and the Seven Dwarfs (1937 film)-7" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Seven Dwarfs-6" ] ], [ [ "Snow White and the Seven Dwarfs (1937 film)-7" ] ], [ [ "Seven Dwarfs-6" ], "operation" ] ] ] }, { "qid": "956ca4e81d4ec39ab11a", "term": "Guam", "description": "Island territory of the United States of America", "question": "Does Guam have a state capital?", "answer": false, "facts": [ "Guam is not a state.", "Only states can have a state capital." ], "decomposition": [ "Is Guam a country or state?", "Does #1 have state capitals?" ], "evidence": [ [ [ [ "Guam-1" ] ], [ [ "Hagåtña, Guam-1" ] ] ], [ [ [ "Guam-1" ], "no_evidence" ], [ [ "Guam-1" ] ] ], [ [ [ "Guam-1" ] ], [ [ "Hagåtña, Guam-1" ], "operation" ] ] ] }, { "qid": "20ca635da854c332424e", "term": "Heracles", "description": "divine hero in Greek mythology, son of Zeus and Alcmene", "question": "Did Heracles famous labors exceed a baker's dozen?", "answer": false, "facts": [ "Heracles had twelve labors he was tasked with such as cleaning the Augean stables and killing the nine-headed Lernaean Hydra.", "A baker's dozen is a term used in cooking that refers to 13 of anything." ], "decomposition": [ "How many labors was Heracles made to carry out?", "How many is a baker's dozen?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Labours of Hercules-1" ] ], [ [ "Dozen-7" ] ], [ "operation" ] ], [ [ [ "Labours of Hercules-1" ] ], [ [ "Dozen-7" ] ], [ "operation" ] ], [ [ [ "Heracles-21" ] ], [ [ "Dozen-7" ] ], [ "operation" ] ] ] }, { "qid": "da526afb80b0174a19b0", "term": "Honey badger", "description": "species of mammal", "question": "Would a honey badger fit inside an oven?", "answer": true, "facts": [ " Adult honey badgers measure 23 to 28 cm (9.1 to 11.0 in) in shoulder height and 55–77 cm (22–30 in) in body length, with the tail adding another 12–30 cm (4.7–11.8 in).", "Thirty-inch ovens are the standard for most homes and most kitchens. The inside dimensions of the oven are approximately 25 inches wide and 16 inches high. The oven will be approximately 16 inches deep. " ], "decomposition": [ "What is the average dimmension of a honey badger?", "What are the dimensions of an average oven?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Honey badger-12" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Honey badger-12" ], "no_evidence" ], [ [ "Oven-6" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Honey badger-12" ] ], [ [ "Oven-6" ] ], [ "operation" ] ] ] }, { "qid": "7d2ebef168d86d7ea5a2", "term": "Great Wall of China", "description": "wall along the historical northern borders of China", "question": "Could the Great Wall of China connect the Dodgers to the White Sox?", "answer": true, "facts": [ "The Dodgers are a baseball team located in Los Angeles", "The White Sox are a baseball team located in Chicago", "The distance between Los Angeles and Chicago is 2,015 miles", "The length of the main section of the Great Wall of China is 2,145 miles" ], "decomposition": [ "Where are the Dodgers located?", "Where are the White Sox located?", "What is the distance between #1 and #2?", "How long is The Great Wall of China?", "Is #4 larger than #3?" 
], "evidence": [ [ [ [ "Los Angeles Dodgers-1" ] ], [ [ "Chicago White Sox-1" ] ], [ [ "Southwest Chief-1" ] ], [ [ "Great Wall of China-3" ] ], [ "operation" ] ], [ [ [ "Los Angeles Dodgers-1" ] ], [ [ "Chicago White Sox-1" ] ], [ "no_evidence" ], [ [ "Great Wall of China-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Los Angeles Dodgers-1" ] ], [ [ "Chicago White Sox-1" ] ], [ [ "Southwest Chief-1" ] ], [ [ "Ming Great Wall-1" ] ], [ "operation" ] ] ] }, { "qid": "2be20073ad0ad58502eb", "term": "Halloween", "description": "Holiday celebrated October 31", "question": "If a baby was born on Halloween would they be a Scorpio?", "answer": true, "facts": [ "Halloween is a holiday where people dress up and happens on October 31 each year.", "The zodiac sign of Scorpio encompasses the dates from October 23 to November 22." ], "decomposition": [ "On what date does Halloween occur each year?", "What dates are included in the Zodiac sign of Scorpio?", "Does #1 fall in the date span listed in #2?" ], "evidence": [ [ [ [ "Halloween-1" ] ], [ [ "Scorpio (astrology)-1" ] ], [ "operation" ] ], [ [ [ "Halloween-1" ] ], [ [ "Scorpio (astrology)-1" ] ], [ "operation" ] ], [ [ [ "Halloween-12" ], "no_evidence" ], [ [ "Zodiac-31" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "b8df6ac181c4c5b280f9", "term": "Bottlenose dolphin", "description": "genus of dolphin", "question": "Can bottlenose dolphins hypothetically outbreed human women?", "answer": false, "facts": [ "Bottlenose dolphins have a gestation period of 12 months.", "Human women have a gestation period around 9 months." ], "decomposition": [ "What is the gestation period of bottlenose dolphins?", "What is the gestation period of humans?", "Is #1 lower than #2?" ], "evidence": [ [ [ [ "Bottlenose dolphin-42" ] ], [ [ "Gestation-5" ] ], [ "operation" ] ], [ [ [ "Bottlenose dolphin-42" ] ], [ [ "Human-55" ] ], [ "operation" ] ], [ [ [ "Bottlenose dolphin-42" ] ], [ [ "Pregnancy-1" ] ], [ "operation" ] ] ] }, { "qid": "d258695ba910ec875522", "term": "Johnny Carson", "description": "American talk show host and comedian", "question": "Did Johnny Carson win enough Emmy's to fill a carton if Emmy's were eggs?", "answer": false, "facts": [ "There are 12 eggs in a carton.", "Johnny Carson won 6 Emmys.", "6 is less than 12." ], "decomposition": [ "How many eggs can fit in a standard egg carton?", "How many Emmy Awards did Johnny Carson win?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Egg carton-8" ] ], [ [ "Johnny Carson-1" ] ], [ "operation" ] ], [ [ [ "Egg carton-8" ] ], [ [ "Johnny Carson-1" ] ], [ "operation" ] ], [ [ [ "Egg as food-11" ] ], [ [ "Johnny Carson-1" ] ], [ "operation" ] ] ] }, { "qid": "55a0154c4a5dd692c046", "term": "Mercedes-Benz", "description": "automobile brand of Daimler AG", "question": "Was Mercedes-Benz associated with the Nazis?", "answer": true, "facts": [ "During the 1930s, Mercedes-Benz produced the 770 model.", "The 770 was popular with Nazis, and Adolf Hitler used them as his personal vehicle." ], "decomposition": [ "Which Mercedes-Benz model was made during the 1930s?", "Was #1 popular among the Nazis?" 
], "evidence": [ [ [ [ "Mercedes-Benz-6" ] ], [ "operation" ] ], [ [ [ "Mercedes-Benz 770-1" ] ], [ [ "Mercedes-Benz 770-1" ] ] ], [ [ [ "Mercedes-Benz 770-6" ] ], [ [ "Mercedes-Benz-6" ], "operation" ] ] ] }, { "qid": "fab710522fe2f188579d", "term": "Buzz Aldrin", "description": "American astronaut; second person to walk on the Moon", "question": "Could Buzz Aldrin have owned a computer?", "answer": true, "facts": [ "Buzz Aldrin was born in 1930 and is still alive in 2020. ", "Home computers were first available for sale in 1977. " ], "decomposition": [ "When were personal computers made available to the public?", "When was Buzz Aldrin born?", "Is #2 well before #1?" ], "evidence": [ [ [ [ "Personal computer-10" ] ], [ [ "Buzz Aldrin-1" ] ], [ [ "Buzz Aldrin-1" ], "operation" ] ], [ [ [ "Personal computer-15" ] ], [ [ "Buzz Aldrin-1" ] ], [ "operation" ] ], [ [ [ "Personal computer-7" ] ], [ [ "Buzz Aldrin-1" ] ], [ "operation" ] ] ] }, { "qid": "15271da5457a404509c4", "term": "Force", "description": "Any action that tends to maintain or alter the motion of an object", "question": "Can a cheetah generate enough force to topple Big Show?", "answer": true, "facts": [ "Big Show is a professional wrestler that weighs 383 pounds.", "Force is equal to mass times acceleration.", "An adult Cheetah weighs around 160 pounds.", "An adult Cheetah can run up to 58 MPH." ], "decomposition": [ "How much does Big Show weigh?", "How much does a cheetah weigh?", "How fast can a cheetah run?", "Is the force produced by a mass of #2 and a speed of #3 enough to knock over something that weighs #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Cheetah-1" ] ], [ [ "Cheetah-1" ] ], [ "operation" ] ], [ [ [ "Big Show-4" ], "no_evidence" ], [ [ "Cheetah-1" ] ], [ [ "Cheetah-1" ] ], [ [ "Acceleration-13" ], "operation" ] ], [ [ [ "Big Show-4" ], "no_evidence" ], [ [ "Cheetah-1" ] ], [ [ "Cheetah-15" ] ], [ "operation" ] ] ] }, { "qid": "4752f67ef92a9195ee36", "term": "Elijah", "description": "Biblical prophet", "question": "Is Elijah part of a Jewish holiday?", "answer": true, "facts": [ "The Jewish holiday Passover involves a traditional ceremonial dinner.", "During the ceremony, it is customary to fill an extra cup with wine and put it at the center of the table.", "The door is then opened so the prophet Elijah can visit." ], "decomposition": [ "How is Elijah venerated according to Jewish custom?", "Does #1 include venerating Elijah at a holiday?" ], "evidence": [ [ [ [ "Elijah-49" ], "no_evidence" ], [ [ "Elijah-43", "Elijah-44" ], "operation" ] ], [ [ [ "Elijah-49" ] ], [ "no_evidence" ] ], [ [ [ "Passover Seder-62" ] ], [ "operation" ] ] ] }, { "qid": "0e7927ed413f7b3a0037", "term": "J. Edgar Hoover", "description": "American law enforcement officer and first director of the FBI", "question": "Did J. Edgar Hoover take his calls in Langley, Virginia?", "answer": false, "facts": [ "J. Edgar Hoover was the director of the FBI", "The FBI is headquartered in Washington, D.C.", "Langley, Virginia is the headquarters of the C.I.A." ], "decomposition": [ "What government agency was J. Edgar Hoover the head of?", "Where are the headquarters of #1?", "Is #2 in Langley, Virginia?" ], "evidence": [ [ [ [ "J. Edgar Hoover-1" ] ], [ [ "Federal Bureau of Investigation-4" ] ], [ [ "Federal Bureau of Investigation-4" ], "operation" ] ], [ [ [ "J. Edgar Hoover-1" ] ], [ [ "J. Edgar Hoover Building-1" ] ], [ [ "J. Edgar Hoover Building-1" ] ] ], [ [ [ "J. 
Edgar Hoover-1" ] ], [ [ "Federal Bureau of Investigation-4" ] ], [ "operation" ] ] ] }, { "qid": "8ad6558cabf3efcc631c", "term": "Kangaroo", "description": "сommon name of family of marsupials", "question": "Does a kangaroo incubate its offspring?", "answer": false, "facts": [ "Incubation is the process of hatching offspring from eggs", "Kangaroos are mammals", "Mammals give birth to live offspring" ], "decomposition": [ "Incubation is required for what method of embryonic development?", "What infraclass do kangaroos belong to?", "What method of embryonic development do #2 employ?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Egg incubation-1" ] ], [ [ "Kangaroo-1" ] ], [ [ "Marsupial-15" ] ], [ "operation" ] ], [ [ [ "Incubator (culture)-1" ] ], [ [ "Kangaroo-1" ] ], [ [ "Mammalian reproduction-3" ] ], [ "operation" ] ], [ [ [ "Egg incubation-1" ] ], [ [ "Marsupial-1" ] ], [ [ "Kangaroo-34", "Mammalian reproduction-11" ] ], [ "operation" ] ] ] }, { "qid": "24418db1d18a7fdf79f3", "term": "Basil", "description": "species of plant", "question": "Is basil safe from Hypervitaminosis D?", "answer": true, "facts": [ "Hypervitaminosis D is a rare disease caused by having too much vitamin D.", "Basil contains many vitamins including Vitamin A, B, C, E, and K." ], "decomposition": [ "Hypervitaminosis D is caused by eating too much of what vitamin?", "Does basil contain #1?", "Is it safe to avoid #1 by eating #2?" ], "evidence": [ [ [ [ "Hypervitaminosis D-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Hypervitaminosis D-1" ] ], [ [ "Vitamin D-52" ] ], [ "operation" ] ], [ [ [ "Hypervitaminosis D-1" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "833413ca3a67a6d1c572", "term": "Garfield", "description": "Comic strip created by Jim Davis", "question": "Is Garfield known for hating italian cuisine?", "answer": false, "facts": [ "Garfield is well known for loving lasagna.", "Lasagna is a traditional Italian dish." ], "decomposition": [ "What food is Garfield known for loving?", "What country does #1 come from?", "Is #2 where Italian cuisine comes from?" ], "evidence": [ [ [ [ "Garfield (character)-2" ] ], [ [ "Lasagne-4" ] ], [ [ "Lasagne-2" ] ] ], [ [ [ "Garfield-2" ] ], [ [ "Lasagne-3" ] ], [ [ "Italian cuisine-1" ] ] ], [ [ [ "Garfield (character)-1" ] ], [ [ "Garfield (character)-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ee284d0d4524904d39cb", "term": "Northern Mariana Islands", "description": "American-dependent insular area in the western Pacific", "question": "Is Mark Cuban able to visit Northern Mariana Islands without a passport?", "answer": true, "facts": [ "Citizens of the United States can visit Northern Mariana Islands without a passport.", "Mark Cuban is an American citizen." ], "decomposition": [ "Citizens of what countries can visit the Northern Mariana Islands without a passport?", "What country is Mark Cuban a citizen of?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Northern Mariana Islands-1" ], "no_evidence" ], [ [ "Brian Cuban-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Northern Mariana Islands-50" ], "no_evidence" ], [ [ "Mark Cuban-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Northern Mariana Islands-1" ] ], [ [ "Mark Cuban-1" ] ], [ "operation" ] ] ] }, { "qid": "e263728bd940c5c71bdf", "term": "Hydropower", "description": "energy derived from falling or running water", "question": "Is chaff produced by hydropower?", "answer": true, "facts": [ "Chaff is excess material from milled grain.", "Some mills use hydropower to mill grain." 
], "decomposition": [ "Where does Chaff come from?", "Do some #1's use hydropower to do it's function?" ], "evidence": [ [ [ [ "Chaff-1" ] ], [ [ "Winnowing (sedimentology)-1" ] ] ], [ [ [ "Chaff-1" ] ], [ [ "Hydropower-1" ], "operation" ] ], [ [ [ "Chaff-5" ] ], [ [ "Hydropower-1" ], "operation" ] ] ] }, { "qid": "77f814c0e9766c9cdb4d", "term": "Apollo 13", "description": "A failed crewed mission to land on the Moon", "question": "Were deaths from Apollo 13 mission eclipsed by other space missions?", "answer": true, "facts": [ "Apollo 13 was a failed Moon landing in which the crew had faulty oxygen resources.", "All of the Apollo 13 crew returned safely to earth.", "The Challenger was a space shuttle that malfunctioned, killing all seven passengers.", "The Space Shuttle Columbia disaster had seven casualties." ], "decomposition": [ "How many astronauts died on the Apollo 13 mission?", "How many astronauts died during the launch of the Challenger space shuttle?", "How many astronauts died aboard the Columbia space shuttle?", "Are #2 and #3 both greater than #1?" ], "evidence": [ [ [ [ "Apollo 13-3" ] ], [ [ "Space Shuttle Challenger disaster-1" ] ], [ [ "Space Shuttle Columbia disaster-1" ] ], [ "operation" ] ], [ [ [ "Apollo 13-55" ] ], [ [ "Space Shuttle Challenger disaster-1" ] ], [ [ "Space Shuttle Columbia disaster-1" ] ], [ "operation" ] ], [ [ [ "Apollo 13-52" ] ], [ [ "Space Shuttle Challenger-1" ] ], [ [ "Space Shuttle Columbia-24" ] ], [ "operation" ] ] ] }, { "qid": "b52d3a7244abcf58ac29", "term": "Final Fantasy VI", "description": "1994 video game", "question": "Is Final Fantasy VI closer to beginning than end of its franchise?", "answer": true, "facts": [ "Final Fantasy VI is the sixth entry into the Final Fantasy series of video games.", "There are 15 total games in the main Final Fantasy series of video games as of 2020." ], "decomposition": [ "How many releases have been made in the Final Fantasy franchise?", "Final Fantasy VI comes in what position in the series?", "What is the absolute difference between #1 and the cardinal value of #2?", "What is the absolute difference between 1 and the cardinal value of #2?", "Is #3 greater than #4?" ], "evidence": [ [ [ [ "Final Fantasy-9" ] ], [ "operation" ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Final Fantasy-1" ] ], [ [ "Final Fantasy-6" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Final Fantasy-1" ] ], [ [ "Final Fantasy VI-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "11f0ae8346b66bc1d948", "term": "Secretary", "description": "occupation", "question": "Is Tange Sazen hypothetically an ideal choice for a secretary job?", "answer": false, "facts": [ "Secretaries are required to type and also read copious amounts of notes.", "Tange Sazen is a one-eyed, one-armed swordsman in Japanese literature." ], "decomposition": [ "What physical characteristics is Tange Sazen known to have?", "What type of skill is secretary supposed to have?", "Would it be easy to do #2 when having #1?" 
], "evidence": [ [ [ [ "Tange Sazen-1" ] ], [ [ "Secretary-3" ] ], [ [ "Secretary-3", "Tange Sazen-1" ] ] ], [ [ [ "Tange Sazen-1" ] ], [ [ "Secretary-3" ] ], [ "operation" ] ], [ [ [ "Tange Sazen-1" ] ], [ [ "Secretary-13" ] ], [ "operation" ] ] ] }, { "qid": "61f96e271bbedf0148c0", "term": "Paulo Coelho", "description": "Brazilian lyricist and novelist", "question": "Does Paulo Coelho's wife make a living through speech?", "answer": false, "facts": [ "Paulo Coelho's wife is Christina Oiticica.", "Christina Oiticica is a Brazilian artist.", "Artists make a living through drawing things, which is done by their hands.", "Speech is typically performed with one's mouth." ], "decomposition": [ "Who is Paulo Coelho's wife?", "What does #1 do for a living?", "What body part does she use to do #2?", "What body part do singers use to produce their craft?", "Is #3 the same as #4?" ], "evidence": [ [ [ [ "Paulo Coelho-6" ] ], [ [ "Christina Oiticica-2" ] ], [ [ "Christina Oiticica-3" ] ], [ [ "Origin of speech-14" ] ], [ "operation" ] ], [ [ [ "Paulo Coelho-6" ] ], [ [ "Christina Oiticica-1" ] ], [ [ "Painting-1" ] ], [ [ "Singing-1" ] ], [ "operation" ] ], [ [ [ "Paulo Coelho-6" ] ], [ [ "Christina Oiticica-4" ] ], [ [ "Christina Oiticica-3" ] ], [ [ "Singing-4" ] ], [ "operation" ] ] ] }, { "qid": "a16ec88f41867d97f60a", "term": "Charles Manson", "description": "American criminal, cult leader", "question": "Has Don King killed more people than Charles Manson did with his own hands in 1971?", "answer": true, "facts": [ "Charles Manson is famous for a series of murders in 1971.", "Charles Manson's cult was responsible for seven deaths in 1971 but he was not present during the murders.", "Boxing promoter Don King has been charged with killing two people in incidents 13 years apart and settled out of court.." ], "decomposition": [ "How many people did Charles Manson actually kill?", "Don King has been charged with killing how many people?", "Is #2 larger than #1?" ], "evidence": [ [ [ [ "Charles Manson-1" ] ], [ [ "Don King (boxing promoter)-3" ] ], [ [ "Charles Manson-1", "Don King (boxing promoter)-3" ], "operation" ] ], [ [ [ "Charles Manson-1" ] ], [ [ "Don King (boxing promoter)-1" ] ], [ "operation" ] ], [ [ [ "Charles Manson-1" ], "no_evidence" ], [ [ "Don King (boxing promoter)-3" ] ], [ "operation" ] ] ] }, { "qid": "3f09ce8b018df4527e20", "term": "Sea turtle", "description": "superfamily of reptiles", "question": "Are sea turtles enjoying life during quarantine?", "answer": true, "facts": [ "Sea turtles nest on beaches", "Quarantine due to COVID has resulted in far fewer people using beaches", "More sea turtles have been able to nest and reproduce on beaches during quarantine" ], "decomposition": [ "What elements comprise \"enjoying life\" for a sea turtle?", "Where do the elements in #1 occur?", "How prevalent were humans in the areas in #2 pre-Covid-19?", "In the areas in #2, are humans less prevalent now than in #3?" 
], "evidence": [ [ [ [ "Sea turtle-11" ], "no_evidence" ], [ [ "Sea turtle-45" ] ], [ [ "Sea turtle-13" ] ], [ "operation" ] ], [ [ [ "Sea turtle-14" ], "no_evidence" ], [ [ "Sea turtle migration-6" ], "no_evidence" ], [ "no_evidence" ], [ [ "Sea turtle-13" ], "no_evidence", "operation" ] ], [ [ [ "Green sea turtle-30" ] ], [ [ "Sea turtle-12" ] ], [ [ "Tourism-47" ], "no_evidence" ], [ [ "Sea turtle-13" ], "operation" ] ] ] }, { "qid": "f5daea01f2eab51d3ceb", "term": "Big Ben", "description": "Bell within the clock tower at the Palace of Westminster in London, England", "question": "Can a blind person tell time by Big Ben?", "answer": true, "facts": [ "Big Ben is a striking clock", "A blind person can hear the time the bell tolls" ], "decomposition": [ "What type of clock is Big Ben?", "How does #1 indicate a new hour?", "Can a blind person hear #2?" ], "evidence": [ [ [ [ "Big Ben-1" ] ], [ [ "Striking clock-8" ] ], [ [ "Striking clock-9" ] ] ], [ [ [ "Big Ben-1" ] ], [ [ "Big Ben-47" ] ], [ "operation" ] ], [ [ [ "Big Ben-1" ] ], [ [ "Big Ben-50" ] ], [ "operation" ] ] ] }, { "qid": "1eb8a833f2f5f1698b52", "term": "Tom and Jerry", "description": "Hanna Barbera cartoon series", "question": "Are Tom and Jerry featured in a ride at Disneyland?", "answer": false, "facts": [ "Tom and Jerry were created by Hanna-Barbera and are currently owned by Warner Bros. and Turner Entertainment", "Disneyland is a theme park which features characters owned by the Walt Disney Company" ], "decomposition": [ "What company made Tom and Jerry?", "What company owns Disneyland?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Tom and Jerry-2" ] ], [ [ "Disneyland-4" ] ], [ "operation" ] ], [ [ [ "Tom and Jerry-1" ] ], [ [ "The Walt Disney Company-2" ] ], [ "operation" ] ], [ [ [ "Metro-Goldwyn-Mayer-35" ] ], [ [ "Disneyland Resort-1" ] ], [ "operation" ] ] ] }, { "qid": "3176e76e4fa2af4e62b0", "term": "Emulator", "description": "system that emulates a real system such that the behavior closely resembles the behavior of the real system", "question": "Would downloading Mario 64 on an emulator be legal?", "answer": false, "facts": [ "Mario 64 is licenced to Nintendo Entertainment.", "Nintendo holds a copyright on all of their Mario games." ], "decomposition": [ "What company is Mario 64 licensed to?", "What does #1 hold on Mario 64?", "Is it legal to download Mario 64 if it is #2?" ], "evidence": [ [ [ [ "Super Mario 64-1" ] ], [ [ "Copyright-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Super Mario 64-1" ] ], [ [ "Super Mario 64-21" ], "no_evidence" ], [ [ "Copyright infringement-4" ], "no_evidence", "operation" ] ], [ [ [ "Super Mario 64-2" ] ], [ [ "Super Mario 64-2" ] ], [ [ "Video game console emulator-5" ] ] ] ] }, { "qid": "1fe4d0d2911c40065b89", "term": "Johnny Carson", "description": "American talk show host and comedian", "question": "Could Johnny Carson's children fill out a water polo team?", "answer": false, "facts": [ "Johnny Carson had 3 children.", "Water polo teams consist of 7 players." ], "decomposition": [ "How many children does Johnny Carson have?", "How many people are needed to fill out a water polo team?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Johnny Carson-55" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ], [ [ [ "Johnny Carson-55" ] ], [ [ "Water polo-1" ] ], [ "operation" ] ], [ [ [ "Johnny Carson-55" ] ], [ [ "Water polo-9" ] ], [ "operation" ] ] ] }, { "qid": "a7ede6dfa4790db0364c", "term": "Fear", "description": "Basic emotion induced by a perceived threat", "question": "Could someone in a coma experience fear?", "answer": false, "facts": [ "Fear is induced when an individual feels threatened by something or someone.", "A person in a coma is unconscious and therefore cannot perceive their surroundings." ], "decomposition": [ "What kind of feeling is fear?", "Can a person in a coma experience #1?" ], "evidence": [ [ [ [ "Fear-1" ] ], [ [ "Coma-18" ], "no_evidence", "operation" ] ], [ [ [ "Emotion-54" ] ], [ [ "Coma-1" ] ] ], [ [ [ "Fear-1" ] ], [ [ "Coma-1" ] ] ] ] }, { "qid": "4649ef4a279369c83a9f", "term": "Alan Rickman", "description": "British actor", "question": "Do many fans of J.K Rowling know who Alan Rickman is?", "answer": true, "facts": [ "J.K Rowling wrote the Harry Potter series.", "Alan Rickman performed the role of Severus Snape throughout all 8 Harry Potter films." ], "decomposition": [ "What is JK Rowling most famous for?", "What characters has actor Alan Rickman played?", "What characters appear in #1?", "Is at least one character from #2 also listed in #3?" ], "evidence": [ [ [ [ "J. K. Rowling-1" ] ], [ [ "Alan Rickman-2" ] ], [ [ "Harry Potter (film series)-30" ], "no_evidence" ], [ "operation" ] ], [ [ [ "J. K. Rowling-1" ] ], [ [ "Alan Rickman-2" ] ], [ [ "Severus Snape-3" ] ], [ "operation" ] ], [ [ [ "J. K. Rowling-1" ] ], [ [ "Alan Rickman-15" ] ], [ [ "Severus Snape-47" ] ], [ "operation" ] ] ] }, { "qid": "9b205f7f5667ab195540", "term": "Month", "description": "unit of time", "question": "Are months based on the solar cycle?", "answer": false, "facts": [ "The solar cycle is measured by the year: the length of one orbit, as well as by day: the length of one of Earth's rotation about its axis.", "However, months are based on the lunar cycle, how the shadow of the Earth on the moon causes it to appear to grow and shrink and change shape over the course of four weeks." ], "decomposition": [ "What units of time depend on solar cycles?", "Is months one of #1?" ], "evidence": [ [ [ [ "Solar cycle-1" ] ], [ "operation" ] ], [ [ [ "Solar cycle-32" ], "no_evidence" ], [ [ "Month-17", "Solar cycle-32" ], "operation" ] ], [ [ [ "Solar cycle-3" ] ], [ "operation" ] ] ] }, { "qid": "cc266a3e29a3b8442b14", "term": "Billionaire", "description": "person who has a net worth of at least one billion (1,000,000,000) units of a given currency", "question": "Would a 900,000 pound net worth person be an American billionaire if they exchange currency June 2020?", "answer": true, "facts": [ "The exchange rate in June of 2020 between dollars and pounds is 1 Euro= 1.23 dollar.", "900000 pounds is equal to about 1,107,000.00" ], "decomposition": [ "What is the minimum amount one must have to be called a billionaire?", "As of June 2020, how many dollars make a pound?", "Is #2 times 900000 at least equal to #1?" 
], "evidence": [ [ [ [ "Billionaire-1" ] ], [ [ "Pound sterling-62" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Billionaire-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Billionaire-1" ] ], [ [ "Pound sterling-62" ] ], [ "operation" ] ] ] }, { "qid": "6d4b524b0f555ce5a7ae", "term": "Missionary", "description": "member of a religious group sent into an area to do evangelism", "question": "Is there a popular Broadway character who is a missionary?", "answer": true, "facts": [ "The Book of Mormon is a popular Broadway musical.", "The two main characters are Elder Price and Elder Cunningham.", "They are Mormon missionaries sent to Africa to share their religion with the villagers." ], "decomposition": [ "Who are the two main characters in the popular Broadway musical \"Book of Mormon\"?", "Are #1 missionaries?" ], "evidence": [ [ [ [ "The Book of Mormon (musical)-3" ] ], [ "operation" ] ], [ [ [ "The Book of Mormon (musical)-3" ] ], [ "operation" ] ], [ [ [ "The Book of Mormon (musical)-32" ] ], [ [ "The Book of Mormon (musical)-3" ] ] ] ] }, { "qid": "13f14adc86f66c7d620a", "term": "Reiki", "description": "Pseudoscientific healing technique", "question": "Would somebody leave reiki with bruises?", "answer": false, "facts": [ "Bruises are caused by blunt trauma to the body.", "Reiki is performed without touching the recipient. " ], "decomposition": [ "What are the processes involved in Reiki?", "Does any of #1 involve physical contact with the body?" ], "evidence": [ [ [ [ "Reiki-1" ] ], [ [ "Reiki-15" ] ] ], [ [ [ "Reiki-1" ] ], [ [ "Reiki-1" ] ] ], [ [ [ "Reiki-1" ] ], [ "operation" ] ] ] }, { "qid": "d216d1e93117da3934e2", "term": "Tower of London", "description": "A historic castle on the north bank of the River Thames in central London", "question": "Would Robert Stack have been interested in Tower of London during 1400s for his 14 season show?", "answer": true, "facts": [ "Robert Stack was an actor best known for Unsolved Mysteries which lasted for 14 seasons before being rebooted by Netflix.", "The Tower of London is a historic building in London.", "Unsolved Mysteries explored unexplained phenomenon and mysterious events.", "The heirs of Edward IV mysteriously vanished from the Tower of London in the 1400s and were presumed muredered." ], "decomposition": [ "What is the defining feature of Robert Stack's 14 season show?", "What events happened at the Tower of London in the 1400's?", "Do any of the events in #2 have the characteristic in #1?" ], "evidence": [ [ [ [ "Robert Stack-35" ] ], [ [ "Tower of London-2", "Tower of London-3" ] ], [ "operation" ] ], [ [ [ "Robert Stack-1" ] ], [ [ "Tower of London-34" ] ], [ "operation" ] ], [ [ [ "Unsolved Mysteries-1" ] ], [ [ "Princes in the Tower-2" ] ], [ "operation" ] ] ] }, { "qid": "c5423254bd666acc8a9c", "term": "Almond", "description": "Species of plant", "question": "Would a stool be useful for a Lusotitan to reach the top of an almond tree?", "answer": false, "facts": [ "Almond trees can grow up to 10 meters high", "The Lusotitan adult was over 10 meters high" ], "decomposition": [ "How tall would a typical almond tree grow to be?", "How tall was the typical Lusotitan adult?", "Is #1 larger than #2?" 
], "evidence": [ [ [ [ "Almond-3" ] ], [ [ "Lusotitan-4" ], "no_evidence" ], [ [ "Brachiosaurus-3" ], "no_evidence", "operation" ] ], [ [ [ "Almond-3" ] ], [ [ "Lusotitan-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Almond-3" ] ], [ [ "Lusotitan-4" ] ], [ "operation" ] ] ] }, { "qid": "b742c50188a483291945", "term": "Lifeboat (rescue)", "description": "boat rescue craft which is used to attend a vessel in distress", "question": "Can a lifeboat rescue people in the Hooke Sea?", "answer": false, "facts": [ "Lifeboats are used on bodies of liquid water", "The Hooke Sea is a geographical feature on the surface of Mars", "There are no bodies of liquid water on Mars" ], "decomposition": [ "What environment are lifeboats used in?", "Where is the Hooke Sea?", "Is there #1 on #2?" ], "evidence": [ [ [ [ "Lifeboat (rescue)-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Lifeboat (shipboard)-3" ], "no_evidence" ], [ [ "Hooke (Martian crater)-1", "Hooke (Martian crater)-2" ] ], [ "operation" ] ], [ [ [ "Lifeboat (shipboard)-1" ] ], [ [ "Hooke (Martian crater)-1" ] ], [ "operation" ] ] ] }, { "qid": "f8e6087cd52c925fedad", "term": "Tony Bennett", "description": "American singer", "question": "Could ancient Tony Bennett have a baby in 2020?", "answer": true, "facts": [ "Tony Bennett is a legendary singer who will turn 94 years old in August 2020.", "Ramjit Raghav, the oldest man to have a baby, had his first child at age 94.", "Ramjit Raghav had his second child at age 96." ], "decomposition": [ "How old was Tony Bennett in 2020?", "How old was the oldest man to father a child?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Tony Bennett-1" ] ], [ [ "Ramjit Raghav-1" ] ], [ "operation" ] ], [ [ [ "Tony Bennett-1" ] ], [ [ "Ramjit Raghav-1" ] ], [ "operation" ] ], [ [ [ "Tony Bennett-1" ] ], [ [ "Ramjit Raghav-4" ] ], [ "operation" ] ] ] }, { "qid": "5090f573b09ac3050824", "term": "Eagle", "description": "large carnivore bird", "question": "Are eagles and young bears both used as labels for skills-training youth groups?", "answer": true, "facts": [ "A young bear is a cub.", "Boy Scouts is a skill-training youth group that includes divisions such as Cub Scouts and Eagle Scouts" ], "decomposition": [ "What is the name of a young bear?", "What is a popular skill training group for boys? ", "Are #1 and eagles names for groups in #2?" ], "evidence": [ [ [ [ "Bear-27" ] ], [ [ "Boy Scouts of America-2" ] ], [ [ "Boy Scouts of America-22", "Boy Scouts of America-26" ], "operation" ] ], [ [ [ "Bear-37" ] ], [ [ "Boy Scouts of America-1", "Scouting-1" ] ], [ [ "Cub Scout-1", "Eagle Scout (Boy Scouts of America)-1" ] ] ], [ [ [ "Bear-27" ] ], [ [ "Scout (Scouting)-1" ] ], [ [ "Eagle Scout (Boy Scouts of America)-1", "Scout (Scouting)-5" ], "operation" ] ] ] }, { "qid": "3ebb62af96f836d2d4d6", "term": "Nerd", "description": "Descriptive term, often used pejoratively, indicating that a person is overly intellectual, obsessive, or socially impaired", "question": "Do movies always show nerds as the losers?", "answer": false, "facts": [ "Superbad features two main characters that are nerds on a quest for love, and ends with them being victorious.", "The Social Network is a film about a University Nerd who took his website global and became one of the most powerful people in the world." ], "decomposition": [ "How did the quests of the nerds in Superbad turn out at the end?", "How did the project of the nerd in Social Network turn out at the end?", "Was #1 or #2 a negative outcome for the nerds?" 
], "evidence": [ [ [ [ "Superbad (film)-3", "Superbad (film)-7" ] ], [ [ "Facebook-3", "The Social Network-1" ] ], [ [ "Facebook-3", "Superbad (film)-7" ] ] ], [ [ [ "Superbad (film)-7" ] ], [ [ "The Social Network-7" ] ], [ "operation" ] ], [ [ [ "Superbad (film)-7" ] ], [ [ "The Social Network-6" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "673fb9299daae81cba69", "term": "HIV", "description": "Human retrovirus, cause of AIDS", "question": "Is it safe to share silverware with an HIV positive person?", "answer": true, "facts": [ "HIV is transmitted through blood and mucous membrane contact, not saliva.", "Silverware is used in the mouth and contacts saliva but not other bodily fluids. " ], "decomposition": [ "How is HIV transmitted?", "What comes in contact with silverware when you use it?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "HIV-1" ] ], [ [ "Household silver-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "HIV-1" ] ], [ [ "Eating utensil etiquette-5" ] ], [ [ "Management of HIV/AIDS-63" ], "operation" ] ], [ [ [ "HIV-1" ] ], [ [ "Cutlery-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "8cce11a0fc856f4c4539", "term": "Brussels sprout", "description": "vegetable", "question": "Would many meals heavy in brussels sprouts benefit someone on Coumadin?", "answer": false, "facts": [ "Brussels sprouts are high in vitamin K", "Eating lots of foods high in vitamin K is potentially harmful to people taking anticoagulants ", "Coumadin is an anticoagulant " ], "decomposition": [ "In which class of drugs is Coumadin in?", "Which nutrients are people taking #1 cautioned against getting too much of?", "Are Brussels sprouts low in #2?" ], "evidence": [ [ [ [ "Warfarin-1" ] ], [ [ "Vitamin K-19" ] ], [ [ "Brussels sprout-12" ] ] ], [ [ [ "Warfarin-1" ] ], [ [ "Warfarin-11" ], "no_evidence" ], [ [ "Cruciferous vegetables-1" ], "operation" ] ], [ [ [ "Warfarin-1" ] ], [ [ "Warfarin-2" ] ], [ [ "Vitamin K-33", "Warfarin-2" ] ] ] ] }, { "qid": "24aa6050c1ed0405e663", "term": "Baptism", "description": "Christian rite of admission and adoption, almost invariably with the use of water", "question": "Can Immersion Baptism lead to a death like Jeff Buckley's?", "answer": true, "facts": [ "Immersion Baptism is the practice of submerging people underwater for a religious ritual.", "Jeff Buckley was an acclaimed singer that died of drowning in 1997.", "A baby in Moldova died from Immersion Baptism in 2010." ], "decomposition": [ "How did Jeff Buckley die?", "How is immersion baptism performed?", "Are the circumstances surrounding #1 similar to that of #2?" ], "evidence": [ [ [ [ "Jeff Buckley-3" ] ], [ [ "Immersion baptism-1" ] ], [ "operation" ] ], [ [ [ "Jeff Buckley-36" ] ], [ [ "Immersion baptism-4" ] ], [ [ "Immersion baptism-4" ], "operation" ] ], [ [ [ "Jeff Buckley-3" ] ], [ [ "Immersion baptism-2" ] ], [ [ "Drowning-1", "Swimming-1" ], "operation" ] ] ] }, { "qid": "db1a2626dae69bdecb34", "term": "Fiat Chrysler Automobiles", "description": "Multinational automotive manufacturing conglomerate", "question": "Is Fiat Chrysler associated with Japanese cars?", "answer": false, "facts": [ "Fiat Chrysler is composed of the two merged automobile companies Fiat and Chrysler.", "Fiat is an Italian company with headquarters in Amsterdam.", "Chrysler is based in the United States of America.", "Together they own 10 car brands but none are Asian in origin." 
], "decomposition": [ "Which companies merged to form Fiat Chrysler?", "Is any of #1 based in Japan", "Which cars have been produced by Fiat Chrysler?", "Is any of #3 Japanese in origin?", "Is #2 or #4 positive?" ], "evidence": [ [ [ [ "Fiat Chrysler Automobiles-1" ] ], [ [ "Chrysler-1", "Fiat S.p.A.-1" ], "operation" ], [ [ "Alfa Romeo 4C-12", "Fiat Chrysler Automobiles-27" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Fiat Chrysler Automobiles-1" ] ], [ "operation" ], [ [ "Kid Brands-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Fiat S.p.A.-1" ] ], [ [ "Fiat Chrysler Automobiles-1" ] ], [ [ "Fiat S.p.A.-3" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "bbdc84df46a82a583e06", "term": "The Great Gatsby", "description": "1925 novel by F. Scott Fitzgerald", "question": "When Hugh Jackman was a teacher, would he have taught The Great Gatsby?", "answer": false, "facts": [ "The Great Gatsby is often taught in high school English classes. ", "Hugh Jackman worked as a school gym teacher before he was an actor." ], "decomposition": [ "What classes did Hugh Jackman teach?", "In what classes is The Great Gatsby taught?", "Are any of the classes listed in #1 also listed in #2?" ], "evidence": [ [ [ [ "Hugh Jackman-4" ], "no_evidence" ], [ [ "The Great Gatsby-23" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Hugh Jackman-4" ] ], [ [ "The Great Gatsby-1", "The Great Gatsby-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Hugh Jackman-4" ] ], [ [ "English studies-1" ] ], [ "operation" ] ] ] }, { "qid": "8f3bbafdb649f391fea3", "term": "Arnold Schwarzenegger", "description": "Austrian-American actor, businessman, bodybuilder and politician", "question": "Would Arnold Schwarzenegger be unable to run for President of the US?", "answer": true, "facts": [ "Arnold Schwarzenegger was born in Austria.", "One requirement of running for the US Presidency is to have been born in the USA." ], "decomposition": [ "Where was Arnold Schwarzenegger born?", "Is #1 in the US?" ], "evidence": [ [ [ [ "Arnold Schwarzenegger-11" ] ], [ "operation" ] ], [ [ [ "Arnold Schwarzenegger-6" ] ], [ [ "Thal, Styria-1" ], "operation" ] ], [ [ [ "Arnold Schwarzenegger-6" ] ], [ [ "Styria-1" ] ] ] ] }, { "qid": "30ffdbc264ee1084a9b5", "term": "Capsaicin", "description": "chemical compound", "question": "Is capsaicin associated with cooking?", "answer": true, "facts": [ "Capsaicin occurs naturally in chilies and other peppers.", "It is the substance that makes the peppers spicy.", "Peppers are routinely used in cooking in many cultures." ], "decomposition": [ "What is capsaicin a highly active component of?", "Is #1 used in cooking? " ], "evidence": [ [ [ [ "Capsaicin-1" ] ], [ [ "Chili pepper-1" ] ] ], [ [ [ "Capsaicin-1" ] ], [ [ "Chili pepper-1" ] ] ], [ [ [ "Capsaicin-2" ] ], [ [ "Capsicum-1" ] ] ] ] }, { "qid": "6b86b4445e7fa97f52c6", "term": "Bartender", "description": "person who serves usually alcoholic beverages behind the bar in a licensed establishment", "question": "Does a person need a college degree to become a bartender?", "answer": false, "facts": [ "College degrees require at least 2 years of study to obtain.", "Bartender training generally takes 40 hours." ], "decomposition": [ "How much hours of training does it take to become a bartender?", "How many years does it take to get the lowest college degree?", "Is #2 less than #1?" 
], "evidence": [ [ [ [ "Bartending school-2" ] ], [ [ "Associate degree-1" ] ], [ [ "Year-57" ], "operation" ] ], [ [ [ "Bartender-12" ] ], [ [ "Associate degree-17" ] ], [ "operation" ] ], [ [ [ "Bartender-12" ], "no_evidence" ], [ [ "Associate degree-1", "Bachelor's degree-131" ] ], [ "operation" ] ] ] }, { "qid": "042642af049331a87b87", "term": "German Shepherd", "description": "Dog breed", "question": "Would a German Shepherd be welcome in an airport?", "answer": true, "facts": [ "Airports in the US must be compliant with the ADA allowing for service dogs as medical equipment.", "Police security often use dogs like German Shepherds to search for drugs at airports.", "Some airlines have special travel accommodations for dogs." ], "decomposition": [ "What measures are used to provide security in airports?", "Which of #1 involve the use of animals?", "Are German Shepherds used as #2?" ], "evidence": [ [ [ [ "Airport security-2" ] ], [ [ "Detection dog-1" ] ], [ [ "German Shepherd-2" ], "operation" ] ], [ [ [ "Airport security-7" ] ], [ [ "Airport security-7" ] ], [ "operation" ] ], [ [ [ "Airport security-7" ] ], [ [ "Police dog-1" ] ], [ [ "Police dog-11" ] ] ] ] }, { "qid": "d402074eec03cd7ea06e", "term": "Underworld", "description": "The mythic Relm of the Dead, located far underground (aka, Hades; Underworld)", "question": "Would Hades and Osiris hypothetically compete for real estate in the Underworld?", "answer": true, "facts": [ "Hades was the Greek god of death and the Underworld.", "Osiris was the Egyptian god of the Underworld." ], "decomposition": [ "What was Hades the God of?", "What was Osiris the God of?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Hades-1" ] ], [ [ "Osiris-1" ] ], [ "operation" ] ], [ [ [ "Hades-1" ] ], [ [ "Osiris-1" ] ], [ "operation" ] ], [ [ [ "Hades-1" ] ], [ [ "Osiris-1" ] ], [ "operation" ] ] ] }, { "qid": "3df705e3e1ff592c8149", "term": "Mathematician", "description": "person with an extensive knowledge of mathematics", "question": "Would Hodor hypothetically be a good math mathematician?", "answer": false, "facts": [ "Mathematicians are expert students of mathematics.", "Hodor was a dimwitted giant of a man that served House Stark in Game of Thrones.", "Hodor worked in the stables and could only utter the only word he ever said was his own name.", "Mathematicians frequently publish articles on theories and need to be able to read and write." ], "decomposition": [ "How proficient is Hodor at reading/writing and general intelligence?", "What skills would be required to be good at math?", "Could #1 satisfy #2?" ], "evidence": [ [ [ [ "Hodor (disambiguation)-1" ], "no_evidence" ], [ [ "Mathematician-1", "Mathematician-12" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Mathematics-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Mathematics-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "45c4f7aad64b5fdd76e9", "term": "Moustache", "description": "Facial hair grown on the upper lip", "question": "Would a Rockette look odd with a moustache? ", "answer": true, "facts": [ "The Rockettes are an American dance group made up of all women.", "In America, it is uncommon to see a woman with a moustache." ], "decomposition": [ "What groups of people make up the Rockettes?", "Would #1 look odd with a moustache?" 
], "evidence": [ [ [ [ "The Rockettes-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Rockettes-5" ] ], [ [ "Facial hair-5" ] ] ], [ [ [ "The Rockettes-1" ], "no_evidence" ], [ [ "Facial hair-5" ] ] ] ] }, { "qid": "15885efe6f91724c16d9", "term": "Audi", "description": "Automotive manufacturing subsidiary of Volkswagen Group", "question": "Is sound barrier too much for Audi R8 V-10 Plus to break?", "answer": true, "facts": [ "Audi R8 V-10 Plus is the fastest car produced by Audi.", "The Audi R8 V-10 Plus has a top speed of 205 MPH.", "To break the sound barrier, a jet must reach a speed of 770 MPH." ], "decomposition": [ "What is the top speed of an Audi R8 V-10 Plus?", "What speed must be obtained to break the sound barrier?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Audi R8 (Type 4S)-11" ] ], [ [ "Sound barrier-2" ] ], [ [ "Audi R8 (Type 4S)-11", "Speed of sound-1" ] ] ], [ [ [ "Audi S8-5" ], "no_evidence" ], [ [ "Sound barrier-11" ] ], [ "operation" ] ], [ [ [ "Audi R8-1" ], "no_evidence" ], [ [ "Speed of sound-1" ] ], [ "operation" ] ] ] }, { "qid": "b110458494fa12444ff5", "term": "Kanji", "description": "adopted logographic Chinese characters used in the modern Japanese writing system", "question": "Can printing books in kanji instead of the Roman alphabet save trees?", "answer": true, "facts": [ "The Kanji writing system uses less space to communicate ideas than an alphabet system.", "Trees are killed to make pages for books." ], "decomposition": [ "What type of writing system is kanji?", "Does #1 take up less space than an alphabet?" ], "evidence": [ [ [ [ "Logogram-1" ] ], [ [ "Logogram-2" ], "operation" ] ], [ [ [ "Kanji-1" ] ], [ [ "Logogram-1" ], "no_evidence" ] ], [ [ [ "Kanji-1" ] ], [ [ "Kanji-49", "Kanji-80" ], "no_evidence", "operation" ] ] ] }, { "qid": "f2859b2ce17b5f5a6ad9", "term": "Chinchilla", "description": "Rodent genus", "question": "Is a Chinchilla breed of felis catus a type of rodent?", "answer": false, "facts": [ "A Chinchilla is a rodent native to the Andes mountains.", "Felis catus is the scientific name for a cat.", "The Chinchilla breed of cats is named for its plush coat which shares similarities to the Chinchilla.", "The Chinchilla cat is really a variant of the Persian breed of cats." ], "decomposition": [ "Which species are named felis catus?", "What is the most notable feature of the Chinchilla?", "Are Chinchilla breed of #1 so named because they have #2?", "Given that #3 is positive, does that make #1 rodents?" ], "evidence": [ [ [ [ "Cat-1" ] ], [ [ "Chinchilla-2" ] ], [ [ "Persian cat-28" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cat-1" ] ], [ [ "Chinchilla-2" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Cat-13" ] ], [ [ "Chinchilla-10" ], "no_evidence" ], [ [ "Persian cat-28", "Rodent-1" ], "operation" ], [ "operation" ] ] ] }, { "qid": "27b84ef72c557aa8fc14", "term": "Watermelon", "description": "A large fruit with a smooth hard rind, of the gourd family", "question": "Is watermelon safe for people with a tricarboxylic acid allergy?", "answer": true, "facts": [ "Tricarboxylic acid as an acid that manifests itself in fruits as citric acid.", "Citric acid can be found in citrus fruits such as oranges and lemon.", "Watermelon is not a citrus fruit." ], "decomposition": [ "What is the most common example of a tricarboxylic acid?", "Which kind of fruits is #1 usually present in?", "Is watermelon excluded from #2?" 
], "evidence": [ [ [ [ "Tricarboxylic acid-1" ] ], [ [ "Citric acid-1" ] ], [ [ "Citrus-1", "Watermelon-2" ], "operation" ] ], [ [ [ "Tricarboxylic acid-2" ] ], [ [ "Citric acid-14" ] ], [ [ "Citric acid-14", "Watermelon-8" ], "operation" ] ], [ [ [ "Tricarboxylic acid-1" ] ], [ [ "Citric acid-14" ] ], [ "operation" ] ] ] }, { "qid": "0431a3fc727855c8ad83", "term": "Dalai Lama", "description": "Tibetan Buddhist spiritual teacher", "question": "Can the Dalai Lama fit in a car?", "answer": true, "facts": [ "The Dalai Lama is a person.", "Cars are designed for people to sit in them." ], "decomposition": [ "What type of being is the Dalai Lama?", "Who are cars designed for?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Dalai Lama-1" ] ], [ [ "Car controls-23" ] ], [ "operation" ] ], [ [ [ "Dalai Lama-1" ] ], [ [ "Car-42" ] ], [ "operation" ] ], [ [ [ "Dalai Lama-1", "Person-1" ] ], [ [ "Car-1" ] ], [ "operation" ] ] ] }, { "qid": "02eb6aacd72d1b93a83a", "term": "Vulcan (mythology)", "description": "Ancient Roman god of fire, volcanoes, and metalworking", "question": "Does the Roman god Vulcan have a Greek equivalent?", "answer": true, "facts": [ "Vulcan is the Roman god of fire and metalworking.", "Hephaestus is the Greek god of fire and metalworking.", "They are the same mythological figure, one of many characters the Romans borrowed from the Greeks and changed their names." ], "decomposition": [ "What is the Roman god Vulcan god of?", "Is there a god of #1 in Greek mythology?" ], "evidence": [ [ [ [ "Vulcan (mythology)-17" ] ], [ [ "Helios-13" ] ] ], [ [ [ "Vulcan (mythology)-1" ] ], [ [ "Vulcan (mythology)-1" ] ] ], [ [ [ "Vulcan (mythology)-17" ] ], [ [ "Hephaestus-1" ] ] ] ] }, { "qid": "e1f93419cb9a2f1d06ca", "term": "Christians", "description": "people who adhere to Christianity", "question": "Does Hammurabi's Code violate Christians Golden Rule?", "answer": true, "facts": [ "The Golden Rule of Christianity states to do unto others as you would want them to do to you.", "Hammurabi's Code states an eye for an eye and a tooth for a tooth." ], "decomposition": [ "What is the golden rule in Christianity? ", "What does the Code of Hammurabi state?", "Is #1 the same meaning as #2?" ], "evidence": [ [ [ [ "Golden Rule-1", "Golden Rule-20" ] ], [ [ "Code of Hammurabi-15", "Shofetim (parsha)-26" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Golden Rule-1" ] ], [ [ "Code of Hammurabi-1" ] ], [ [ "Eye for an eye-1" ], "operation" ] ], [ [ [ "Golden Rule-21" ] ], [ [ "Code of Hammurabi-2" ] ], [ "operation" ] ] ] }, { "qid": "8ab0f34f407608dbd2b3", "term": "Chicago \"L\"", "description": "rapid transit system in Chicago, Illinois, operated by the CTA", "question": "Would the fastest tortoise win a race against a Chicago \"L\"?", "answer": false, "facts": [ "Top speed of Chicago \"L\" is 55 mph (89 km/h).", "The Guinness Book of World Records maintains the record for fastest tortoise: the tortoise ran at an average speed of 0.63 miles per hour." ], "decomposition": [ "What is the top speed of a Chicago \"L\"?", "What is the top speed of a tortoise?", "Is #2 greater than #1?" 
], "evidence": [ [ [ [ "Chicago \"L\"-37" ], "no_evidence" ], [ [ "Turtle-5" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Chicago \"L\"-58" ], "no_evidence" ], [ [ "Turtle racing-6" ] ], [ "operation" ] ] ] }, { "qid": "80d50b4511767c25aa28", "term": "Unicode", "description": "Character encoding standard", "question": "Did Malcolm X use Unicode?", "answer": false, "facts": [ "Malcolm X died in 1965. ", "Unicode did not become a standard until 1991. " ], "decomposition": [ "When did Malcolm X die?", "When was Unicode established?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Malcolm X-96" ] ], [ [ "Unicode-12" ] ], [ [ "Malcolm X-96", "Unicode-12" ], "operation" ] ], [ [ [ "Malcolm X-1" ] ], [ [ "Unicode-18" ] ], [ "operation" ] ], [ [ [ "Malcolm X-1" ] ], [ [ "Unicode-12" ] ], [ "operation" ] ] ] }, { "qid": "784d37fe54b0ad49c49a", "term": "Jukebox musical", "description": "stage or film musical compiled from pre-existing songs", "question": "Is there a jukebox musical about a sweet transvestite from Transexual, Transylvania?", "answer": false, "facts": [ "Jukebox musicals feature songs that have already been released.", "Rocky Horror Picture Show is about a sweet transvestite from Transexual, Transylvania", "Rocky Horror Picture Show contains songs written specifically for itself" ], "decomposition": [ "What is characteristic of songs in a jukebox musical?", "What musical is about a sweet transvestite from Transexual, Transylvania?", "Does #2 contain #1?" ], "evidence": [ [ [ [ "Jukebox musical-6" ] ], [ [ "The Rocky Horror Picture Show-5" ] ], [ "operation" ] ], [ [ [ "Jukebox musical-1" ] ], [ [ "The Rocky Horror Show-1" ] ], [ "operation" ] ], [ [ [ "Jukebox musical-1" ] ], [ [ "The Rocky Horror Picture Show-5" ] ], [ [ "The Rocky Horror Picture Show-31" ], "no_evidence", "operation" ] ] ] }, { "qid": "8eecf2ff459ef5a9b98a", "term": "Cactus", "description": "Family of mostly succulent plants, adapted to dry environments", "question": "Would an aerodynamic cactus benefit from more frequently closed stomata?", "answer": true, "facts": [ "Cactus spines help the plant retain water by reducing air flow around the plant", "Aerodynamic objects have smooth surfaces ", "Crassulacean acid metabolism is used by cactuses ", "Crassulacean acid metabolism is when a plant's stomata stay closed during daylight or times of drought to prevent water loss" ], "decomposition": [ "What helps cacti conserve water?", "Of #1, what methods do not involve protrusions that might restrict air flow?", "Are closed stoma one of #2?" ], "evidence": [ [ [ [ "Cactus-28", "Cactus-29" ] ], [ [ "Cactus-34" ] ], [ "operation" ] ], [ [ [ "Cactus-1" ] ], [ [ "Cactus-35" ] ], [ "operation" ] ], [ [ [ "Cactus-13", "Cactus-14" ], "no_evidence" ], [ [ "Cactus-13", "Cactus-14" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d15a33b8f88dc5ce32b0", "term": "Drummer", "description": "percussionist who creates and accompanies music using drums", "question": "Do drummers need spare strings?", "answer": false, "facts": [ "Drummers usually work with guitarists or other stringed instrumentalists.", "However, drum sets do not contain strings.", "Musicians usually change their own strings as necessary, so it is their band mates, not the drummer, who carries the spare strings." ], "decomposition": [ "What instruments require strings in order to be played?", "What instrument do drummers play?", "Is there any overlap between #2 and #1?" 
], "evidence": [ [ [ [ "String instrument-1" ] ], [ [ "Drum-1" ] ], [ "operation" ] ], [ [ [ "String instrument-4" ] ], [ [ "Drum-1" ] ], [ [ "Drum-1" ], "operation" ] ], [ [ [ "String instrument-4" ] ], [ [ "Drummer-2" ] ], [ "operation" ] ] ] }, { "qid": "a0eb63776f7720ec0bcc", "term": "Parsifal", "description": "opera in three acts by Richard Wagner", "question": "Was the subject of Parsifal taken from British folklore?", "answer": true, "facts": [ "Parsifal was loosely based on a poem about Percival", "Percival was a Knight of the Round Table", "King Arthur and the Knights of the Round Table were products of British folklore" ], "decomposition": [ "What was the opera 'Parsifal' based on?", "Who is the main character in #1?", "Which group is #2 part of?", "Did #3 originate from British folklore?" ], "evidence": [ [ [ [ "Parsifal-1" ] ], [ [ "Parzival-1" ] ], [ [ "Percival-1" ] ], [ [ "Knights of the Round Table-1" ] ] ], [ [ [ "Parsifal-1" ] ], [ [ "Percival-1" ] ], [ [ "Knights of the Round Table-1" ] ], [ "operation" ] ], [ [ [ "Parsifal-5" ] ], [ [ "Parsifal-34" ] ], [ [ "Parsifal-33" ] ], [ [ "Knight-4" ] ] ] ] }, { "qid": "d06b92172f2854fa1f62", "term": "Parsley", "description": "species of plant, herb", "question": "Is it normal to find parsley in multiple sections of the grocery store?", "answer": true, "facts": [ "Parsley is available in both fresh and dry forms.", "Fresh parsley must be kept cool.", "Dry parsley is a shelf stable product." ], "decomposition": [ "What forms of parsley are consumed?", "What areas of the grocery store carry each of #1?", "Does #2 include places separate from one another?" ], "evidence": [ [ [ [ "Parsley-13", "Parsley-15" ] ], [ [ "Produce-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Parsley-18", "Parsley-3" ] ], [ [ "Supermarket-3" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Parsley-16", "Parsley-18" ], "no_evidence" ], [ [ "Canning-1", "Produce-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c864d8999a8e4cce870b", "term": "Guam", "description": "Island territory of the United States of America", "question": "Was Raphael's paintings influenced by the country of Guam?", "answer": false, "facts": [ "Raphael was a European painter that died in 1520.", "Magellan's 1521 voyage was Europe's first trip to Guam." ], "decomposition": [ "When did the painter Raphael die?", "When did Europeans first visit Guam?", "Was #1 after #2?" ], "evidence": [ [ [ [ "Raphael-38" ] ], [ [ "Guam-11" ] ], [ [ "Guam-11", "Raphael-38" ], "operation" ] ], [ [ [ "Raphael-1" ] ], [ [ "Guam-11" ] ], [ "operation" ] ], [ [ [ "Raphael-1" ] ], [ [ "Guam-3" ] ], [ "operation" ] ] ] }, { "qid": "72af0dabec64573d498f", "term": "Dosa", "description": "Thin pancakes originating from South India", "question": "Would someone on a keto diet be able to eat Dosa?", "answer": false, "facts": [ "Dosa's main ingredients are rice and black gram,", "The ketogenic diet is a high-fat, adequate-protein, low-carbohydrate diet.", "Rice is high in carbohydrates." ], "decomposition": [ "Which food nutrients are minimally consumed in a keto diet?", "Which food nutrients does Dosa promarily contain?", "Is #2 excluded from #1?" 
], "evidence": [ [ [ [ "Ketogenic diet-1" ] ], [ [ "Dosa-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ketogenic diet-1" ] ], [ [ "Dosa-1" ] ], [ "operation" ] ], [ [ [ "Ketogenic diet-1" ] ], [ [ "Dosa-1", "Rice-17" ] ], [ "operation" ] ] ] }, { "qid": "626694e8f6691431f311", "term": "Kayak", "description": "small boat propelled with a double-bladed paddle", "question": "Are kayaks used at the summit of Mount Everest?", "answer": true, "facts": [ "Kayaks are used to transport people in water.", "The summit of Mount Everest has no running water." ], "decomposition": [ "What are kayaks typically used on?", "Does the summit of Mount Everest have any #1?" ], "evidence": [ [ [ [ "Kayak-1" ] ], [ [ "Mount Everest-122" ], "operation" ] ], [ [ [ "Whitewater kayaking-1" ] ], [ [ "Dudh Koshi-2" ] ] ], [ [ [ "Kayak-1" ] ], [ [ "Mount Everest-122" ], "no_evidence" ] ] ] }, { "qid": "efd8365a1bcecfab14bc", "term": "Orange County, California", "description": "County in California, United States", "question": "Does Orange County, California require airplanes to be quiet?", "answer": true, "facts": [ "John Wayne Airport is in Orange County.", "John Wayne Airport is in very close proximity to residential areas.", "There is a General Aviation Noise Ordinance in Orange County, California. ", "Commercial pilots will cut the engine of the aircraft on arrival and departure from Orange County, California. " ], "decomposition": [ "Which ordinance must airports within or close to Orange County, California abide by?", "What actions do commercial pilots take concerning their engine noise when arriving or departing Orange County, California?", "Does #1 and #2 require that their airplanes make less noise?" ], "evidence": [ [ [ [ "John Wayne Airport-33" ] ], [ [ "John Wayne Airport-38" ] ], [ "no_evidence" ] ], [ [ [ "Orange County, California-13" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "John Wayne Airport-33" ] ], [ [ "John Wayne Airport-34" ] ], [ "operation" ] ] ] }, { "qid": "404ab9200fe1935e7f4c", "term": "Lullaby", "description": "soothing song, usually sung to young children before they go to sleep", "question": "Is an inappropriate lullaby Love Song from November 11, 2000?", "answer": true, "facts": [ "Lullabies are often simple and repetitive.", "Lullabies are used to soothe young children, usually to go to sleep.", "Love Song was a song released by Marilyn Manson on his November 11, 2000 record.", "Marilyn Manson is a heavy metal band.", "The lyrics to Love Song has curse words and scary imagery." ], "decomposition": [ "Why are lullabies usually sung?", "What were the features of The Love Song by Marilyn Manson?", "Would #2 be unsuitable to achieve #1?" ], "evidence": [ [ [ [ "Lullaby-1" ] ], [ [ "Holy Wood (In the Shadow of the Valley of Death)-1", "Holy Wood (In the Shadow of the Valley of Death)-21" ] ], [ "operation" ] ], [ [ [ "Lullaby-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Lullaby-1" ] ], [ [ "Marilyn Manson-8" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "cf23e1cddf4e7f714ba3", "term": "Drum", "description": "type of musical instrument of the percussion family", "question": "Would a vegan prefer a natural bongo drum over a synthetic one?", "answer": false, "facts": [ "Natural bongo drums are made with leather.", "Synthetic bongo drums are made with plastic or leather substitutes.", "Vegans do not use or consume any animal products." 
], "decomposition": [ "Which kind of products would a vegan avoid?", "What are natural Bongo drums made with?", "What are synthetic Bongo drums made with?", "Are #2 included in #1 and #3 excluded?" ], "evidence": [ [ [ [ "Veganism-1" ] ], [ [ "Drumhead-3" ] ], [ [ "Drumhead-5" ] ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Bongo drum-6" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9cfa675eeebfbbdb2487", "term": "Bern", "description": "Place in Switzerland", "question": "Is Bern a poor choice for a xenophobic Swiss citizen to live?", "answer": false, "facts": [ "Xenophobic people do not like people from other countries, such as tourists.", "Bern Switzerland was once described by CNN as being a relatively tourist free area.", "Zurich and Geneva get the most tourist traffic out of any city in Switzerland." ], "decomposition": [ "Who do xenophobic people want to avoid?", "What was Bern Switzerland once described by CNN as?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Xenophobia-1" ] ], [ [ "Bern-39", "Canton of Bern-64" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Xenophobia-1" ] ], [ [ "Bern-39" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Xenophobia-1" ] ], [ [ "Bern-17" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "fc48b6524ca6fc402d9d", "term": "People's Volunteer Army", "description": "Communist Chinese forces during the Korean War", "question": "Could all People's Volunteer Army hypothetically be transported on Symphony of the Seas?", "answer": false, "facts": [ "The People's Volunteer Army had 780,000 troops.", "The cruise ship, Symphony of the Seas, has a capacity of 5,518 people." ], "decomposition": [ "How many people were in the People's Volunteer Army?", "How many people can the Symphony of the Seas carry?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "People's Volunteer Army-1" ] ], [ [ "Symphony of the Seas-4" ] ], [ "operation" ] ], [ [ [ "People's Volunteer Army-1" ] ], [ [ "Symphony of the Seas-4" ] ], [ "operation" ] ], [ [ [ "People's Volunteer Army-1" ], "no_evidence" ], [ [ "Symphony of the Seas-4" ] ], [ "operation" ] ] ] }, { "qid": "03c1714ce4e456d9dfd2", "term": "LinkedIn", "description": "Social networking website for people in professional occupations", "question": "Did Kim Il-sung network on LinkedIn?", "answer": false, "facts": [ "LinkedIn was launched in 2003.", "Kim Il-sung died in 1994." ], "decomposition": [ "When was LinkedIn launched?", "When did Kim Il-Sung die?", "Did #1 happen before #2?" ], "evidence": [ [ [ [ "LinkedIn-1" ] ], [ [ "Kim Il-sung-40" ] ], [ "operation" ] ], [ [ [ "LinkedIn-1" ] ], [ [ "Death and state funeral of Kim Il-sung-1" ] ], [ "operation" ] ], [ [ [ "LinkedIn-1" ] ], [ [ "Kim Il-sung-1" ] ], [ "operation" ] ] ] }, { "qid": "7f139bf51273517efd4e", "term": "Christopher Nolan", "description": "British–American film director, screenwriter, and producer", "question": "Is Christopher Nolan indebted to Bob Kane?", "answer": true, "facts": [ "Christopher Nolan rose to fame in large part because of his trilogy of Batman movies released from 2005 to 2012", "Bob Kane was the original artist and co-creator of Batman" ], "decomposition": [ "Who created the Batman?", "To what films was Christopher Nolan's Hollywood success attributed to from the 2000s to 2010s?", "Did #1 provide the source material to #2?" 
], "evidence": [ [ [ [ "Batman-1" ] ], [ [ "Christopher Nolan-16" ] ], [ "operation" ] ], [ [ [ "Batman-1" ] ], [ [ "Christopher Nolan-14" ] ], [ "operation" ] ], [ [ [ "Batman-1" ] ], [ [ "Bruce Wayne (The Dark Knight trilogy)-6" ] ], [ "operation" ] ] ] }, { "qid": "0d2746ffc1870126ee29", "term": "Cane toad", "description": "World's largest toad", "question": "Would the average Hawaiian male experience more days on Earth compared to a wild cane toad?", "answer": true, "facts": [ "Cane toads have a life expectancy of 10 to 15 years in the wild.", "The average life expectancy of a male born in Hawaii is 79.3 years as of 2018." ], "decomposition": [ "What is the average lifespan of an average Hawaiian male?", "What is the average lifespan of a wild cane toad?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Human-58" ], "no_evidence" ], [ [ "Cane toad-10" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Cane toad-10" ] ], [ "operation" ] ], [ [ [ "Life expectancy-12" ], "no_evidence" ], [ [ "Cane toad-10" ] ], [ "operation" ] ] ] }, { "qid": "53271c927076992cfb21", "term": "Suicide", "description": "Intentional act of causing one's own death", "question": "Is slitting your wrists an unreliable suicide method?", "answer": true, "facts": [ "Wrist slitting has only a 6% mortality rate.", "Many people cannot complete the action of slitting their wrists due to pain or shock." ], "decomposition": [ "How often do people survive attempts to commit suicide by wrist-slitting?", "Does #1 indicate a high chance of survival?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Suicide methods-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "107c3068ae93cbc78edb", "term": "Johnny Cash", "description": "American singer-songwriter and actor", "question": "Was it typical to see Johnny Cash on stage in a rainbow-colored outfit?", "answer": false, "facts": [ "Johnny Cash regularly performed dressed all in black, wearing a long, black, knee-length coat.", "A rainbow-colored outfit would consist of the colors red, orange, yellow, green, blue, indigo, and violet." ], "decomposition": [ "What colors are in a rainbow?", "What color did Johnny Cash always wear on stage?", "Is #2 in the list of #1?" ], "evidence": [ [ [ [ "Rainbow-15" ] ], [ [ "Johnny Cash-47" ] ], [ "operation" ] ], [ [ [ "Rainbow flag-1", "Rainbow-15" ] ], [ [ "Johnny Cash-47", "Johnny Cash-50" ] ], [ "operation" ] ], [ [ [ "Rainbow-6" ] ], [ [ "Johnny Cash-2" ] ], [ "operation" ] ] ] }, { "qid": "3f59cf2d6b48378dbefe", "term": "Woodrow Wilson", "description": "28th president of the United States", "question": "Was Woodrow Wilson sandwiched between two presidents from the opposing party?", "answer": true, "facts": [ "Woodrow Wilson was a Democratic president and was between Taft and Harding.", "President William Howard Taft was a Repubican.", "President Warren G. Harding was a Republican." ], "decomposition": [ "Who was president before Woodrow Wilson?", "Who was president after Woodrow Wilson?", "What is the party of #1?", "What is the party of #2?", "Are #3 and #4 the same as each other and not the same as Wilson's party?" ], "evidence": [ [ [ [ "William Howard Taft-1" ] ], [ [ "Warren G. Harding-1" ] ], [ [ "William Howard Taft-3" ] ], [ [ "Warren G. Harding-1" ] ], [ [ "Woodrow Wilson-1" ], "operation" ] ], [ [ [ "William Howard Taft-1" ] ], [ [ "Presidency of Warren G. Harding-2" ] ], [ [ "William Howard Taft-1" ] ], [ [ "Warren G. 
Harding-26" ] ], [ [ "Woodrow Wilson-30" ], "operation" ] ], [ [ [ "Woodrow Wilson-2" ] ], [ [ "Warren G. Harding-1", "Woodrow Wilson-1" ] ], [ [ "William Howard Taft-3" ] ], [ [ "Warren G. Harding-1" ] ], [ "operation" ] ] ] }, { "qid": "d0183768701c74f966c5", "term": "Holy Land", "description": "Term used by Jews, Christians, and Muslims to describe the Land of Israel and Palestine", "question": "Do worshipers of Shiva make a pilgrimage to the Holy Land?", "answer": false, "facts": [ "The Holy Land is sacred to Judaism, Islam and Christianity", "Worshipers of Shiva are adherents of Hinduism" ], "decomposition": [ "Which group of religions have the Holy Land as a pilgrimage destination?", "Which religious group worships Shiva?", "Is #2 the same as any of #1?" ], "evidence": [ [ [ [ "Holy Land-1" ] ], [ [ "Shiva-1" ] ], [ "operation" ] ], [ [ [ "Pilgrimage-23" ] ], [ [ "Shiva-10" ] ], [ "operation" ] ], [ [ [ "Holy Land-4" ], "no_evidence" ], [ [ "Shiva Puja-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "dcb503515e6282a1b30f", "term": "Earth's magnetic field", "description": "Magnetic field that extends from the Earth’s inner core to where it meets the solar wind", "question": "Would a compass attuned to Earth's magnetic field be a bad gift for a Christmas elf??", "answer": true, "facts": [ "Christmas elves work in Santa's workshop on the North Pole", "Magnetic compasses point to the North Pole ", "If you are on the North Pole a compass will not work" ], "decomposition": [ "What do compasses do?", "Where do Christmas elves work?", "Would #1 not be effective if you are already at #2?" ], "evidence": [ [ [ [ "Compass-1" ] ], [ [ "Christmas elf-1" ] ], [ [ "North Magnetic Pole-1" ] ] ], [ [ [ "Compass-1" ] ], [ [ "North Pole-61" ] ], [ "operation" ] ], [ [ [ "Compass-19", "Compass-20", "Compass-7" ] ], [ [ "Christmas elf-1", "Christmas elf-7" ] ], [ [ "Compass-35" ], "operation" ] ] ] }, { "qid": "ef398edbb1efa0d9f33f", "term": "Parc des Princes", "description": "football stadium in Paris, France", "question": "Was the Parc des Princes fully operational during June of 2020?", "answer": false, "facts": [ "June of 2020 was marked by a global pandemic.", "During a global pandemic, large events are not permitted to proceed fully." ], "decomposition": [ "What kind of events are usually held in the Parc des Princes?", "In light of recent developments, are such events as #1 still holding in full capacity as of July, 2020?" ], "evidence": [ [ [ [ "Parc des Princes-1" ] ], [ [ "Coronavirus disease 2019-1", "Social distancing-15" ], "no_evidence", "operation" ] ], [ [ [ "Parc des Princes-1" ] ], [ [ "Impact of the COVID-19 pandemic on sports-23" ], "operation" ] ], [ [ [ "Parc des Princes-1", "Parc des Princes-2" ] ], [ "operation" ] ] ] }, { "qid": "102db3a12d5a45bf7a2a", "term": "RoboCop", "description": "1987 science fiction film directed by Paul Verhoeven", "question": "Is RoboCop director from same country as Gaite Jansen?", "answer": true, "facts": [ "Robocop was directed by Paul Verhoeven.", "Paul Verhoeven was born in Amsterdam, Netherlands.", "Gaite Jansen is an actress known for Jett and Peaky Blinders and was born in Rotterdam, Netherlands." ], "decomposition": [ "Who is the director of the movie RoboCop?", "Where was #1 born?", "Where was Gaite Jansen born?", "Are #2 and #3 the same?" 
], "evidence": [ [ [ [ "RoboCop-1" ] ], [ [ "Paul Verhoeven-1", "Paul Verhoeven-4" ] ], [ [ "Gaite Jansen-1" ] ], [ "operation" ] ], [ [ [ "RoboCop-1" ] ], [ [ "Paul Verhoeven-4" ] ], [ [ "Gaite Jansen-1" ] ], [ "operation" ] ], [ [ [ "RoboCop-13" ] ], [ [ "Paul Verhoeven-4" ] ], [ [ "Gaite Jansen-1" ] ], [ "operation" ] ] ] }, { "qid": "cf8c0f4ee24b30530377", "term": "Mercury (element)", "description": "Chemical element with atomic number 80", "question": "Does Mercury make for good Slip N Slide material?", "answer": false, "facts": [ "The Slip N Slide was an outdoor water slide toy.", "Mercury is a thick liquid at room temperature.", "Mercury is poisonous and used to kill hatters that lined their hats with the substance." ], "decomposition": [ "Who are Slip N Slides made for?", "Is Mercury safe for #1 to be around?" ], "evidence": [ [ [ [ "Slip 'N Slide-4" ] ], [ [ "Mercury poisoning-27" ] ] ], [ [ [ "Slip 'N Slide-4" ] ], [ [ "Mercury poisoning-1", "Mercury poisoning-27" ] ] ], [ [ [ "Slip 'N Slide-2" ] ], [ [ "Mercury (element)-3", "Mercury (element)-5" ], "operation" ] ] ] }, { "qid": "6d62de0fed9d8151b413", "term": "Torah", "description": "First five books of the Hebrew Bible", "question": "Does Happy Gilmore Productions CEO own a Torah?", "answer": true, "facts": [ "The CEO of Happy Gilmore Productions is Adam Sandler.", "Adam Sandler's religious beliefs are Judaism. ", "The Torah is the first part of the bible in Judaism." ], "decomposition": [ "Who is the CEO of Happy Gilmore Productions?", "What religion does #1 follow?", "What religion uses the Torah?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Happy Madison Productions-1" ], "no_evidence" ], [ [ "Adam Sandler-26" ] ], [ [ "Torah-1" ] ], [ "operation" ] ], [ [ [ "Happy Madison Productions-1" ], "no_evidence" ], [ [ "Adam Sandler-5" ], "no_evidence" ], [ [ "Torah-1", "Torah-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Jews-1", "Torah-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "4ceb5ba3fb64a5f89a59", "term": "ZIP Code", "description": "numeric postal code used in the United States", "question": "Do most people only memorize slightly over half of their ZIP code?", "answer": true, "facts": [ "ZIP codes in the US are 9 digits in length. ", "Most forms in the US only require and have space for the first 5 digits of a ZIP code?" ], "decomposition": [ "How long are zip codes in the US?", "When forms ask for zip codes, how many spaces do they typically request?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "ZIP Code-1" ] ], [ [ "ZIP Code-10" ], "no_evidence" ], [ "operation" ] ], [ [ [ "ZIP Code-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "ZIP Code-1" ] ], [ [ "ZIP Code-10" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "75b9eafe50ef589e8f78", "term": "Honey badger", "description": "species of mammal", "question": "Would a snake have reasons to fear a honey badger?", "answer": true, "facts": [ "Snakes are considered prey to Honey Badgers.", "Honey Badgers have sharp teeth and are carnivorous. " ], "decomposition": [ "What prey do Honey badgers hunt?", "Are snakes listed in #1?" 
], "evidence": [ [ [ [ "Honey badger-21" ] ], [ [ "Honey badger-21" ] ] ], [ [ [ "Honey badger-21" ] ], [ "operation" ] ], [ [ [ "Honey badger-21" ] ], [ "operation" ] ] ] }, { "qid": "1c505cad07ffe97f4e41", "term": "Snowshoe", "description": "Footwear for walking easily across snow", "question": "Has Burger King contributed to a decrease in need for snowshoes?", "answer": true, "facts": [ "Burger king serves beef", "Beef farming is associated with increased global temperatures and decreased snowfall" ], "decomposition": [ "What is the main food item that burger king sells?", "What kind of meat is in #1?", "What does farming for #2 do to the global temperature?", "If #3 occurs, are people less lilely to need snowshoes?" ], "evidence": [ [ [ [ "Burger King-1" ] ], [ [ "Pork-2" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Burger King-1" ] ], [ [ "Hamburger-1" ] ], [ [ "Environmental impact of meat production-19" ], "no_evidence" ], [ [ "Snowshoe-1" ], "operation" ] ], [ [ [ "Burger King-1" ] ], [ [ "Meat-9" ] ], [ [ "Cattle-90" ] ], [ [ "Global warming-3" ] ] ] ] }, { "qid": "0e8b696f5770ad5e3ea7", "term": "Pablo Escobar", "description": "Colombian drug lord (1949–1993)", "question": "Did Pablo Escobar's nickname collection outshine Robert Moses Grove's?", "answer": true, "facts": [ "Robert Moses Grove was a baseball player nicknamed Lefty Grove.", "Pablo Escobar had several nicknames including: Don Pablo, El Padrino, and El Patrón." ], "decomposition": [ "How many nicknames did Pablo Escobar have?", "How many nicknames did Robert Moses Grove have?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Pablo Escobar-28" ] ], [ [ "Lefty Grove-1" ] ], [ [ "Lefty Grove-1", "Pablo Escobar-28" ] ] ], [ [ [ "Pablo Escobar-1", "Pablo Escobar-28" ], "no_evidence" ], [ [ "Lefty Grove-1" ] ], [ "operation" ] ], [ [ [ "Pablo Escobar-28" ], "no_evidence" ], [ [ "Lefty Grove-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "18a2f43c7c4e9dfe85a4", "term": "Cell biology", "description": "Scientific Discipline that Studies Cells", "question": "Does cell biology teach about the life cycle of Al Qaeda?", "answer": false, "facts": [ "Cell biology is a subdiscipline of biology that deals with the structure and function of cells in living organisms", "Al Qaeda is made up of terrorist cells", "Terrorist cells are small groups of terrorists acting semi-independently for the same cause" ], "decomposition": [ "What is the main topic that people learn about in Cell biology?", "What is Al Qaeda made up of?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Cell biology-1" ] ], [ [ "Al-Qaeda-2" ] ], [ "operation" ] ], [ [ [ "Cell biology-1" ] ], [ [ "Al-Qaeda-1" ] ], [ "operation" ] ], [ [ [ "Cell biology-1" ] ], [ [ "Al-Qaeda-2" ] ], [ "operation" ] ] ] }, { "qid": "4de177ae8c827fd0ffb1", "term": "Retail", "description": "Sale of goods and services from individuals or businesses to the end-user", "question": "Is retail a job anybody can be suited for?", "answer": false, "facts": [ "Most retail jobs require employees to be able to lift, push, and pull 25-50 lbs. ", "Retail positions require employees to interact with customers regularly.", "Various disabilities can diminish one's ability to interact with the public." ], "decomposition": [ "What are some basic skills that a person employed in retail should have?", "Would every person, even the disabled, possess all of #1?" 
], "evidence": [ [ [ [ "Retail clerk-2" ] ], [ "operation" ] ], [ [ [ "Retail-50" ], "no_evidence" ], [ [ "Disability-3" ], "no_evidence", "operation" ] ], [ [ [ "Retail-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "1a8ea71c1644ded6b058", "term": "Godzilla", "description": "Giant monster or kaiju", "question": "Could Godzilla have been killed by the Tohoku earthquake?", "answer": false, "facts": [ "The Tohoku earthquake led to the Fukushima Daiichi nuclear power plant meltdown", "Nuclear meltdowns lead to a release of deadly levels of radiation", "Godzilla draws power from radiation and is not hurt by it" ], "decomposition": [ "What major accident was caused by the Tohoku Earthquake?", "What was released into the environment by #1?", "Does #2 cause harm to Godzilla?" ], "evidence": [ [ [ [ "2011 Tōhoku earthquake and tsunami-90" ] ], [ [ "2011 Tōhoku earthquake and tsunami-90" ] ], [ [ "Godzilla-2" ], "operation" ] ], [ [ [ "2011 Tōhoku earthquake and tsunami-9" ] ], [ [ "Fukushima Daiichi nuclear disaster-3" ] ], [ [ "Godzilla-2" ] ] ], [ [ [ "2011 Tōhoku earthquake and tsunami-9" ] ], [ [ "2011 Tōhoku earthquake and tsunami-93" ] ], [ [ "Godzilla-2" ], "operation" ] ] ] }, { "qid": "603f33161fb1500bb73d", "term": "Great Depression", "description": "20th-century worldwide economic depression", "question": "Can a person be diagnosed with a Great Depression?", "answer": false, "facts": [ "The Great Depression was a severe worldwide economic depression that took place mostly during the 1930s, beginning in the United States.", "Major depressive disorder (MDD), also known simply as depression, is a mental disorder characterized by at least two weeks of low mood that is present across most situations." ], "decomposition": [ "What was the Great Depression?", "What is depression that people suffer from?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Great Depression-1" ] ], [ [ "Minor depressive disorder-2" ] ], [ [ "Great Depression-1", "Minor depressive disorder-2" ], "operation" ] ], [ [ [ "Great Depression-1" ] ], [ [ "Depression (mood)-1" ] ], [ "operation" ] ], [ [ [ "Great Depression-1" ] ], [ [ "Depression (mood)-8", "Major depressive disorder-30" ] ], [ "operation" ] ] ] }, { "qid": "debac3970a4fc11774fd", "term": "Marco Rubio", "description": "United States Senator from Florida", "question": "Could Marco Rubio ride the Candymonium roller coaster at Hershey Park?", "answer": true, "facts": [ "The Candymonium roller coaster is restricted to park visitors over 54\" tall (4'6\").", "Marco Rubio is 5'9\" tall." ], "decomposition": [ "What is the height limit for the Candymonium roller coaster?", "How tall is Marco Rubio?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Candymonium-6" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Candymonium-1" ], "no_evidence" ], [ [ "Marco Antonio Rubio-1", "Marco Rubio-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f4319614425eb9b71fa5", "term": "Gunpowder", "description": "explosive most commonly used as propellant in firearms", "question": "Would an explosion at a gunpowder storage facility result in a supersonic shock wave?", "answer": false, "facts": [ "Gunpowder is classified as a low explosive", "Low explosives burn at subsonic speeds" ], "decomposition": [ "What kind of explosive is gunpowder classifed as?", "Does #1 burn at supersonic rates?" 
], "evidence": [ [ [ [ "Gunpowder-3" ] ], [ [ "Gunpowder-3" ] ] ], [ [ [ "Gunpowder-3" ] ], [ "operation" ] ], [ [ [ "Gunpowder-3" ] ], [ "operation" ] ] ] }, { "qid": "2a7eeadb1e045fda4550", "term": "Spinach", "description": "species of plant", "question": "Was the amount of spinach Popeye ate unhealthy?", "answer": true, "facts": [ "Popeye was a cartoon character that ate whole cans of spinach to maintain his fighting strength.", "Spinach is high in oxalates which can lead to kidney stones.", "Too much spinach can lead to bloating, gas, fever, and diarrhea." ], "decomposition": [ "What is spinach high in?", "What does eating too much of #1 do to a body?", "Are #2's bad for a body?" ], "evidence": [ [ [ [ "Spinach-1" ] ], [ [ "Oxalate-10" ] ], [ [ "Kidney stone disease-1", "Oxalate-10" ] ] ], [ [ [ "Spinach-7" ] ], [ [ "Oxalate-10" ] ], [ "operation" ] ], [ [ [ "Spinach-7" ] ], [ [ "Vitamin A-13" ] ], [ [ "Vitamin A-16" ] ] ] ] }, { "qid": "48f842fbfb229d784bd4", "term": "Aladdin", "description": "Middle Eastern folk tale", "question": "Is the voice of the Genie from Disney's Aladdin still alive?", "answer": false, "facts": [ "The Genie was voiced by comedian Robin Williams.", "Robin Williams died in 2014." ], "decomposition": [ "Who provided the voice of the Genie in Aladdin?", "Is #1 still alive?" ], "evidence": [ [ [ [ "Robin Williams-33" ] ], [ [ "Robin Williams-1" ], "operation" ] ], [ [ [ "Aladdin (1992 Disney film)-1", "Robin Williams-33" ] ], [ [ "Robin Williams-60" ] ] ], [ [ [ "Robin Williams-33" ] ], [ [ "Robin Williams-3" ] ] ] ] }, { "qid": "6cbd917db2a8315af17c", "term": "Chlorine", "description": "Chemical element with atomic number 17", "question": "Can you buy chlorine at a dollar store?", "answer": true, "facts": [ "Chlorine, when added to water, creates household bleach.", "Household bleach is available at most dollar stores." ], "decomposition": [ "What type of item is Chlorine?", "Would department would you find #1 in?", "Do dollar stores have #2?" ], "evidence": [ [ [ [ "Chlorine-62" ] ], [ [ "Cleaning agent-16" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chlorine-64" ] ], [ [ "Department store-2" ] ], [ [ "Dollar Tree-2" ] ] ], [ [ [ "Chlorine-4" ] ], [ [ "Household hardware-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c67945e7c471a9692c88", "term": "Snowy owl", "description": "species of bird", "question": "Could a snowy owl survive in the Sonoran?", "answer": false, "facts": [ "The Snowy owl is a bird native to the Arctic regions of North America.", "Temperatures in the North American Arctic range from -36.4F to 50F.", "The Sonoran is one of the hottest deserts in the world.", "The Sonoran Desert can get as hot as 118F." ], "decomposition": [ "Where are Snowy owls found?", "What is the temperature range of #1?", "How hot does it get in the Sonoran Desert?", "Is #3 within #2?" 
], "evidence": [ [ [ [ "Snowy owl-1" ] ], [ [ "Climate of the Arctic-40" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Snowy owl-1" ] ], [ [ "Arctic-4" ] ], [ [ "Sonoran Desert-18" ] ], [ "operation" ] ], [ [ [ "Snowy owl-1" ] ], [ [ "Arctic-4" ] ], [ [ "Sonoran Desert-18" ] ], [ "operation" ] ] ] }, { "qid": "47cb63978585d5292d79", "term": "Transport", "description": "Human-directed movement of things or people between locations", "question": "Can any person with a driver's license work in transport of aviation fuel?", "answer": false, "facts": [ "A vehicle operator must possess a Commercial Driver's License to work in the delivery of fuel.", "The process for getting a CDL is much lengthier than that of getting a Driver's License. " ], "decomposition": [ "What certification is required of a driver to work in transport of aviation fuel?", "Does #1 consist only of a regular driver's license?" ], "evidence": [ [ [ [ "Truck driver-63" ] ], [ "operation" ] ], [ [ [ "Dangerous goods-33" ] ], [ "operation" ] ], [ [ [ "Pilot licensing and certification-2" ] ], [ "operation" ] ] ] }, { "qid": "edb5e626587433a2edc2", "term": "Astronaut", "description": "Person who commands, pilots, or serves as a crew member of a spacecraft", "question": "Has every astronaut survived their space journey?", "answer": false, "facts": [ "In 1986, the space shuttle Challenger exploded during launch, killing all astronauts aboard.", "In 2003, the space shuttle Columbia also exploded, again killing its entire crew.", "Various other space flights have resulted in fatal disasters." ], "decomposition": [ "How many astronauts have died during a mission?", "Is #1 equal to zero?" ], "evidence": [ [ [ [ "Astronaut-59" ] ], [ "operation" ] ], [ [ [ "Astronaut-59" ] ], [ [ "Astronaut-59" ], "operation" ] ], [ [ [ "Astronaut-59" ] ], [ "operation" ] ] ] }, { "qid": "4f286910705ee9e8aceb", "term": "Hypothermia", "description": "A human body core temperature below 35.0°C", "question": "Would hypothermia be a concern for a human wearing zoot suit on Triton?", "answer": true, "facts": [ "A zoot suit was a man's suit of an exaggerated style popular in the 1940s.", "Triton is one of the coldest planets in the solar system.", "Triton is located about 2.8 billion miles from the warmth of the sun.", "Triton has an average temperature of -235.0°C", "A zoot suit is made of thin material such as cloth." ], "decomposition": [ "What is the average temperature on Triton?", "What material are zoot suits made of?", "Below which body temperature will hypothermia set in?", "Would clothes made of #2 be unable to keep body temperature above #3 in ambient temperature of #1?" ], "evidence": [ [ [ [ "Triton (moon)-3" ] ], [ [ "Zoot Suit Riots-2" ] ], [ [ "Hypothermia-1" ] ], [ "operation" ] ], [ [ [ "Triton (moon)-3" ] ], [ [ "Zoot Suit Riots-12", "Zoot Suit Riots-2" ] ], [ [ "Human body temperature-30" ] ], [ "operation" ] ], [ [ [ "Triton (moon)-3" ] ], [ [ "Zoot suit-15" ], "no_evidence" ], [ [ "Hypothermia-1" ] ], [ "operation" ] ] ] }, { "qid": "4c7da1c6a6b94f8c44b1", "term": "Saltwater crocodile", "description": "species of reptile", "question": "Are saltwater crocodiles related to alligators?", "answer": true, "facts": [ "Crocodiles belong to the family Crocodylinae.", "Alligators belong to the family Alligatoridae.", "Crocodylinae and Alligatoridae both belong to the order Crocodilia." 
], "decomposition": [ "What family do Crocodiles belong to?", "What family do Alligators belong to?", "What order does #1 belong to?", "What order does #2 belong to?", "Are #3 and #4 the same thing?" ], "evidence": [ [ [ [ "Crocodile-1" ] ], [ [ "Alligator-1" ] ], [ [ "Crocodile-1" ] ], [ [ "Alligator-1" ] ], [ "operation" ] ], [ [ [ "Crocodile-35" ] ], [ [ "American alligator-5" ] ], [ [ "Crocodile-35" ] ], [ [ "Alligator-1" ] ], [ "operation" ] ], [ [ [ "Crocodylidae-1" ] ], [ [ "Alligator-1" ] ], [ [ "Crocodilia-1" ] ], [ [ "Crocodilia-1" ] ], [ "operation" ] ] ] }, { "qid": "3d80646ef0844fac8da5", "term": "Spice Girls", "description": "British girl group", "question": "Could the Spice Girls compete against ŽRK Kumanovo?", "answer": false, "facts": [ "The Spice Girls had 5 members.", "ŽRK Kumanovo is a women's handball club from Kumanovo in the Republic of Macedonia.", "Handball is a sport played by two teams of seven players each." ], "decomposition": [ "How many members did the Spice Girls have?", "What sport does ŽRK Kumanovo compete in?", "How many people are on a team in #2?", "Is #1 greater than or equal to #3?" ], "evidence": [ [ [ [ "Spice Girls-1" ] ], [ [ "ŽRK Kumanovo-1" ] ], [ [ "Handball-1" ] ], [ "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ [ "ŽRK Kumanovo-1" ] ], [ [ "Handball-1" ] ], [ "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ [ "ŽRK Kumanovo-1" ] ], [ [ "Handball-1" ] ], [ "operation" ] ] ] }, { "qid": "715336c5f99281337469", "term": "Amazon (company)", "description": "American electronic commerce and cloud computing company", "question": "Was Amazon involved in the lunar landing?", "answer": false, "facts": [ "The lunar landing occurred in 1969.", "Amazon was founded in 1994." ], "decomposition": [ "When did the lunar landing take place?", "When was the company Amazon founded?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Apollo 11-1" ] ], [ [ "Amazon (company)-3" ] ], [ "operation" ] ], [ [ [ "Moon landing-2" ] ], [ [ "Amazon (company)-3" ] ], [ "operation" ] ], [ [ [ "Apollo 11-1" ] ], [ [ "Amazon (company)-3" ] ], [ "operation" ] ] ] }, { "qid": "e8486011a3183cebe449", "term": "Tahiti", "description": "Largest island of French Polynesia", "question": "Could all Tahiti hotels hypothetically accommodate US D-Day troops?", "answer": false, "facts": [ "Tahiti has 47 hotels with around 3,000 rooms.", "The US D-Day force consisted of: 23,250 on Utah Beach, 34,250 on Omaha Beach, and 15,500 airborne troops." ], "decomposition": [ "How many hotel rooms are there in Tahiti?", "How many people can comfortably share a hotel room?", "What is #1 multiplied by #2?", "How many troops were in the US D-Day force?", "Is #3 greater than or equal to #4?" 
], "evidence": [ [ [ [ "Tahiti-75" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ [ "Operation Overlord-1" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ [ "Normandy landings-39" ] ], [ "operation" ] ], [ [ [ "Tahiti-75" ] ], [ [ "Hotel-42" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Normandy landings-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "96962f0eedff1e72af2c", "term": "Bengal fox", "description": "species of mammal", "question": "Is a bengal fox likely to see the Superbowl?", "answer": false, "facts": [ "The Superbowl is the championship game of the National Football League", "The National Football League is a sports league for American football", "American football enjoys the majority of its popularity in the United States", "The bengal fox is found exclusively on the Indian subcontinent" ], "decomposition": [ "Where is the Super Bowl usually held?", "Where is the Bengal Fox mostly found?", "Are #1 and #2 within the same country?" ], "evidence": [ [ [ [ "National Football League-1", "Super Bowl-1" ] ], [ [ "Bengal fox-1" ] ], [ [ "India-1", "United States-1" ] ] ], [ [ [ "American football-52" ] ], [ [ "Bengal fox-1" ] ], [ "operation" ] ], [ [ [ "Super Bowl-65" ] ], [ [ "Bengal fox-1" ] ], [ "operation" ] ] ] }, { "qid": "f1e48f5dc1662a84942b", "term": "Harvey Milk", "description": "American politician who became a martyr in the gay community", "question": "Did Harvey Milk ever run for governor?", "answer": false, "facts": [ "In 1977 Harvey Milk was elected to the San Francisco Board of Supervisors.", "Less than a year later, he was assassinated before he could run for higher offices." ], "decomposition": [ "What were Harvey Milk's political campaigns?", "Does #1 include a gubernatorial campaign?" ], "evidence": [ [ [ [ "Harvey Milk-1" ] ], [ [ "Harvey Milk-1" ] ] ], [ [ [ "Harvey Milk-2", "Jim Foster (activist)-4" ] ], [ "operation" ] ], [ [ [ "Harvey Milk-1" ] ], [ [ "Governor-4" ], "operation" ] ] ] }, { "qid": "b893d680c3d0b3003e80", "term": "Nine Inch Nails", "description": "American industrial rock band", "question": "Is Nine Inch Nails a good guest for students in earliest grade to take Iowa tests?", "answer": false, "facts": [ "The Iowa test is administered to students in kindergarten through eighth grade.", "Nine Inch Nails is a heavy industrial rock band formed in 1988.", "Nine Inch Nails albums are stamped with the explicit warning label." ], "decomposition": [ "Who were the Nine Inch Nails?", "What are #1's albums rated as?", "What is the age range for students who have to take the Iowa test?", "Is it safe to show kids that are #3 things rated #2?" ], "evidence": [ [ [ [ "Nine Inch Nails-1" ] ], [ [ "Nine Inch Nails-10" ], "no_evidence" ], [ [ "Iowa Tests of Educational Development-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Nine Inch Nails-1" ] ], [ [ "Nine Inch Nails-21", "Parental Advisory-1" ], "no_evidence" ], [ [ "Iowa Assessments-2", "Iowa Assessments-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Nine Inch Nails-1" ] ], [ [ "Nine Inch Nails-17" ], "no_evidence" ], [ [ "Iowa Assessments-2" ] ], [ "operation" ] ] ] }, { "qid": "132f21b2b388b637c22f", "term": "Disco", "description": "music genre", "question": "Did the Beatles write any music in the Disco genre?", "answer": false, "facts": [ "The Beatles were active from 1960 until 1969.", "Disco began to appear around 1972." 
], "decomposition": [ "When were the Beatles active as a full group?", "When did disco start?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Break-up of the Beatles-1", "The Beatles-1" ] ], [ [ "Disco-1" ] ], [ "operation" ] ], [ [ [ "The Beatles-1" ] ], [ [ "Disco-1" ] ], [ "operation" ] ], [ [ [ "The Beatles-1", "The Beatles-3" ] ], [ [ "Disco-1" ] ], [ "operation" ] ] ] }, { "qid": "0e47bee0304fb6a65643", "term": "Coca", "description": "group of plant varieties cultivated for coca production", "question": "Are leaves from coca good for gaining weight?", "answer": false, "facts": [ "People who want to gain weight seek to increase caloric intake ", "Coca leaf contains chemicals that suppress hunger and thirst" ], "decomposition": [ "What kinds of foods do people who want to gain weight look for?", "Are coca leaves #1?" ], "evidence": [ [ [ [ "Weight gain-2" ], "no_evidence" ], [ [ "Coca-4", "Stimulant-21" ], "no_evidence", "operation" ] ], [ [ [ "Weight gain-8" ], "no_evidence" ], [ [ "Coca-4", "Cocaine-43" ], "operation" ] ], [ [ [ "Weight gain-2" ] ], [ [ "Coca-18" ] ] ] ] }, { "qid": "d0cf00dae29efafa0865", "term": "Funeral", "description": "ceremony for a person who has died", "question": "Do embalmed bodies feel different at funerals?", "answer": true, "facts": [ "Embalming fluid fixates into the bodily tissues and replaces the bodily fluid.", "Bodies that have not been embalmed tend to feel soft.", "When embalming fluid fills the body, the body becomes firm." ], "decomposition": [ "What does Embalming a body do to it?", "Does #1 make a body hard?", "What does a non embalmed body feel like", "Is #2 different from #3?" ], "evidence": [ [ [ [ "Embalming-4" ] ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Embalming-37" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Embalming-1" ] ], [ [ "Embalming chemicals-14" ] ], [ [ "Natural burial-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "eb257b257001eb384cf3", "term": "Stone Cold Steve Austin", "description": "American professional wrestler", "question": "Did Stone Cold Steve Austin wrestle in three different centuries?", "answer": false, "facts": [ "A century is a period of 100 years.", "Stone Cold Steve Austin made his wrestling debut on September 30, 1989.", "Stone Cold Steve Austin retired on March 30, 2003.", "The 20th (twentieth) century was a century that began on January 1, 1901 and ended on December 31, 2000.", "The 21st century began on January 1, 2001, and will end on December 31, 2100." ], "decomposition": [ "When did Stone Cold Steve Austin start wrestling?", "When did Stone Cold Steve Austin stop wrestling?", "In what century is #1?", "In what century is #2?", "Is #4 minus #3 greater than 1?" 
], "evidence": [ [ [ [ "Stone Cold Steve Austin-6" ] ], [ [ "Stone Cold Steve Austin-45" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-6" ] ], [ [ "Stone Cold Steve Austin-4" ] ], [ [ "20th century-2" ] ], [ [ "21st century-1" ] ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-6" ] ], [ [ "Stone Cold Steve Austin-41" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "703db4629ff46d82a205", "term": "Kane (wrestler)", "description": "American professional wrestler, actor, businessman, and politician", "question": "Can Kane challenge Joe Biden in this year's primaries?", "answer": false, "facts": [ "Kane is a member of the Republican Party", "Joe Biden is a member of the Democratic Party", "Primaries are conducted between members of the same political party" ], "decomposition": [ "Primaries are held within what?", "What #1 does Joe Biden belong to?", "What #1 does Kane belong to?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Primary election-1" ] ], [ [ "Joe Biden-1" ] ], [ [ "Kane (wrestler)-1" ] ], [ "operation" ] ], [ [ [ "Primary election-5" ] ], [ [ "Joe Biden-1" ] ], [ [ "Kane (wrestler)-1" ] ], [ "operation" ] ], [ [ [ "Primary election-1" ] ], [ [ "Joe Biden-1" ] ], [ [ "Kane (wrestler)-1" ] ], [ "operation" ] ] ] }, { "qid": "2e1482cb04970fe7c449", "term": "Richard III of England", "description": "15th-century King of England", "question": "Did Richard III know his grandson?", "answer": false, "facts": [ "Richard III died in battle at age 32.", "He had only one son, who died during childhood, and therefore had no grandchildren.", "Even if he did have grandchildren, he would have been dead long before they were born based on his age at death." ], "decomposition": [ "Did Richard III have any grandchildren?" ], "evidence": [ [ [ [ "Richard III of England-71" ], "no_evidence" ] ], [ [ [ "Richard III of England-43" ], "operation" ] ], [ [ [ "Richard III of England-40" ], "operation" ] ] ] }, { "qid": "68ed0e7f870fb8e2b0e0", "term": "Ringo Starr", "description": "British musician, drummer of the Beatles", "question": "Has Ringo Starr been in a relatively large number of bands?", "answer": false, "facts": [ "RIngo Starr has been in three bands besides the Beatles.", "Mike Patton, lead singer of Faith No More, has been in at least 12 bands.", "Dave Grohl, lead singer of the Foo Fighters, has played in over 10 bands." ], "decomposition": [ "How many bands has Ringo Starr been part of?", "How many bands has Mike Patton been part of?", "How many bands has Dave Grohl been part of?", "Is #1 larger than #2 or #3?" ], "evidence": [ [ [ [ "Plastic Ono Band-2", "Ringo Starr & His All-Starr Band-1", "Rory Storm-1", "The Beatles-1" ] ], [ [ "Mike Patton-1" ] ], [ [ "Dave Grohl-1", "Scream (band)-5", "Teenage Time Killers-1" ] ], [ "operation" ] ], [ [ [ "Ringo Starr-13" ] ], [ [ "Mike Patton-18" ] ], [ [ "Dave Grohl-28" ] ], [ "operation" ] ], [ [ [ "Ringo Starr-3" ], "no_evidence" ], [ [ "Mike Patton-1" ] ], [ [ "Dave Grohl-1" ] ], [ "operation" ] ] ] }, { "qid": "60e2540a5e07213eeca1", "term": "Pancake", "description": "Thin, round cake made of eggs, milk and flour", "question": "Are pancakes typically prepared in a pot?", "answer": false, "facts": [ "Pancakes are usually fried on a shallow flat surface.", "Pots typically have high walls.", "Griddles and skillets are low, shallow flat pans appropriate for pancakes." ], "decomposition": [ "What kind of surface are pancakes usually made on?", "Does a pot have #1?" 
], "evidence": [ [ [ [ "Pancake-1", "Pancake-57" ], "no_evidence" ], [ [ "Cookware and bakeware-52" ], "operation" ] ], [ [ [ "Pancake-1" ] ], [ [ "Pancake-1" ] ] ], [ [ [ "Pancake-1" ] ], [ [ "Cookware and bakeware-4" ], "operation" ] ] ] }, { "qid": "b3ef76f4c5ea1f4ca066", "term": "Sunday", "description": "day of the week", "question": "Is Christmas always celebrated on a Sunday?", "answer": false, "facts": [ "Christmas is always celebrated on December 25.", "A specific date on the calendar rotates to the following day of the week each year.", "Christmas can therefore be any day of the week." ], "decomposition": [ "What date does Christmas fall on each year?", "Does #1 always fall on a Sunday?" ], "evidence": [ [ [ [ "Christmas-1" ] ], [ [ "Sunday-1" ], "operation" ] ], [ [ [ "Christmas-3" ] ], [ [ "Christmas-3" ] ] ], [ [ [ "Christmas-1" ] ], [ [ "Christmas Sunday-2" ] ] ] ] }, { "qid": "535870a1d73b7288d0e1", "term": "Toronto Star", "description": "Newspaper in Toronto, Ontario, Canada", "question": "Would someone in Boston not receive the Toronto Star?", "answer": true, "facts": [ "The Toronto Star is only distributed in Canada.", "Boston is located in the United States." ], "decomposition": [ "Where is the Toronto Star distributed?", "Where is Boston located?", "Is #2 not in #1?" ], "evidence": [ [ [ [ "Toronto Star-1" ] ], [ [ "Boston-1" ] ], [ "operation" ] ], [ [ [ "Toronto Star-1" ] ], [ [ "Boston-1" ] ], [ "operation" ] ], [ [ [ "Toronto Star-1" ] ], [ [ "Boston-1" ] ], [ "operation" ] ] ] }, { "qid": "6ffaa1b46d2ea47c2cea", "term": "Saint", "description": "one who has been recognized for having an exceptional degree of holiness, sanctity, and virtue", "question": "Will Lhamo Thondup be considered by Catholic Church to be a saint?", "answer": false, "facts": [ "Lhamo Thondup is the current Dalai Lama.", "The Dalai Lama is the spiritual leader of Tibetan Buddhism.", "Catholic saints must fill specific criteria, including being devout Christians." ], "decomposition": [ "What religion must one belong to as part of the criteria to be a Catholic saint?", "Which religion does Lhamo Thondup practice?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Saint-1" ] ], [ [ "14th Dalai Lama-1" ] ], [ [ "14th Dalai Lama-1", "Saint-1" ] ] ], [ [ [ "Saint-7" ], "no_evidence" ], [ [ "14th Dalai Lama-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Saint-1" ] ], [ [ "14th Dalai Lama-1" ] ], [ "operation" ] ] ] }, { "qid": "759c7c8babd737e24f1d", "term": "Cinnamon", "description": "spice obtained from the inner bark of several trees from the genus Cinnamomum", "question": "Is a spice grinder ueseless for the cheapest cinnamon sticks?", "answer": true, "facts": [ "Different species of cinnamon require different grinding methods to make the spice", "Indonesian cinnamon quills are thick and capable of damaging a spice or coffee grinder.", "Indonesian cinnamon is the most common and cheapest cinnamon in the USA" ], "decomposition": [ "What type of cinnamon sticks are the cheapest in the US?", "What is the size of #1?", "Because of #2, would it cause damage to a typical spice grinder?" 
], "evidence": [ [ [ [ "Cinnamon-2" ] ], [ [ "Cinnamomum cassia-2" ] ], [ "operation" ] ], [ [ [ "Cinnamon-2" ] ], [ [ "Cinnamomum cassia-2" ] ], [ [ "Herb grinder-1" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "9163ac8f2409fe39c3b2", "term": "Sea of Japan", "description": "Marginal sea between Japan, Russia and Korea", "question": "Is the Sea of Japan landlocked within Japan?", "answer": false, "facts": [ "The sea of Japan touches Japan, Russia and the Koreas", "Japan has no landlocked sea" ], "decomposition": [ "Which countries have a shoreline that touches the Sea of Japan?", "Is Japan the only item in #1?" ], "evidence": [ [ [ [ "Sea of Japan-1" ] ], [ "operation" ] ], [ [ [ "Sea of Japan-13" ] ], [ "operation" ] ], [ [ [ "Sea of Japan-1" ] ], [ "operation" ] ] ] }, { "qid": "a2dd0493fc87bb64e1fa", "term": "Hundred Years' War", "description": "Series of conflicts and wars between England and France during the 14th and 15th-century", "question": "Did the first Duke of Valentinois play a key role in the Hundred Years' War?", "answer": false, "facts": [ "The Hundred Years' War was a conflict between England and France from 1337-1453", "Cesare Borgia, the son of Pope Alexander VI, was the first Duke of Valentinois.", "Cesare Borgia was born in 1475." ], "decomposition": [ "When did the Hundred Years' War end?", "Who was the first Duke of Valentinois?", "When was #2 born?", "Is #3 before #1?" ], "evidence": [ [ [ [ "Hundred Years' War (1415–1453)-1" ] ], [ [ "Duke of Valentinois-6" ] ], [ [ "Honoré II, Prince of Monaco-1" ] ], [ "operation" ] ], [ [ [ "Hundred Years' War-1" ] ], [ [ "Cesare Borgia-7" ] ], [ [ "Cesare Borgia-1" ] ], [ "operation" ] ], [ [ [ "Hundred Years' War-1" ] ], [ [ "Cesare Borgia-7" ] ], [ [ "Cesare Borgia-1" ] ], [ "operation" ] ] ] }, { "qid": "55fe694e6b5e5dc9def0", "term": "Dustin Hoffman", "description": "American actor and director", "question": "Can you substitute the pins in a bowling alley lane with Dustin Hoffman's Oscars?", "answer": false, "facts": [ "There are ten pins in a bowling alley lane", "Dustin Hoffman has won two Oscars" ], "decomposition": [ "How many pins are on a bowling alley lane?", "How many Oscars has Dustin Hoffman won?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Bowling-1" ] ], [ [ "Dustin Hoffman-35", "Dustin Hoffman-45" ] ], [ "operation" ] ], [ [ [ "Ten-pin bowling-1" ] ], [ [ "Dustin Hoffman-5" ] ], [ "operation" ] ], [ [ [ "Ten-pin bowling-1" ] ], [ [ "Dustin Hoffman-5" ] ], [ "operation" ] ] ] }, { "qid": "07ba78b177df5d2a30c3", "term": "Whole genome sequencing", "description": "A process that determines the complete DNA sequence of an organism's genome at a single time", "question": "Did Rosalind Franklin contribute to work that led to Whole Genome Sequencing?", "answer": true, "facts": [ "Rosalind Franklin used specialized photography to capture the first photos of the double helix.", "The double helix is the form that DNA takes.", "Without understanding the structure of DNA, genome sequencing would be impossible." ], "decomposition": [ "Rosalind Franklin capture the first photo of what?", "What takes the form of #1?", "Is understanding #2 essential to genome sequencing?" 
], "evidence": [ [ [ [ "Rosalind Franklin-19" ] ], [ [ "DNA-1" ] ], [ [ "Whole genome sequencing-1" ], "operation" ] ], [ [ [ "Rosalind Franklin-3" ] ], [ [ "Rosalind Franklin-3" ] ], [ [ "Whole genome sequencing-1" ], "operation" ] ], [ [ [ "Rosalind Franklin-3" ] ], [ [ "Rosalind Franklin-3" ] ], [ [ "Rosalind Franklin-3", "Whole genome sequencing-3" ] ] ] ] }, { "qid": "557e389d7efe47c5bc4d", "term": "Blue", "description": "A primary colour between purple and green", "question": "Is the most expensive color in the world Blue?", "answer": true, "facts": [ "Blue is a primary color.", "Blue is between violet and green on the visible light spectrum.", "Lapis Lazuli is used to make ultramarine. ", "Ultramarine is a pigment of Blue", "Processing Lapis Lazuli into Ultramarine is the most expensive of color processes." ], "decomposition": [ "What was the most expensive pigment used by Renaissance painters?", "Is #1 a shade of the color blue?" ], "evidence": [ [ [ [ "Ultramarine-2" ] ], [ [ "Ultramarine-1" ], "operation" ] ], [ [ [ "Blue-2" ] ], [ [ "Ultramarine-2" ], "operation" ] ], [ [ [ "Ultramarine-2" ] ], [ [ "Ultramarine-2" ] ] ] ] }, { "qid": "3f5af97fdff20028347b", "term": "French Revolution", "description": "Revolution in France, 1789 to 1798", "question": "Was the French Revolution televised?", "answer": false, "facts": [ "The french revolution occurred during the 1700's.", "Television was invented in 1927. " ], "decomposition": [ "When did the French Revolution occur?", "When did televisions become common?", "Is #2 before or within #1?" ], "evidence": [ [ [ [ "French Revolution-1" ] ], [ [ "Television-2" ] ], [ "operation" ] ], [ [ [ "French Revolution-1" ] ], [ [ "Television-2" ] ], [ "operation" ] ], [ [ [ "French Revolution-1" ] ], [ [ "Television-2" ] ], [ "operation" ] ] ] }, { "qid": "18a28739b11305043131", "term": "Eskimo", "description": "Name used to describe Indigenous people from the circumpolar region", "question": "Do the Eskimos sunbathe frequently?", "answer": false, "facts": [ "Sunbathing requires a high amount of sunshine.", "The Eskimos live in regions that receive very little sunshine.", "The Eskimos live in very cold regions, which would make it dangerous to be exposed to outside temperatures without clothes." ], "decomposition": [ "What is necessary for sunbathing?", "Where do Eskimos live?", "Is #1 frequently present in #2?" ], "evidence": [ [ [ [ "Sun tanning-1" ] ], [ [ "Eskimo-1" ] ], [ [ "Climate of the Arctic-1" ], "operation" ] ], [ [ [ "Sun tanning-1" ], "no_evidence" ], [ [ "Eskimo-1" ] ], [ [ "Arctic-5" ], "no_evidence", "operation" ] ], [ [ [ "Sun tanning-1" ] ], [ [ "Igloo-2" ] ], [ "operation" ] ] ] }, { "qid": "20abc7ba538a2fd1dd76", "term": "Small intestine", "description": "part of the digestive tract, following the stomach and followed by the large intestine", "question": "Will the small intenstine break down a cotton ball?", "answer": false, "facts": [ "The small intestine does not digest cellulose ", "Cotton fiber is 90% cellulose" ], "decomposition": [ "What compound is cotton mostly made up of?", "Can the small intestine digest #1?" 
], "evidence": [ [ [ [ "Cotton-1" ] ], [ [ "Cellulose-3" ] ] ], [ [ [ "Cotton pad-1" ] ], [ [ "Bezoar-1" ], "no_evidence", "operation" ] ], [ [ [ "Cotton-1" ] ], [ [ "Cellulose-3" ] ] ] ] }, { "qid": "0782cde19737531d14fe", "term": "University of Pittsburgh", "description": "American state-related research university located in Pittsburgh, Pennsylvania", "question": "Is University of Pittsburgh easier to enter than FBI?", "answer": true, "facts": [ "The University of Pittsburgh has around a 60% acceptance rate.", "The FBI estimated accepting 900 agents out of 16000 applicants in 2019." ], "decomposition": [ "What percent of applicants does University of Pittsburgh accept?", "How many applications did the FBI get in 2019?", "Out of #2, how many were accepted?", "What is #3 divided by #2?", "Is #1 greater than #4?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "University of Pittsburgh-2" ], "no_evidence" ], [ [ "Federal Bureau of Investigation-59" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "University of Pittsburgh-35" ], "no_evidence" ], [ [ "Federal Bureau of Investigation-61" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ed421aaf019e01f38a84", "term": "Landscape architect", "description": "person involved in the planning, design and sometimes direction of a landscape, garden, or distinct space", "question": "Would Persephone be a good consultant to a landscape architect?", "answer": true, "facts": [ "Persephone is a vegetation goddess. ", "A vegetation deity is a nature deity whose disappearance and reappearance, or life, death and rebirth, embodies the growth cycle of plants.", "Landscape architects deal with planning and laying out gardens and other plant life." ], "decomposition": [ "Over what domains does Persephone preside?", "Do landscape architects work with any of #1?" ], "evidence": [ [ [ [ "Persephone-1", "Persephone-2" ] ], [ [ "Landscape architect-1" ], "operation" ] ], [ [ [ "Persephone-1" ] ], [ [ "Landscaping-5" ] ] ], [ [ [ "Persephone-1" ] ], [ "operation" ] ] ] }, { "qid": "a2a5a06a55c24bf2d408", "term": "Pregnancy", "description": "time when children develop inside the mother's body before birth", "question": "Do women often need new shoes during their pregnancy?", "answer": true, "facts": [ "Pregnancy can cause swelling in the feet and legs.", "For safety and comfort, one needs to get new shoes if the size of their feet change." ], "decomposition": [ "Which signs and symptoms of pregnancy in women affect the lower extremities?", "Do #1 lead to a change in size of affected areas?" ], "evidence": [ [ [ [ "Pregnancy-9" ] ], [ "operation" ] ], [ [ [ "Pregnancy-9" ] ], [ "operation" ] ], [ [ [ "Inferior vena cava syndrome-3" ] ], [ [ "Edema-1" ], "operation" ] ] ] }, { "qid": "9052063382d30870a2ed", "term": "Depression (mood)", "description": "state of low mood and fatigue", "question": "Would Seroquel be the first treatment recommended by a doctor to someone with depression?", "answer": false, "facts": [ "Seroquel is a powerful drug that is prescribed for bipolar disorder.", "Seroquel has sedating effects and can increase feelings of depression.", "Depression is usually treated by SSRI's. ", "Seroquel is an atypical antipsychotic." 
], "decomposition": [ "What is Seroquel typically used for?", "What are the side effects of taking #1?", "Would #2 be helpful for someone with depression" ], "evidence": [ [ [ [ "Quetiapine-32" ] ], [ [ "Quetiapine-17" ] ], [ "no_evidence" ] ], [ [ [ "Quetiapine-1" ] ], [ [ "Quetiapine-2" ] ], [ "operation" ] ], [ [ [ "Quetiapine-1" ] ], [ [ "Quetiapine-11" ] ], [ [ "Quetiapine-12" ], "operation" ] ] ] }, { "qid": "3763c0523e44d02ff1e3", "term": "QR code", "description": "trademark for a type of matrix barcode", "question": "Do you have to put on glasses to read a QR code?", "answer": false, "facts": [ "Glasses are used to improve one's vision capabilities.", "QR codes are not readable by humans and have to be read by machines or programs." ], "decomposition": [ "Can a human read QR codes?" ], "evidence": [ [ [ "no_evidence", "operation" ] ], [ [ [ "QR code-5" ] ] ], [ [ [ "QR code-1" ] ] ] ] }, { "qid": "9cf2cfed8753e1a1a3dc", "term": "Anchovy", "description": "Family of fishes", "question": "Can an anchovy born in 2020 survive 25th US census?", "answer": false, "facts": [ "The US Census takes place every ten years.", "The 24th US Census took place in 2020.", "The 25th US Census will take place in 2030.", "The average lifespan of an anchovy is five years." ], "decomposition": [ "What is the ordinal number of the 2020 U.S. Census?", "How many years after #1 wll the 25th census occur?", "What is the maximum life span of an anchovy?", "Is #3 greater than #2?" ], "evidence": [ [ [ [ "2020 United States Census-1" ] ], [ [ "United States Census-1" ] ], [ [ "Japanese anchovy-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "2020 United States Census-1" ] ], [ [ "United States Census Bureau-4" ] ], [ [ "European anchovy-7", "Japanese anchovy-1" ] ], [ "operation" ] ], [ [ [ "2020 United States Census-1" ] ], [ [ "United States Census-1" ] ], [ [ "European anchovy-7" ] ], [ "operation" ] ] ] }, { "qid": "4ce0ad7ba3ee5886a794", "term": "Double-slit experiment", "description": "Physics experiment, showing light can be modelled by both waves and particles", "question": "Can a minor replicate the double-slit experiment?", "answer": true, "facts": [ "A minor is a human child.", "The double-slit experiment can theoretically be replicated by any human." ], "decomposition": [ "What species of living things does 'minor' refer to?", "Can #1 replicate the double-slit experiment?" ], "evidence": [ [ [ [ "Minor (law)-1" ] ], [ [ "Double-slit experiment-2" ] ] ], [ [ [ "Minor (law)-1" ] ], [ [ "Double-slit experiment-1" ], "operation" ] ], [ [ [ "Age of majority-1" ] ], [ [ "Double-slit experiment-2" ], "operation" ] ] ] }, { "qid": "810d95d42fdcfbc153df", "term": "Alan Greenspan", "description": "13th Chairman of the Federal Reserve in the United States", "question": "Do Squidward Tentacles and Alan Greenspan have different musical passions?", "answer": false, "facts": [ "Squidward Tentacles plays the clarinet.", "Alan Greenspan played clarinet and saxophone along with Stan Getz.", "Alan Greenspan studied clarinet at the Juilliard School from 1943 to 1944." ], "decomposition": [ "What musical instruments does Squidward Tentacles play?", "What musical instruments does Alan Greenspan play?", "Is at least one instrument in #1 also found in #2?" 
], "evidence": [ [ [ [ "Squidward Tentacles-5" ] ], [ [ "Alan Greenspan-5" ] ], [ "operation" ] ], [ [ [ "Squidward Tentacles-5" ] ], [ [ "Alan Greenspan-5" ] ], [ "operation" ] ], [ [ [ "Squidward Tentacles-5" ] ], [ [ "Alan Greenspan-5" ] ], [ "operation" ] ] ] }, { "qid": "064d8faaa8f164b55270", "term": "Apollo 15", "description": "Fourth crewed mission to land on the Moon", "question": "Would the crew of Apollo 15 have difficulty riding a unicycle?", "answer": true, "facts": [ "There were 3 astronauts in the crew of the Apollo 15 mission.", "A unicycle only contains one saddle, and is typically only operated by a single person." ], "decomposition": [ "What is the maximum number of people that can ride a typical unicycle?", "How many people were on the Apollo 15 crew?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Unicycle-1" ] ], [ [ "Apollo 15-1" ] ], [ "operation" ] ], [ [ [ "Unicycle-1" ], "no_evidence" ], [ [ "Apollo 15-6" ] ], [ "operation" ] ], [ [ [ "Unicycle-25" ] ], [ [ "Apollo 15-6" ] ], [ "operation" ] ] ] }, { "qid": "620957c2018f91328a2e", "term": "Infinitive", "description": "grammatical form", "question": "Is Shakespeare famous because of the infinitive form?", "answer": true, "facts": [ "Shakespeare wrote the play Hamlet", "Hamlet contains one of Shakespeare's most famous passages, Hamlet's soliloquy", "Hamlet's soliloquy begins with the line 'To be or not to be', which uses the infinitive form" ], "decomposition": [ "The use of the infinitive form in \"To be or not to be\" appears in which popular soliloquy?", "Which book contained #1", "Did Williams Shakespeare write #2?" ], "evidence": [ [ [ [ "To be, or not to be-1" ] ], [ [ "Hamlet-1" ] ], [ [ "Hamlet-2" ], "operation" ] ], [ [ [ "To be, or not to be-1" ] ], [ [ "To be, or not to be-1" ] ], [ [ "To be, or not to be-1" ] ] ], [ [ [ "To be, or not to be-1" ] ], [ [ "Hamlet-1" ] ], [ "operation" ] ] ] }, { "qid": "d1142162a82aab83611e", "term": "Television", "description": "Telecommunication medium for transmitting and receiving moving images", "question": "Did Gandhi watch the television show Bonanza?", "answer": false, "facts": [ "Bonanza was a television show that aired from September 12, 1959 until January 16, 1973.", "Gandhi was assassinated on January 30, 1948." ], "decomposition": [ "How long ago did Bonanza first air?", "How long ago did Gandhi die?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Bonanza-1" ] ], [ [ "Mahatma Gandhi-1" ] ], [ "operation" ] ], [ [ [ "Bonanza-1" ] ], [ [ "Family of Mahatma Gandhi-1" ] ], [ "operation" ] ], [ [ [ "Bonanza-1" ] ], [ [ "Mahatma Gandhi-90" ] ], [ "operation" ] ] ] }, { "qid": "5adc7e126ca3383da225", "term": "Farmer", "description": "person that works in agriculture", "question": "Do you need a farmer to make a circuit board?", "answer": false, "facts": [ "Farmers cultivate and produce crops and/or livestock for sale or consumption", "Circuit boards contain various man made materials as well as metals", "Metals are produced from the earth by miners" ], "decomposition": [ "What do farmers produce?", "What are the things needed to make a circuit board?", "Is any of #1 part of #2?" 
], "evidence": [ [ [ [ "Farmer-1" ] ], [ [ "Printed circuit board-1" ] ], [ "operation" ] ], [ [ [ "Farmer-9" ] ], [ [ "Stamped circuit board-4" ] ], [ "operation" ] ], [ [ [ "Farmer-1" ] ], [ [ "Printed circuit board-1" ] ], [ "operation" ] ] ] }, { "qid": "70b2122323424f6e80c2", "term": "Yuri Gagarin", "description": "Soviet pilot and cosmonaut, first human in space", "question": "Would LeBron James hypothetically glance upwards at Yuri Gagarin?", "answer": false, "facts": [ "LeBron James is 6 feet 9 inches tall.", "Yuri Gagarin was 5 feet 2 inches tall.", "Typically shorter individuals look up at taller individuals when they are speaking as it is polite to look face to face at someone when you are speaking to them." ], "decomposition": [ "How tall is LeBron James?", "How tall was Yuri Gagarin?", "Is #1 lesser than #2?" ], "evidence": [ [ [ [ "LeBron James-42" ], "no_evidence" ], [ [ "Yuri Gagarin-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "LeBron James-42" ] ], [ [ "Yuri Gagarin-9" ] ], [ "operation" ] ], [ [ [ "LeBron James-42" ] ], [ [ "Yuri Gagarin-9" ] ], [ "operation" ] ] ] }, { "qid": "af6afbc6da7065d522ee", "term": "Ludacris", "description": "American rapper and actor", "question": "Is Ludacris in same music genre as 2000's Binaural?", "answer": false, "facts": [ "Ludacris is a rapper, particularly in the southern rap style.", "Binaural was a 2000 album released by Pearl Jam.", "Pearl Jam is a grunge rock band formed in Seattle." ], "decomposition": [ "What genre does Ludacris produce music in?", "Who recorded the 2000 album Binaural?", "What genre does #2 produce music in?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Hip hop music-1", "Ludacris-1" ] ], [ [ "Binaural (album)-1" ] ], [ [ "Binaural (album)-1" ] ], [ "operation" ] ], [ [ [ "Ludacris-1" ] ], [ [ "Binaural (album)-1" ] ], [ [ "Pearl Jam-1" ] ], [ "operation" ] ], [ [ [ "Ludacris-1" ] ], [ [ "Binaural (album)-1" ] ], [ [ "Pearl Jam-1" ] ], [ "operation" ] ] ] }, { "qid": "4957e0329698c05832c0", "term": "Seismology", "description": "The scientific study of earthquakes and propagation of elastic waves through a planet", "question": "Did Brad Peyton need to know about seismology?", "answer": true, "facts": [ "Brad Peyton directed the movie San Andreas", "San Andreas is an earthquake disaster film", "Seismology is the science of earthquakes and related phenomena" ], "decomposition": [ "What does the study of seismology involve?", "What was the movie San Andreas primarily about?", "Did Brad Peyton direct San Andreas and is #2 included in #1?" ], "evidence": [ [ [ [ "Seismology-1" ] ], [ [ "San Andreas (film)-1" ] ], [ [ "San Andreas (film)-1" ], "operation" ] ], [ [ [ "Seismology-1" ] ], [ [ "San Andreas (film)-1" ] ], [ [ "Brad Peyton-1" ], "operation" ] ], [ [ [ "Seismology-1" ] ], [ [ "San Andreas (film)-1" ] ], [ "operation" ] ] ] }, { "qid": "082af323f414128588b8", "term": "Frigate", "description": "Type of warship", "question": "Are ropes required to operate a frigate?", "answer": true, "facts": [ "Frigates are a kind of sailing ship.", "Many features of ships require rope to use." ], "decomposition": [ "What force powers frigates?", "What characteristic of frigates allows them to use #1?", "Are ropes used to manipulate #2?" 
], "evidence": [ [ [ [ "Sailing-1" ] ], [ [ "Frigate-6" ] ], [ [ "Sailing ship-35" ], "operation" ] ], [ [ [ "Frigate-11" ] ], [ [ "Sail-1" ] ], [ [ "Sail-3" ], "no_evidence" ] ], [ [ [ "Full-rigged ship-5" ], "no_evidence" ], [ [ "Rigging-1" ] ], [ "operation" ] ] ] }, { "qid": "71691ae5050e621c9b4c", "term": "Breast cancer", "description": "cancer that originates in the mammary gland", "question": "Is someone more likely to survive having breast cancer in Japan than in Sweden?", "answer": false, "facts": [ "84.70% of people in Japan with breast cancer survive", "86.20% of people in Sweden with breast cancer survive" ], "decomposition": [ "What percentage of people survive breast cancer in Japan?", "What percentage of people survive breast cancer in Sweden?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Breast cancer-4" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b418ba4e11aee26faabc", "term": "Jason", "description": "Greek mythological hero", "question": "Does Jason have anything in common with Dr. Disrespect?", "answer": true, "facts": [ "Jason cheated on Medea with Creusa", "Dr. Disrespect cheated on his wife with another woman" ], "decomposition": [ "Was Jason faithful or unfaithful?", "Was Dr. Disrespect faithful or unfaithful?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Medea-10" ], "no_evidence" ], [ [ "Dr DisRespect-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Jason-3" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Jason-18" ] ], [ [ "Dr DisRespect-5" ] ], [ "operation" ] ] ] }, { "qid": "b1e061a88d467fdb632e", "term": "2009", "description": "Year", "question": "Could $1 for each 2009 eclipse buy a copy of TIME magazine in 2020?", "answer": true, "facts": [ "The 2020 Newsstand price of TIME magazine is $5.99.", "There were six eclipses in 2009 including 2 solar and 4 lunar eclipses." ], "decomposition": [ "What was the price of a single issue of TIME magazine in 2020?", "How many solar eclipses were there in 2009?", "How many lunar eclipses were there in 2009?", "What is #2 plus #3?", "Is #4 greater than or equal to #1?" ], "evidence": [ [ [ [ "Time (magazine)-5" ], "no_evidence" ], [ [ "July 2009 lunar eclipse-6" ], "no_evidence" ], [ [ "July 2009 lunar eclipse-4" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Time (magazine)-5" ], "no_evidence" ], [ [ "Solar eclipse of January 26, 1990-1", "Solar eclipse of July 22, 2009-1" ] ], [ [ "August 2009 lunar eclipse-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Time (magazine)-22", "Time (magazine)-5" ], "no_evidence" ], [ [ "Solar eclipse-3" ], "no_evidence" ], [ [ "Lunar eclipse-23" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "90474e8efe599628cbc4", "term": "Olympia, Washington", "description": "State capital and city in Washington, United States", "question": "Does Olympia Washington share name with Hephaestus's workshop location?", "answer": true, "facts": [ "Olympia Washington, is named after Mount Olympus.", "Mount Olympus is a mountain range in Washington named after the ancient Greek Mount Olympus.", "Hephaestus was the ancient Greek god of the forge and had a workshop on Mount Olympus." ], "decomposition": [ "Where did Hephaestus have his workshop?", "Olympia, Washington derived it's name from what mountain?", "Is #2 the same as #1?" 
], "evidence": [ [ [ [ "Hephaestus-5" ] ], [ [ "Olympia, Washington-3" ] ], [ "operation" ] ], [ [ [ "Hephaestus-2" ] ], [ [ "Mount Olympus-1" ] ], [ "operation" ] ], [ [ [ "Hephaestus-5" ], "operation" ], [ [ "Hephaestus-5" ], "operation" ], [ "operation" ] ] ] }, { "qid": "dd5a2ed28bfb890a14e5", "term": "Spaghetti", "description": "Type of pasta", "question": "Should a Celiac sufferer avoid spaghetti?", "answer": true, "facts": [ "Celiac is a disease in which the body cannot process gluten.", "Gluten is a protein found in wheat.", "Spaghetti is made with milled wheat and water." ], "decomposition": [ "What food ingredients should people with celiac disease avoid?", "What ingredients make up spaghetti?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "Coeliac disease-2" ] ], [ [ "Spaghetti-1" ] ], [ "operation" ] ], [ [ [ "Coeliac disease-2" ] ], [ [ "Spaghetti-1" ] ], [ "operation" ] ], [ [ [ "Coeliac disease-36" ] ], [ [ "Spaghetti-1" ] ], [ "operation" ] ] ] }, { "qid": "2eed74f7dc66f5aadcd7", "term": "Saudi Aramco", "description": "Saudi Arabian petroleum and natural gas company", "question": "Was Saudi Aramco started due to an assassination?", "answer": true, "facts": [ "Saudi Aramco was formed in response to oil shortages during World War I", "The origins of World War I can be traced to the assassination of Archduke Franz Ferdinand in Sarajevo" ], "decomposition": [ "Saudi Aramco was formed in response to oil shortages during what major conflict?", "What event is widely acknowledged to have started #1?", "Is #2 an assassination?" ], "evidence": [ [ [ [ "Saudi Aramco-7" ] ], [ [ "Paris in the Belle Époque-96" ] ], [ [ "Assassination-1" ] ] ], [ [ [ "Saudi Aramco-7" ] ], [ [ "Assassination of Archduke Franz Ferdinand-1" ] ], [ "operation" ] ], [ [ [ "Saudi Aramco-7" ] ], [ [ "Assassination of Archduke Franz Ferdinand-1" ] ], [ "operation" ] ] ] }, { "qid": "e863b1e1d891da16b06a", "term": "Order of the British Empire", "description": "British order of chivalry", "question": "Is Hermione Granger eligible for the Order of the British Empire?", "answer": false, "facts": [ "The Order of the British Empire is awarded to people that have made significant contributions to the United Kingdom", "Hermione Granger is a fictional character from the Harry Potter series of books" ], "decomposition": [ "What criteria makes one eligible for the Order of the British Empire?", "Does Hermione Granger meet #1?" ], "evidence": [ [ [ [ "Order of the British Empire-2" ] ], [ [ "Hermione Granger-1" ], "no_evidence" ] ], [ [ [ "Order of the British Empire-1" ], "no_evidence" ], [ [ "Hermione Granger-3" ], "no_evidence", "operation" ] ], [ [ [ "Order of the British Empire-2" ] ], [ [ "Hermione Granger-1" ], "operation" ] ] ] }, { "qid": "a0e7719bca9b347d32db", "term": "Carl Linnaeus", "description": "Swedish botanist, physician, and zoologist", "question": "Did Linnaeus edit Darwin's draft of Origin of Species?", "answer": false, "facts": [ "Linnaeus died in 1778", "Origin of Species was published in 1859" ], "decomposition": [ "When did Carl Linnaeus pass away?", "When was Origin of Species first published?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Carl Linnaeus-1" ] ], [ [ "On the Origin of Species-1" ] ], [ "operation" ] ], [ [ [ "Carl Linnaeus-1" ] ], [ [ "On the Origin of Species-1" ] ], [ "operation" ] ], [ [ [ "Carl Linnaeus-1" ] ], [ [ "On the Origin of Species-1" ] ], [ "operation" ] ] ] }, { "qid": "957adcdf1c676bc082da", "term": "Riksdag", "description": "Legislative body of Sweden", "question": "Is the Riksdag a political entity in Scandinavia?", "answer": true, "facts": [ "The Riksdag is the legislative branch of the Swedish government.", "Sweden is part of Scandinavia." ], "decomposition": [ "What country does the Riksdag belong to?", "Which countries are part of Scandinavia?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Riksdag-1" ] ], [ [ "Scandinavia-1" ] ], [ "operation" ] ], [ [ [ "Riksdag-1" ] ], [ [ "Scandinavia-1" ] ], [ "operation" ] ], [ [ [ "Riksdag-1" ] ], [ [ "Scandinavia-1" ] ], [ "operation" ] ] ] }, { "qid": "1534ef2130cb48d6abb4", "term": "Heart", "description": "organ for the circulation of blood in animal circulatory systems", "question": "Is a jellyfish safe from atherosclerosis?", "answer": true, "facts": [ "Atherosclerosis is a condition in which the arteries to the heart are blocked.", "Jellyfish use their guts to circulate nutrients because they do not have hearts." ], "decomposition": [ "What structures are affected by atherosclerosis?", "What bodily system does #1 contribute to?", "What structures are found in the jellyfish #2?", "Are there structures in common in both #1 and #3?" ], "evidence": [ [ [ [ "Atherosclerosis-1" ] ], [ [ "Circulatory system-1" ] ], [ [ "Jellyfish-18" ] ], [ "operation" ] ], [ [ [ "Atherosclerosis-1" ] ], [ [ "Artery-2" ] ], [ [ "Jellyfish-18" ] ], [ "operation" ] ], [ [ [ "Atherosclerosis-1" ] ], [ [ "Artery-2" ] ], [ [ "Jellyfish-18" ] ], [ "operation" ] ] ] }, { "qid": "0df67cbe1345f90555dd", "term": "Quran", "description": "The central religious text of Islam", "question": "Would Dave Chappelle pray over a Quran?", "answer": true, "facts": [ "Dave Chappelle converted to Islam in 1998.", "Dave Chappelle has not deviated from his religious beliefs since 1998 and is a practicing Muslim.", "Practicing Muslims pray often." ], "decomposition": [ "Which group uses the Quran as their religious text?", "Does Dave Chappelle belong to #1?" ], "evidence": [ [ [ [ "Quran-1" ] ], [ [ "Dave Chappelle-57" ], "operation" ] ], [ [ [ "Quran-1" ] ], [ [ "Dave Chappelle-57" ] ] ], [ [ [ "Quran-1" ] ], [ [ "Dave Chappelle-57" ], "operation" ] ] ] }, { "qid": "16bb34f451c9620e422c", "term": "Justin Timberlake", "description": "American singer, record producer, and actor", "question": "Can Justin Timberlake ride Shipwreck Falls at Six Flags?", "answer": true, "facts": [ "Shipwreck Falls is a boat ride at Six Flags", "The minimum height for Shipwreck Falls is 42\"", "Justin Timberlake is 73\" tall" ], "decomposition": [ "What is Shipwreck Falls?", "What is the minimum height required to ride #1?", "How tall is Justin Timberlake?", "Is #3 bigger than #2?" 
], "evidence": [ [ [ [ "Shipwreck Falls-1" ] ], [ "no_evidence" ], [ [ "Justin Timberlake-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Shipwreck Falls-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Shipwreck Falls-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "381d38f377cfce5087b9", "term": "Dementia", "description": "long-term brain disorders causing impaired memory, reasoning, and normal function together with personality changes", "question": "Can dementia be cured with a cast?", "answer": false, "facts": [ "Dementia refers to various disorders of the brain.", "Casts are used to help treat broken bones.", "The brain does not contain any bones." ], "decomposition": [ "What part of the body does Dementia affect?", "What do cast help fix?", "Are there any #2 in #1?" ], "evidence": [ [ [ [ "Dementia-21" ] ], [ [ "Bone fracture-22" ] ], [ [ "Bone fracture-22", "Dementia-21" ], "operation" ] ], [ [ [ "Dementia-1" ] ], [ [ "Orthopedic cast-1" ] ], [ "operation" ] ], [ [ [ "Dementia-1" ] ], [ [ "Orthopedic cast-1" ] ], [ [ "Brain-1" ] ] ] ] }, { "qid": "68f04375b2ac70c73759", "term": "Düsseldorf", "description": "Place in North Rhine-Westphalia, Germany", "question": "Does Düsseldorf have only a small number of smoggy days each year?", "answer": true, "facts": [ "Mercer's 2012 Quality of Living survey ranked Düsseldorf the sixth most livable city in the world.", "Clean air is an important attribute for a livable city.", "Smog is a term for air pollution." ], "decomposition": [ "What is another term for smog?", "What is Düsseldorf ranked as in Mercer's 2012 Quality of Living survey?", "To be #2, does a country need to have limited #1 days a year?" ], "evidence": [ [ [ [ "Smog-1" ] ], [ [ "Global Liveability Ranking-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Smog-1" ] ], [ [ "Düsseldorf-2" ] ], [ "operation" ] ], [ [ [ "Smog-1" ] ], [ [ "Düsseldorf-2" ] ], [ [ "Most livable cities-7" ] ] ] ] }, { "qid": "6491c2e5de52c190eff8", "term": "Saint", "description": "one who has been recognized for having an exceptional degree of holiness, sanctity, and virtue", "question": "Can a false pope become a saint?", "answer": true, "facts": [ "A false pope, or antipope, is someone that tries to claim they are the true pope but the church rejects them.", "Hippolytus (c. 170–235 AD) headed a schismatic group as a rival to the Bishop of Rome, thus becoming an antipope.", "Hippolytus (c. 170–235 AD) was named a saint in the Roman Catholic Church." ], "decomposition": [ "Which actions could make the Catholic church consider one a false pope or antipope?", "What role did Hippolytus (c. 170–235 AD) play in the schismatic group against the Bishop of Rome?", "Is #2 a form of #1 and he still became saint?" ], "evidence": [ [ [ [ "Antipope-1" ] ], [ [ "Hippolytus of Rome-1" ] ], [ [ "Hippolytus of Rome-24" ], "operation" ] ], [ [ [ "Antipope-1" ], "no_evidence" ], [ [ "Hippolytus of Rome-1" ] ], [ [ "Hippolytus of Rome-2" ], "operation" ] ], [ [ [ "Antipope-1" ] ], [ [ "Antipope-4" ] ], [ [ "Antipope-16", "Saint-7" ], "no_evidence" ] ] ] }, { "qid": "3c6d21dff562440d86a4", "term": "Snowshoe", "description": "Footwear for walking easily across snow", "question": "Can a snake wear a snowshoe?", "answer": false, "facts": [ "Snowshoes are worn by attaching them to the wearer's feet.", "Snakes do not have feet." ], "decomposition": [ "Which part of the body are snowshoes worn on?", "Do snakes have #1?" 
], "evidence": [ [ [ [ "Snowshoe-1" ] ], [ [ "Snake-1" ] ] ], [ [ [ "Snowshoe-1" ] ], [ [ "Snake-1" ], "operation" ] ], [ [ [ "Footwear-1", "Snowshoe-1" ] ], [ [ "Snake-1" ] ] ] ] }, { "qid": "36f063a2fc1338b5a21d", "term": "Starch", "description": "glucose polymer used as energy store in plants", "question": "Can a wheelbarrow full of starch kill hyperglycemics?", "answer": true, "facts": [ "Hyperglycemia is a condition in which people have higher than normal blood glucose levels.", "Starch is a compound made by plants that is made of numerous glucose units.", "An excess of glucose can lead to diabetic complications and can result ind death.", "The average wheelbarrow can hold up to 1200 pounds." ], "decomposition": [ "What is hyperglycemia?", "What is starch made of?", "How much can the average wheelbarrow hold?", "Could #3 of #2 potentially be fatal to someone who has #1?" ], "evidence": [ [ [ [ "Hyperglycemia-1" ] ], [ [ "Starch-1" ] ], [ [ "Wheelbarrow-2" ] ], [ [ "Hyperglycemia-21" ] ] ], [ [ [ "Hyperglycemia-1" ] ], [ [ "Starch-1" ] ], [ [ "Wheelbarrow-2" ] ], [ [ "Hyperglycemia-2" ], "operation" ] ], [ [ [ "Hyperglycemia-1" ] ], [ [ "Starch-1" ] ], [ [ "Wheelbarrow-2" ] ], [ "operation" ] ] ] }, { "qid": "363e5889466d85bba2ca", "term": "Giraffe", "description": "Tall African ungulate", "question": "Do giraffes require special facilities at zoos?", "answer": true, "facts": [ "Giraffes are much taller than other land animals.", "Giraffe shelters at zoos must be built larger than shelters for other animals to accommodate their height." ], "decomposition": [ "What is the most distinctive feature of a giraffe?", "Does #1 make it necessary for them to have different facilities from other animals at a zoo?" ], "evidence": [ [ [ [ "Giraffe-16" ] ], [ [ "Giraffe-16" ] ] ], [ [ [ "Giraffe-2" ] ], [ [ "West African giraffe-4" ], "no_evidence", "operation" ] ], [ [ [ "Giraffe-2" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6c43fa359095fd0845f5", "term": "Camel", "description": "Genus of mammals", "question": "Is Bactrian Camel most impressive animal when it comes to number of humps?", "answer": false, "facts": [ "The Bactrian Camel is a camel with two humps native to Central Asia.", "Three humped camels were discovered on the Arabian peninsula in 2019." ], "decomposition": [ "How many humps does the Bactrian Camel have?", "What is the most number of humps seen on a camel?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Bactrian camel-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Camel-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Bactrian camel-1" ] ], [ [ "Dromedary-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3503e8d41364eb0b33bc", "term": "Bartender", "description": "person who serves usually alcoholic beverages behind the bar in a licensed establishment", "question": "Would a responsible bartender make a drink for Millie Bobby Brown?", "answer": false, "facts": [ "Millie Bobby Brown is currently 16 years old.", "In the United States, the minimum legal age to purchase any alcohol beverage is 21 years old.", "Bartenders are usually responsible for confirming that customers meet the legal drinking age requirements before serving them alcoholic beverages. " ], "decomposition": [ "How old is Millie Bobby Brown?", "What is the minimum legal age one must be to be served alcohol in the US?", "Is #1 larger than #2?" 
], "evidence": [ [ [ [ "Millie Bobby Brown-1" ] ], [ [ "National Minimum Drinking Age Act-1" ] ], [ "operation" ] ], [ [ [ "Millie Bobby Brown-1" ] ], [ [ "Legal drinking age-6" ] ], [ "operation" ] ], [ [ [ "Millie Bobby Brown-1" ] ], [ [ "Legal drinking age-6" ] ], [ "operation" ] ] ] }, { "qid": "91b75d57e720179305e9", "term": "Salsa music", "description": "Latin American dance music genre", "question": "Would Ibn Saud tolerate salsa music?", "answer": false, "facts": [ "Ibn Saud was the first ruler of Saudi Arabia and adhered to Wahhabism.", "Wahhabism is an ultra conservative sect of Islam that prohibits dancing.", "Salsa is a popular Latin American music genre that is heavily connected to dance." ], "decomposition": [ "Which religion(s) did Ibn Saud practice?", "Did #1 permit its adherents to listen to or play music during Ibn Saud's lifetime?" ], "evidence": [ [ [ [ "Ibn Saud-3" ] ], [ [ "Islamic music-28" ], "operation" ] ], [ [ [ "Ibn Saud-3" ] ], [ [ "Wahhabism-1", "Wahhabism-52" ], "operation" ] ], [ [ [ "Ibn Saud-3" ] ], [ [ "Najd-23" ], "no_evidence" ] ] ] }, { "qid": "032419040e53e3e9d194", "term": "House of Lords", "description": "upper house in the Parliament of the United Kingdom", "question": "Was Aristotle a member of the House of Lords?", "answer": false, "facts": [ "Aristotle died in 322 BC.", "The House of Lords is grown out of the Model Parliament, which was the first English Parliament.", "The Model Parliament was held in 1295." ], "decomposition": [ "When did Aristotle die?", "Where did the House of Lords originate from?", "When did #2 occur?", "Did #3 happen before #1?" ], "evidence": [ [ [ [ "Aristotle-1" ] ], [ [ "House of Commons of the United Kingdom-17" ] ], [ [ "House of Commons of the United Kingdom-17" ] ], [ "operation" ] ], [ [ [ "Aristotle-10" ], "no_evidence" ], [ [ "House of Lords-8" ] ], [ [ "House of Lords-8" ] ], [ "operation" ] ], [ [ [ "Aristotle-69" ], "no_evidence" ], [ [ "House of Lords-18" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "8aa5c9ba2ae74151dd6e", "term": "Krishna", "description": "Major deity in Hinduism", "question": "Was Krishna skilled at using the bow?", "answer": true, "facts": [ "Lord Krishna was known as the eighth manifestation of the god Vishnu.", "Vishnu had a trove of weapons including the Sudarshana Chakra and Sharanga.", "Sharanga was a celestial bow and a favored weapon of Vishnu." ], "decomposition": [ "Which Hindu god was Krishna known to be a manifestation of?", "Which weapons belonging to #1 were among his favorite?", "Is the bow included in #2?" ], "evidence": [ [ [ [ "Krishna-1" ] ], [ [ "Sharanga-1" ] ], [ [ "Sharanga-1" ] ] ], [ [ [ "Krishna-1" ] ], [ [ "Sharanga-1" ] ], [ "operation" ] ], [ [ [ "Krishna-1" ] ], [ [ "Krishna-24", "Krishna-36", "Sharanga-1" ] ], [ "operation" ] ] ] }, { "qid": "f5c0f0f85f624d9f8016", "term": "Dessert", "description": "A course that concludes a meal; usually sweet", "question": "Is dessert eaten before breakfast?", "answer": false, "facts": [ "Desserts are sweets.", "Meals generally begin with savory foods, and sweets eaten after." ], "decomposition": [ "What is a dessert?", "Are #1 usually sweet or salty?", "Do meals generally begin with foods that are #2?" 
], "evidence": [ [ [ [ "Dessert-1" ] ], [ [ "Dessert-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Dessert-1" ] ], [ [ "Dessert-1" ] ], [ "operation" ] ], [ [ [ "Dessert-1" ] ], [ [ "Dessert-2" ] ], [ [ "Breakfast-85" ], "no_evidence" ] ] ] }, { "qid": "17b8fa74f450c4d0e56b", "term": "Alice in Wonderland (1951 film)", "description": "1951 American animated musical fantasy film produced by Walt Disney Productions", "question": "Does Disney's Alice in Wonderland involve the celebration of a holiday?", "answer": true, "facts": [ "In the movie, Alice meets the Mad Hatter.", "The Mad Hatter is having a tea party to celebrate his Unbirthday.", "The Unbirthday is a holiday which happens every day of the year which is not the subject's actual birthday." ], "decomposition": [ "What celebrations were featured in the Disney movie Alice in Wonderland?", "Is any of #1 an holiday?" ], "evidence": [ [ [ [ "Alice in Wonderland (1951 film)-7" ] ], [ "operation" ] ], [ [ [ "Alice in Wonderland (franchise)-14" ] ], [ [ "Birthday-1" ], "operation" ] ], [ [ [ "Alice in Wonderland (1951 film)-9" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "598be3dcddbd7775a827", "term": "Bob Marley", "description": "Jamaican singer-songwriter", "question": "Could Bob Marley's children hypothetically win tug of war against Kublai Khan's children?", "answer": false, "facts": [ "Bob Marley had 9 children.", "Kublai Khan had 23 children.", "Many of Bob Marley's children became singers, and followed his themes of peace and love.", "The children of Kublai Khan followed in his footsteps and were fierce warlords." ], "decomposition": [ "How many children did Bob Marley have?", "How many children did Kublai Khan have?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Bob Marley-42" ] ], [ [ "Kublai Khan-71", "Toghon (son of Kublai)-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bob Marley-42" ] ], [ [ "Kublai Khan-71", "Kublai Khan-76" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Bob Marley-42" ] ], [ [ "Kublai Khan-76" ] ], [ "operation" ] ] ] }, { "qid": "d9f255a491a0e4048210", "term": "Separation of church and state", "description": "principle to separate religious and civil institutions", "question": "Does USA fail separation of church and state in multiple ways?", "answer": true, "facts": [ "Separation of church ad state refers to keeping God and religion out of state matters.", "Presidents of the United States are sworn in by placing their hand on a bible.", "The US currency contains the words, \"In God We Trust.\"", "The Pledge of Allegiance states, \"One Nation Under God.\"" ], "decomposition": [ "How are US Presidents sworn in?", "What is the inscription on the US currency?", "What does the Pledge of Allegiance state?", "Do #1, #2 and #3 contain references to religion/the chuch?" 
], "evidence": [ [ [ [ "President of the United States-46" ] ], [ [ "In God We Trust-2" ] ], [ [ "Pledge of Allegiance-1" ] ], [ "operation" ] ], [ [ [ "United States presidential inauguration-23" ] ], [ [ "In God We Trust-15" ] ], [ [ "Pledge of Allegiance-1" ] ], [ "operation" ] ], [ [ [ "Oath of office of the President of the United States-12" ] ], [ [ "In God We Trust-3" ] ], [ [ "Pledge of Allegiance-43" ] ], [ "operation" ] ] ] }, { "qid": "dc9a0bab0809f6a1b59b", "term": "Star Wars", "description": "Epic science fantasy space opera franchise", "question": "Do Star Wars fans say \"beam me up\" often?", "answer": false, "facts": [ "Beam me up is an expression from Star Trek.", "Much to the annoyance of fans, Star Trek and Star Wars are often confused for one another. " ], "decomposition": [ "Where does the expression beam me up come from?", "Is the answer to #1 the same as Star Wars?" ], "evidence": [ [ [ [ "Beam me up, Scotty-1" ] ], [ "operation" ] ], [ [ [ "Beam me up, Scotty-1" ] ], [ "operation" ] ], [ [ [ "Beam me up, Scotty-1" ] ], [ "operation" ] ] ] }, { "qid": "97c91d5613b99fd4098f", "term": "Gorilla", "description": "Genus of mammals", "question": "Are gorillas closely related to humans?", "answer": true, "facts": [ "Gorillas are part of the animal family Hominidae.", "Hominidae also includes the genus Homo, which only contains the human species." ], "decomposition": [ "What animal family are Gorillas part of?", "Are humans also part of #1?" ], "evidence": [ [ [ [ "Hominidae-1" ] ], [ "operation" ] ], [ [ [ "Gorilla-1" ] ], [ [ "Primate-2" ] ] ], [ [ [ "Hominidae-1" ] ], [ [ "Hominidae-1" ] ] ] ] }, { "qid": "10325585370ba368ca9a", "term": "Goat", "description": "domesticated mammal raised primarily for its milk", "question": "Are goats found on abyssal plains?", "answer": false, "facts": [ "An abyssal plain is typically located between 10,000 and 20,000 feet below the surface of the ocean", "A goat is a mammal that lives on land and cannot intake oxygen from underwater environments" ], "decomposition": [ "What things do goats need to live?", "Where are abyssal plains located?", "Is everything in #1 also found in #2?" ], "evidence": [ [ [ [ "Goat-1" ], "no_evidence" ], [ [ "Abyssal plain-1" ] ], [ "operation" ] ], [ [ [ "Goat-5" ] ], [ [ "Abyssal plain-1" ] ], [ "operation" ] ], [ [ [ "Goat-23" ] ], [ [ "Abyssal plain-1" ] ], [ "operation" ] ] ] }, { "qid": "fc9339f21f44a3841a7c", "term": "Koala", "description": "An arboreal herbivorous marsupial native to Australia.", "question": "Do Koalas prefer Eucalyptus over meat?", "answer": true, "facts": [ "Koalas are herbivores.", "Koalas main dietary staple is eucalyptus " ], "decomposition": [ "What kind of diet do Koalas follow?", "Are Eucalyptus part of #1?" ], "evidence": [ [ [ [ "Koala-2" ] ], [ "operation" ] ], [ [ [ "Koala-2" ] ], [ [ "Eucalypt-5", "Koala-2" ] ] ], [ [ [ "Koala-2" ] ], [ "operation" ] ] ] }, { "qid": "a61431fb7fe82f6b2ee0", "term": "Julian calendar", "description": "solar calendar in use from imperial Rome until after the Reformation", "question": "Did Saint Augustine use the Julian calendar?", "answer": true, "facts": [ "The Julian calendar was in use from 45 BC to the late 16th century AD", "Saint Augustine lived from 354 AD to 430 AD" ], "decomposition": [ "During what years was the Julian calendar used?", "When did Saint Augustine live?", "Is #2 during the time period listed in #1?" 
], "evidence": [ [ [ [ "Julian calendar-1", "Julian calendar-2" ] ], [ [ "Augustine of Hippo-1" ] ], [ "operation" ] ], [ [ [ "Julian calendar-1", "Julian calendar-2" ] ], [ [ "Augustine of Hippo-1" ] ], [ "operation" ] ], [ [ [ "Julian calendar-2" ] ], [ [ "Augustine of Hippo-1" ] ], [ "operation" ] ] ] }, { "qid": "27551dd918fdafe87524", "term": "New York Harbor", "description": "harbor in the New York City, U.S.A. metropolitan area", "question": "Did Donald Trump come up with the idea for the New York Harbor?", "answer": false, "facts": [ "The New York Harbor is at the mouth of the Hudson River", "A harbor is a sheltered body of water where boats and ships can be docked.", "The New York Harbor has been used since colonial era of the 1500s.", "Donald Trump is a failed business man and 2016 president elect.", "Donald Trump makes outrageous deceitful claims " ], "decomposition": [ "When was the New York Harbor built?", "When was Donald Trump born?", "Did #2 come before #1?" ], "evidence": [ [ [ [ "New York Harbor-2" ] ], [ [ "Donald Trump-1" ] ], [ "operation" ] ], [ [ [ "New York Harbor-2" ] ], [ [ "Donald Trump-1" ] ], [ "operation" ] ], [ [ [ "New York Harbor-4" ] ], [ [ "Donald Trump-1" ] ], [ "operation" ] ] ] }, { "qid": "31cd0c0fe3dfe163e662", "term": "Wheat", "description": "Cereal grain", "question": "Can a woman on average have a baby before wheat seed blooms?", "answer": false, "facts": [ "The average time it takes for a woman to give birth is 9 months.", "Wheat takes between 7 to 8 months to harvest." ], "decomposition": [ "How long does pregnancy typically last in humans?", "How long does it typically take to grow and harvest wheat?", "Is #1 shorter than #2?" ], "evidence": [ [ [ [ "Pregnancy-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Pregnancy-1" ] ], [ [ "Intensive crop farming-11" ] ], [ "operation" ] ], [ [ [ "Pregnancy-1" ] ], [ [ "Intensive crop farming-11" ] ], [ "operation" ] ] ] }, { "qid": "b666e61a04dff4fc1c44", "term": "Jackfruit", "description": "species of plant", "question": "Would it be safe to have a jackfruit thrown at your head?", "answer": false, "facts": [ "Jackfruit can weigh between 22-55 lbs. ", "Jackfruit are covered in small spikes." ], "decomposition": [ "How much do jackfruit weigh?", "Is #1 light enough to not hurt you?" ], "evidence": [ [ [ [ "Jackfruit-2" ] ], [ [ "Jackfruit-2" ], "no_evidence" ] ], [ [ [ "Jackfruit-2" ] ], [ "operation" ] ], [ [ [ "Jackfruit-2" ] ], [ "operation" ] ] ] }, { "qid": "3ea7edd8c6bf70af1f1b", "term": "Vice President of the United States", "description": "Second highest executive office in United States", "question": "Was the first Vice President of the United States an Ottoman descendant?", "answer": false, "facts": [ "The first Vice President of the United States was John Adams.", "The Ottomans were a Turkic group that conquered Constantinople in 1453.", "John Adams was descended from English Puritans." ], "decomposition": [ "Who was the first Vice President of the United States?", "Which group of people was #1 a descendant of?", "Is #2 the same as Ottoman?" 
], "evidence": [ [ [ [ "John Adams-1" ] ], [ [ "John Adams-5" ], "no_evidence" ], [ [ "Ottoman dynasty-1" ], "operation" ] ], [ [ [ "Vice President of the United States-52" ] ], [ [ "John Adams-6" ], "no_evidence" ], [ [ "Christianity in the modern era-12" ], "operation" ] ], [ [ [ "John Adams-1" ] ], [ [ "John Adams-5" ] ], [ "operation" ] ] ] }, { "qid": "867d95acb802575522dc", "term": "Aldi", "description": "Germany-based supermarket chain", "question": "Are all United States Aldi locations owned by the same company?", "answer": false, "facts": [ "Aldi is actually two German-based supermarket chains, Aldi Nord and Aldi Sud.", "Both companies operate internationally, but the United States is the only country other than Germany where both Aldi chains operate." ], "decomposition": [ "Which country is Aldi based in?", "How many chains of Aldi operate in #1?", "Are each of #2 owned by different organizations?", "How many chains of Aldi operate in the US?", "Is #3 negative or #4 different than #2?" ], "evidence": [ [ [ [ "Aldi-1" ] ], [ [ "Aldi-2" ] ], [ [ "Aldi-13" ] ], [ [ "Aldi-2" ] ], [ "operation" ] ], [ [ [ "Aldi-1" ] ], [ [ "Aldi-1" ] ], [ "no_evidence", "operation" ], [ [ "Aldi-2" ] ], [ "operation" ] ], [ [ [ "Aldi-1" ] ], [ [ "Aldi-1" ] ], [ [ "Aldi-1" ] ], [ [ "Aldi-17", "Aldi-2" ] ], [ "operation" ] ] ] }, { "qid": "5f24cbd41798ed5c7608", "term": "Saint Peter", "description": "apostle and first pope", "question": "Could Saint Peter watch television?", "answer": false, "facts": [ "Saint Peter died in 64 BC.", "The television was invented in 1900." ], "decomposition": [ "When was television invented?", "When did Saint Peter die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "History of television-15" ] ], [ [ "Saint Peter-55" ] ], [ "operation" ] ], [ [ [ "Television-12" ], "no_evidence" ], [ [ "Saint Peter-57" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Television-2" ] ], [ [ "Saint Peter-1" ] ], [ "operation" ] ] ] }, { "qid": "0dd4ab1e6386856acf76", "term": "Universal Music Group", "description": "American music corporation", "question": "Will NY Stock Exchange closing bell be heard in Universal Music Group's headquarters?", "answer": false, "facts": [ "The New York Stock Exchange is located in New York, USA.", "Universal Music Group's headquarters is located in Santa Monica, California.", "Santa Monica is about 2800 miles from New York.", "A shout can be heard up to 100 meters away." ], "decomposition": [ "Where is the New York Stock Exchange located?", "Where is Universal Music Group's headquarters located?", "What is the distance between #1 and #2?", "Is #3 a reasonable distance within which a bell's chime can be heard?" 
], "evidence": [ [ [ [ "New York Stock Exchange-1" ] ], [ [ "Universal Music Group-1" ] ], [ "no_evidence", "operation" ], [ "operation" ] ], [ [ [ "New York Stock Exchange-1" ] ], [ [ "Universal Music Group-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "New York Stock Exchange-1" ] ], [ [ "Universal Music Group-1" ] ], [ "no_evidence", "operation" ], [ [ "Bell-32" ], "operation" ] ] ] }, { "qid": "1515242f123df1362ad7", "term": "Elk", "description": "Large antlered species of deer from North America and east Asia", "question": "Would a body builder prefer an elk burger over a beef burger?", "answer": true, "facts": [ "Bodybuilders want to build muscle and keep fat low", "Elk meat is leaner than beef", "Elk meat has higher protein than beef", "Protein helps build muscle" ], "decomposition": [ "Which nutrients are more important for a body builder's diet?", "How is an elk burger different from a beef burger in terms of nutrients?", "Considering #1 and #2 would an elk burger be a better source of #1?" ], "evidence": [ [ [ [ "Bodybuilding-31", "Bodybuilding-41" ] ], [ [ "Elk-3" ] ], [ [ "Bodybuilding-31", "Elk-3" ] ] ], [ [ [ "Bodybuilding-41" ] ], [ [ "Elk-3" ] ], [ "operation" ] ], [ [ [ "Bodybuilding-39" ] ], [ [ "Elk-3" ] ], [ [ "Elk-3" ], "operation" ] ] ] }, { "qid": "a88bcb511bb3ad8f9af8", "term": "Super Mario", "description": "platform video game series from Nintendo's Mario franchise", "question": "Does Super Mario protagonist hypothetically not need continuing education classes in Illinois?", "answer": false, "facts": [ "Mario, the protagonist of Super Mario, is a plumber by profession.", "Continuing education classes are required for certain professions in certain jurisdictions.", "Plumbers are required in Illinois to take continuing education classes." ], "decomposition": [ "Who is the protagonist of Super Mario?", "What is #1's profession? ", "In Illinois, can #2's avoid taking continuing education classes?" ], "evidence": [ [ [ [ "Super Mario-2" ] ], [ [ "Kill the Plumber-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Super Mario Bros.-4" ] ], [ "no_evidence" ], [ [ "Plumber-9" ], "operation" ] ], [ [ [ "Mario-1" ] ], [ [ "Mario-1" ] ], [ [ "Continuing education-9" ], "no_evidence", "operation" ] ] ] }, { "qid": "9bb59488a227a5f5cd62", "term": "Samsung Galaxy", "description": "series of Android mobile computing devices", "question": "Can you save every HD episode of Game of Thrones on Samsung Galaxy A10e?", "answer": false, "facts": [ "The Samsung Galaxy A10e has 32GB of storage.", "The average storage requirement of an HD episode of Game of Thrones is 600MB", "There are 60 total episodes of Game of Thrones.", "There are 1000MB in one GB." ], "decomposition": [ "How much storage does a Samsung Galaxy A10e have?", "What is #1 multiplied by 1000?", "What is the average storage requirement for an HD episode of Game of Thrones?", "How many episodes are the of Game of Thrones?", "Is #2 greater than or equal to #3 multiplied by #4?" 
], "evidence": [ [ [ [ "Samsung Galaxy A10-1" ] ], [ "operation" ], [ "no_evidence" ], [ [ "The Iron Throne (Game of Thrones)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Samsung Galaxy-1" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "High-definition video-18" ], "no_evidence" ], [ [ "Game of Thrones-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Samsung Galaxy A10-1" ] ], [ "operation" ], [ [ "Game of Thrones (season 1)-26" ], "no_evidence" ], [ [ "Game of Thrones-1" ] ], [ "operation" ] ] ] }, { "qid": "127e33dd84829d6283ef", "term": "Tenth Amendment to the United States Constitution", "description": "says powers not Constitutionally granted to the Federal Government belong to States or the People", "question": "Was the tenth Amendment to the Constitution written using Pitman shorthand?", "answer": false, "facts": [ "Pitman shorthand was invented in 1837.", "The tenth Amendment to the Constitution was added in 1791." ], "decomposition": [ "When was Pitman shorthand invented?", "When was the tenth Amendment to the Constitution added?", "Did #1 happen before #2?" ], "evidence": [ [ [ [ "Pitman shorthand-1" ] ], [ [ "Tenth Amendment to the United States Constitution-1" ] ], [ "operation" ] ], [ [ [ "Pitman shorthand-5" ] ], [ [ "Tenth Amendment to the United States Constitution-1" ] ], [ "operation" ] ], [ [ [ "Pitman shorthand-1" ] ], [ [ "Tenth Amendment to the United States Constitution-1" ] ], [ "operation" ] ] ] }, { "qid": "d513fadf58cbb9cd1d9d", "term": "Skype", "description": "telecommunications software service", "question": "Are the founders of Skype from Asia?", "answer": false, "facts": [ "Skype was created by the Niklas Zennström and the Dane Janus Friis.", "Niklas Zennström and Dane Janus Friis are from Sweden.", "Sweden is located in Europe, not Asia. " ], "decomposition": [ "Who are the founders of Skype?", "What country are #1 from?", "What continent is #2 on?", "Is #3 Asia?" ], "evidence": [ [ [ [ "Skype-4" ] ], [ [ "Skype-4" ] ], [ [ "Outline of Denmark-2", "Sweden-1" ] ], [ "operation" ] ], [ [ [ "Skype-9" ] ], [ [ "Skype-9" ] ], [ [ "Denmark-1" ] ], [ "operation" ] ], [ [ [ "Skype-9" ] ], [ [ "Skype-9" ] ], [ [ "Denmark-1", "Sweden-1" ] ], [ "operation" ] ] ] }, { "qid": "4454659b2638172ab151", "term": "Mercury (element)", "description": "Chemical element with atomic number 80", "question": "Can you transport a coin along a sea of mercury?", "answer": true, "facts": [ "The density of an object determines if it will float.", "An object will float if it is less dense than the liquid it is placed in.", "Mercury is liquid at room temperature.", "The density of mercury is 13.56 g/cm3.", "The density of a penny is 7.15 g/cm3." ], "decomposition": [ "What is the density of mercury?", "What is the density of a typical coin?", "Is #2 less than #1?", "Considering #3 and the principle of flotation, will the coin float along mercury sea surface?" 
], "evidence": [ [ [ [ "Mercury (element)-7" ] ], [ [ "Quarter (United States coin)-3" ] ], [ "operation" ], [ [ "Archimedes' principle-3" ], "operation" ] ], [ [ [ "Mercury (element)-7" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Buoyancy-1" ], "operation" ] ], [ [ [ "Mercury (element)-7" ] ], [ [ "Coin-2", "Metal-9" ] ], [ "operation" ], [ [ "Buoyancy-2" ], "operation" ] ] ] }, { "qid": "3e9e3ccc9fc4d44a3eb4", "term": "Astronaut", "description": "Person who commands, pilots, or serves as a crew member of a spacecraft", "question": "Can actress Danica McKellar skip astronaut education requirements?", "answer": true, "facts": [ "Astronaut's are required to have a bachelor's degree in engineering, biological science, physical science, computer science, or mathematics.", "Actress Danica McKellar graduated summa cum laude from UCLA with a degree in Mathematics." ], "decomposition": [ "Astronauts can have any one of which degrees?", "What degree does Danica McKellar have?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Danica McKellar-5" ] ], [ "operation" ] ], [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Danica McKellar-5" ] ], [ "operation" ] ], [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Danica McKellar-5" ] ], [ "operation" ] ] ] }, { "qid": "b2dddce981183d5a083d", "term": "Robert Downey Jr.", "description": "American actor", "question": "Did Robert Downey Jr. possess same caliber gun as Resident Evil's Barry Burton?", "answer": true, "facts": [ "Robert Downey Jr. was arrested in 1996 n drug and weapons charges and possessed a .357 Magnum.", "Barry Burton, a character in the Resident Evil series, used a Colt Python.", "The Colt Python is a type of .357 Magnum revolver." ], "decomposition": [ "What type of gun did Robert Downey Jr. have when he was arrested?", "What gun does Barry Burton use?", "What type of gun is #2?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Robert Downey Jr.-13" ] ], [ [ "Resident Evil 3: Nemesis-10" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Robert Downey Jr.-13" ] ], [ [ "Resident Evil 3: Nemesis-10" ], "no_evidence" ], [ [ "Colt Python-1" ] ], [ "operation" ] ], [ [ [ "Robert Downey Jr.-13" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3f7daff5574045d7b737", "term": "Olive", "description": "Species of plant", "question": "Would Bugs Bunny harm an olive tree in the real world?", "answer": true, "facts": [ "Bugs Bunny is an anthropomorphic gray and white rabbit.", "Rabbits eat the bark of olive trees and can do considerable damage, especially to young trees." ], "decomposition": [ "What kind of animal is Bugs Bunny?", "Do #1 eat and damage the bark of olive trees?" ], "evidence": [ [ [ [ "Bugs Bunny-2" ] ], [ [ "Olive-73" ] ] ], [ [ [ "Bugs Bunny-2" ] ], [ [ "Olive-73" ], "operation" ] ], [ [ [ "Bugs Bunny-2" ] ], [ [ "Olive-73" ] ] ] ] }, { "qid": "f61620c8fca3a1530998", "term": "Daily Mirror", "description": "British daily tabloid newspaper owned by Reach plc.", "question": "Can a copy of The Daily Mirror sustain a campfire?", "answer": true, "facts": [ "The Daily Mirror is a British tabloid made of paper.", "Kindling helps sustain a fire because easily combustible help a fire keep igniting.", "Paper is capable of igniting and burning easily." ], "decomposition": [ "What kind of product is The Daily Mirror?", "Is #1 made of combustible material?" 
], "evidence": [ [ [ [ "Daily Mirror-1" ] ], [ "no_evidence" ] ], [ [ [ "Daily Mirror-1" ] ], [ [ "Newsprint-1" ], "no_evidence", "operation" ] ], [ [ [ "Daily Mirror-1" ] ], [ [ "Combustibility and flammability-2" ] ] ] ] }, { "qid": "848941fb02c8818a4e1f", "term": "Porsche", "description": "automotive brand manufacturing subsidiary of Volkswagen", "question": "Could a Porsche 992 Turbo S defeat Usain Bolt in a 100 meter sprint?", "answer": true, "facts": [ "The Porsche 992 Turbo S can accelerate to 62 mph in 2.7 seconds.", "Usain Bolt's top speed ever measured is 27.79 mph." ], "decomposition": [ "What is the max speed of a Porsche 992 Turbo S?", "What is Bolt's top speed?", "Is #1 faster than #2?" ], "evidence": [ [ [ [ "Porsche 992-8" ] ], [ [ "Usain Bolt-106" ] ], [ "operation" ] ], [ [ [ "Porsche 992-8" ] ], [ [ "Footspeed-4" ] ], [ "operation" ] ], [ [ [ "Porsche 992-8" ] ], [ [ "Usain Bolt-106" ] ], [ "operation" ] ] ] }, { "qid": "46f53b9e0008ad1110ed", "term": "Cuban Revolution", "description": "Revolution in Cuba between 1953 and 1959", "question": "During the Cuban revolution, did the US experience a population boom?", "answer": true, "facts": [ "After WWII, the US experienced a baby boom.", "WWII ended in 1945." ], "decomposition": [ "When was the Cuban Revolution?", "When did the United States experience a rapid growth in its population?", "Does some or all of #2 overlap with #1?" ], "evidence": [ [ [ [ "Cuban Revolution-8" ] ], [ [ "Baby boom-3" ] ], [ [ "Baby boom-3", "Cuban Revolution-8" ], "operation" ] ], [ [ [ "Cuban Revolution-1" ] ], [ [ "Mid-twentieth century baby boom-1", "Mid-twentieth century baby boom-3" ] ], [ "operation" ] ], [ [ [ "Cuban Revolution-1" ] ], [ [ "Mid-twentieth century baby boom-12" ] ], [ "operation" ] ] ] }, { "qid": "e4115d4b10cb3447c7a2", "term": "Meatball", "description": "dish made from ground meat rolled into a small ball-like form", "question": "Can you buy furniture and meatballs in the same store?", "answer": true, "facts": [ "IKEA is well known for selling cheap, modern furniture.", "IKEA is famous for serving meatballs at their in-store restaurants." ], "decomposition": [ "What is IKEA known for selling?", "What are some delicacies IKEA is known to serve at their in-store restaurants?", "Is meatballs included in #2 and #1 furniture?" ], "evidence": [ [ [ [ "IKEA-1" ] ], [ [ "IKEA-15" ] ], [ [ "IKEA-15" ] ] ], [ [ [ "IKEA-1" ] ], [ [ "IKEA-12" ] ], [ "operation" ] ], [ [ [ "IKEA-1" ] ], [ [ "IKEA-12" ] ], [ "operation" ] ] ] }, { "qid": "39764cc9679c2e0e6435", "term": "2000", "description": "Year", "question": "Was there fear leading up to the year 2000?", "answer": true, "facts": [ "Many computer programs were not designed with the year 2000 in mind.", "People were worried that computers would crash all over the world when the year 2000 arrived.", "Financial and electrical systems require computers to function.", "Without financial and electrical systems there could be global chaos." ], "decomposition": [ "What concerns did people have about computing systems as 2000 approached?", "Did #1 involve a widespread fear of malfunction?" 
], "evidence": [ [ [ [ "Year 2000 problem-1" ] ], [ [ "Year 2000 problem-23" ], "operation" ] ], [ [ [ "Year 2000 problem-1" ] ], [ [ "Year 2000 problem-15" ], "operation" ] ], [ [ [ "Year 2000 problem-1" ] ], [ [ "Year 2000 problem-23" ] ] ] ] }, { "qid": "e0f36bf27467cd086ecd", "term": "Leonardo da Vinci", "description": "15th and 16th-century Italian Renaissance polymath", "question": "Did Leonardo da Vinci lack contemporary peers in his home city?", "answer": false, "facts": [ "Leonardo da Vinci was born in Anchiano, a town in the city of Florence.", "Da Vinci lived during the 15th and 16th century.", "Sandro Boticelli was a Florentine artist 15th and 16th century.", "Donatello was a Florentine artist during the 15th century." ], "decomposition": [ "Which period did Leonardo da Vinci live through and where was his home city?", "When did Sandro Boticelli live through and where was his home city?", "Where was Donatello's home city and what period did he live through?", "Are #1, #2 and #3 different from one another?" ], "evidence": [ [ [ [ "Leonardo da Vinci-1", "Leonardo da Vinci-2" ] ], [ [ "Sandro Botticelli-1", "Sandro Botticelli-2" ] ], [ [ "Donatello-1" ] ], [ "operation" ] ], [ [ [ "Leonardo da Vinci-1" ] ], [ [ "Sandro Botticelli-1" ] ], [ [ "Donatello-1" ] ], [ "operation" ] ], [ [ [ "Leonardo da Vinci-3", "Leonardo da Vinci-7" ] ], [ [ "Sandro Botticelli-1", "Sandro Botticelli-5" ] ], [ [ "Donatello-1" ] ], [ "operation" ] ] ] }, { "qid": "a0ab5b0fc9bb188bcc99", "term": "Nickel", "description": "Chemical element with atomic number 28", "question": "Is nickel dominant material in US 2020 nickels?", "answer": false, "facts": [ "Nickels have been made of various materials including silver in the 1940s.", "Nickels in 2020 are made from a mix of copper and nickel.", "2020 nickels are 25% nickel and 75% copper." ], "decomposition": [ "What is the composition of the US 2020 nickel?", "Of the elements listed in #1, do any of them make up more than 50% of the US 2020 Nickel?", "If #2 is yes, is that element nickel?" ], "evidence": [ [ [ [ "Jefferson nickel-14" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Nickel (United States coin)-1" ], "no_evidence" ], [ [ "Nickel (United States coin)-1" ] ], [ "operation" ] ], [ [ [ "Nickel-5" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "e6d3973ed3feb8a42928", "term": "Bee", "description": "Clade of insects", "question": "Can Africanized bees be considered multicultural?", "answer": true, "facts": [ "Multicultural refers to a blend of several cultures within one organism.", "Africanized bees, also killer bees are a result of crossbreeding.", "Africanized bees are a mix of East African lowland honey bees and European honey bee subspecies such as the Italian honey bee and the Iberian honey bee. " ], "decomposition": [ "What is the definition of multicultural?", "What are Africanized bees a result of?", "What types of bees were part of #2?", "Is #3 an example of #1?" 
], "evidence": [ [ [ [ "Multiculturalism-1" ] ], [ [ "Africanized bee-1" ] ], [ [ "Africanized bee-1" ] ], [ [ "Africanized bee-1", "Multiculturalism-1" ] ] ], [ [ [ "Multiracial people-1" ] ], [ [ "Honey bee-21" ] ], [ [ "Africanized bee-1" ] ], [ "operation" ] ], [ [ [ "Multiculturalism-1" ] ], [ [ "Africanized bee-1" ] ], [ [ "African bee-1", "Western honey bee-1" ] ], [ "operation" ] ] ] }, { "qid": "d22a8c50ff6a652dd49e", "term": "Heracles", "description": "divine hero in Greek mythology, son of Zeus and Alcmene", "question": "Were all of Heracles's children present for his funeral pyre?", "answer": false, "facts": [ "Heracles killed his children by his first wife Megara.", "They were not returned to life prior to his death." ], "decomposition": [ "What did Heracles do to his children by his first wife?", "Are people who have been #1 able to come back to life?" ], "evidence": [ [ [ [ "Heracles-21" ], "no_evidence" ], [ [ "Death-11" ], "operation" ] ], [ [ [ "Megara (mythology)-4" ] ], [ [ "Death (disambiguation)-1" ], "operation" ] ], [ [ [ "Megara (mythology)-4" ] ], [ "operation" ] ] ] }, { "qid": "373634846d34dafefcff", "term": "Phobos (moon)", "description": "natural satellite of Mars", "question": "Is Phobos part of the Andromeda galaxy?", "answer": false, "facts": [ "Phobos orbits around Mars.", "Mars is a planet in Earth's solar system.", "The solar system is in the Milky Way galaxy." ], "decomposition": [ "What planet does Phobos orbit around?", "What solar system is #1 part of?", "What galaxy is #2 part of?", "Is #3 the same as the Andromeda galaxy?" ], "evidence": [ [ [ [ "Phobos (moon)-1" ] ], [ [ "Mars-1" ] ], [ [ "Milky Way-1" ] ], [ [ "Andromeda Galaxy-1" ] ] ], [ [ [ "Phobos (moon)-1" ] ], [ [ "Solar System-2" ] ], [ [ "Milky Way-1" ] ], [ "operation" ] ], [ [ [ "Phobos (moon)-16" ] ], [ [ "Solar System-37" ] ], [ [ "Solar System-73" ] ], [ "operation" ] ] ] }, { "qid": "e98776ed6640c773a31d", "term": "Lobster", "description": "family of crustaceans", "question": "Can lobster breathe in the desert?", "answer": false, "facts": [ "Lobsters use gills to breathe.", "Gills require water to breathe.", "There is no water in the desert. " ], "decomposition": [ "Which part of their body do lobsters breathe with?", "Where does #1 obtain oxygen from?", "Is #2 easily found in the desert?" ], "evidence": [ [ [ [ "Gill-1", "Lobster-14" ] ], [ [ "Aquatic respiration-2" ] ], [ [ "Desert-3" ], "operation" ] ], [ [ [ "Lobster-14" ], "no_evidence" ], [ [ "Lobster-2" ], "no_evidence" ], [ [ "Desert-1" ], "operation" ] ], [ [ [ "Gill-1", "Lobster-14" ] ], [ [ "Gill-1" ] ], [ [ "Desert-1", "Precipitation-1" ] ] ] ] }, { "qid": "ad97256b5f0ab80b948a", "term": "Parody", "description": "Imitative work created to mock, comment on or trivialise an original work", "question": "Are parodies of the President of the United States illegal?", "answer": false, "facts": [ "Parody in the US is protected under fair use in regards to copyright.", "Criticism of political leaders is protected under the 1st Amendment." ], "decomposition": [ "Is parody illegal in the US?", "Is criticism of the government against the US constitution?", "Is #1 or #2 positive?" 
], "evidence": [ [ [ [ "Fair use-1", "Parody-30" ], "no_evidence" ], [ [ "Freedom of speech in the United States-1", "Human rights in the United States-2" ] ], [ "operation" ] ], [ [ [ "Parody-30" ] ], [ [ "Freedom of speech in the United States-1", "Freedom of speech in the United States-34" ] ], [ "operation" ] ], [ [ [ "Parody-30" ] ], [ [ "First Amendment to the United States Constitution-1" ] ], [ "operation" ] ] ] }, { "qid": "42c947475244ed02629e", "term": "Oyster", "description": "salt-water bivalve mollusc", "question": "Can oysters be used in guitar manufacturing?", "answer": true, "facts": [ "Oysters produce nacre", "Nacre is also known as mother of pearl", "Mother of pearl is commonly used as an inlay on guitar fretboards, headstocks, and soundboards" ], "decomposition": [ "What non-food products are derived from oysters?", "Which of #1 are used for decoration?", "What materials are used to decorate a guitar?", "Is there overlap between #2 and #3?" ], "evidence": [ [ [ [ "Nacre-1", "Nacre-17" ], "no_evidence" ], [ [ "Oyster-2" ], "no_evidence" ], [ [ "Guitar-45" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Oyster-9" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Oyster-6" ] ], [ [ "Oyster-2" ] ], [ [ "Inlay (guitar)-1" ] ], [ [ "Inlay (guitar)-1", "Oyster-2" ], "operation" ] ] ] }, { "qid": "29a634ea52cba72c13e4", "term": "Brussels sprout", "description": "vegetable", "question": "Are Brussels sprout particularly good for adrenal fatigue?", "answer": true, "facts": [ "Adenal fatigue is a disorder in which the body does not produce enough hormones and people get tired.", "Brussels sprout are foods rich in vitamin C.", "When stress levels rise, the adrenal glands require more Vitamin C and it is used very quickly." ], "decomposition": [ "What vitamins are found in abundance in Brussels sprouts?", "What vitamins do the adrenal glands require when a body is under stress?", "Is #2 found in #1?" ], "evidence": [ [ [ [ "Brussels sprout-12" ] ], [ [ "Adrenaline-29" ] ], [ "operation" ] ], [ [ [ "Brussels sprout-12" ] ], [ [ "Adrenal gland-2" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Brussels sprout-12" ] ], [ [ "Adrenal fatigue-1", "Adrenal gland-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b331cd0701aa703e188b", "term": "Veto", "description": "legal power to unilaterally stop an official action, especially the enactment of legislation", "question": "Can the US branch of government that has power over the military also have the power to veto?", "answer": true, "facts": [ "The US President is the commander in chief of the US military.", "The executive branch of the USA includes the President.", "The President has power to veto." ], "decomposition": [ "What US branch has power over the military?", "Who has the power to veto?", "Is #2 part of #1?" 
], "evidence": [ [ [ [ "Article Two of the United States Constitution-1", "Article Two of the United States Constitution-4", "Executive (government)-5" ] ], [ [ "Federal government of the United States-18", "Veto-1" ] ], [ "operation" ] ], [ [ [ "Federal government of the United States-17" ] ], [ [ "Federal government of the United States-17", "Federal government of the United States-18" ] ], [ [ "Federal government of the United States-17" ] ] ], [ [ [ "Federal jurisdiction (United States)-4" ] ], [ [ "Veto-34" ] ], [ "operation" ] ] ] }, { "qid": "d9e23006aef632b6d65f", "term": "Kane (wrestler)", "description": "American professional wrestler, actor, businessman, and politician", "question": "Have any murderers outlasted Kane's Royal Rumble record?", "answer": true, "facts": [ "The longest Kane lasted in the Royal Rumble was 53:46 in 2001.", "Chris Benoit lasted over an hour in the 2004 Royal Rumble.", "Chris Benoit murdered his own wife and son in 2007." ], "decomposition": [ "What is Kane's Royal Rumble record?", "Which wrestlers have a Royal Rumble record longer than #1?", "Are any of the wrestlers listed in #2 a murderer?" ], "evidence": [ [ [ [ "Royal Rumble (2001)-13" ], "no_evidence" ], [ [ "Royal Rumble (2004)-19" ] ], [ [ "Chris Benoit-3" ], "operation" ] ], [ [ [ "Royal Rumble (2014)-36" ] ], [ [ "Royal Rumble match-30" ] ], [ [ "Chris Benoit-3" ] ] ], [ [ [ "Kane (wrestler)-3" ] ], [ [ "Chris Benoit-2" ], "no_evidence" ], [ [ "Chris Benoit-3" ], "no_evidence", "operation" ] ] ] }, { "qid": "463567e1582ccab9a048", "term": "Euro", "description": "European currency", "question": "Will a 2 Euro coin float across the Red Sea?", "answer": false, "facts": [ "A 2 Euro coin is made of a mix of copper and brass.", "Objects float if their density is less than water.", "Ancient bronze metal ingots were found on the sea floor off the coast of Italy in 2015." ], "decomposition": [ "What are the material constituents of a 2 Euro coin?", "#1 belong to which family of materials?", "Can non hollow forms of #2 float on water?" ], "evidence": [ [ [ [ "2 euro coin-1" ] ], [ [ "Metal-1" ] ], [ "operation" ] ], [ [ [ "2 euro coin-1" ] ], [ [ "Copper-2" ] ], [ [ "Metal-9" ], "no_evidence" ] ], [ [ [ "2 euro coin-8" ] ], [ [ "Metal-35" ] ], [ "no_evidence" ] ] ] }, { "qid": "ff0c2df8c385ec5189dc", "term": "Sloth", "description": "tree dwelling animal noted for slowness", "question": "Could a sloth hypothetically watch an entire episode of Scrubs underwater?", "answer": true, "facts": [ "Sloths can hold their breath underwater for up to 40 minutes.", "The running time of a Scrubs episode is between 20-23 minutes." ], "decomposition": [ "How long can sloths hold their breath underwater?", "How long is an episode of Scrubs?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Sloth-20" ] ], [ [ "Scrubs (TV series)-69" ] ], [ "operation" ] ], [ [ [ "Sloth-20" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Sloth-20" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "036a0cce807566449c9f", "term": "Marco Polo", "description": "Italian explorer and merchant noted for travel to central and eastern Asia", "question": "Did Marco Polo travel with Christopher Columbus?", "answer": false, "facts": [ "Marco Polo died in 1324.", "Christopher Columbus was born in 1451." ], "decomposition": [ "When did Marco Polo die?", "When was Columbus born?", "Was #1 after #2?" 
], "evidence": [ [ [ [ "Marco Polo-1" ] ], [ [ "Christopher Columbus-1" ] ], [ "operation" ] ], [ [ [ "Marco Polo-24" ] ], [ [ "Christopher Columbus-5" ] ], [ "operation" ] ], [ [ [ "Marco Polo-24" ] ], [ [ "Christopher Columbus-5" ] ], [ "operation" ] ] ] }, { "qid": "c6c7d980dd92cb870ca0", "term": "Presidency of Bill Clinton", "description": "1993–2001 U.S. presidential administration", "question": "Did the Presidency of Bill Clinton conclude with his impeachment?", "answer": false, "facts": [ "Bill Clinton was impeached in 1998.", "Bill Clinton remained in office until 2001." ], "decomposition": [ "In what year was Bill Clinton impeached?", "In what year did Bill Clinton's presidency end?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Bill Clinton-61" ] ], [ [ "Bill Clinton-1", "Impeachment of Bill Clinton-29" ] ], [ "operation" ] ], [ [ [ "Impeachment of Bill Clinton-16" ] ], [ [ "Bill Clinton-1" ] ], [ "operation" ] ], [ [ [ "Bill Clinton-61" ] ], [ [ "Bill Clinton-61" ] ], [ "operation" ] ] ] }, { "qid": "52e1b2df624813a9d66b", "term": "Canary Islands", "description": "Archipelago in the Atlantic and autonomous community of Spain", "question": "Could someone in the Canary Islands fish for largemouth bass?", "answer": false, "facts": [ "The Canary Islands are located in the Atlantic Ocean", "The Atlantic Ocean is a body of salt water", "Largemouth bass live in fresh water" ], "decomposition": [ "What kind of water do largemouth bass live in?", "In what body of water are the Canary Islands located?", "What kind of water is found in #2?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Largemouth bass-1" ] ], [ [ "Canary Islands-1" ] ], [ [ "Atlantic Ocean-31" ] ], [ "operation" ] ], [ [ [ "Largemouth bass-1" ] ], [ [ "Canary Islands-1" ] ], [ [ "Saline water-1" ] ], [ "operation" ] ], [ [ [ "Largemouth bass-1" ] ], [ [ "Canary Islands-1" ] ], [ [ "Archipelago-1" ] ], [ "operation" ] ] ] }, { "qid": "858feef04ea972946401", "term": "Family Guy", "description": "American animated sitcom", "question": "Does Family Guy take place on the American West Coast?", "answer": false, "facts": [ "Family Guy takes place in the fictional town of Quahog, Rhode Island.", "Rhode Island is a state on the American East Coast." ], "decomposition": [ "Where is Family Guy set?", "Is #1 on the American West Coast?" ], "evidence": [ [ [ [ "Family Guy-1" ] ], [ [ "Rhode Island-1" ] ] ], [ [ [ "Rhode Island-89" ] ], [ [ "West Coast of the United States-1" ] ] ], [ [ [ "Family Guy-9" ] ], [ [ "Rhode Island-34" ], "operation" ] ] ] }, { "qid": "fa454eaf401bb6222bf3", "term": "Referee", "description": "person of authority, in a variety of sports, who is responsible for presiding over the game from a neutral point of view", "question": "Is the referee at a soccer match highly visible against the field?", "answer": true, "facts": [ "Referees uniforms for soccer matches are usually bright neon colors.", "An alternative referee uniform color is black and white stripes." ], "decomposition": [ "What are the typically colors of a referee's uniform?", "Is #1 easy to see from afar?" 
], "evidence": [ [ [ [ "Referee (association football)-6" ] ], [ "operation" ] ], [ [ [ "Referee (association football)-6" ] ], [ "no_evidence" ] ], [ [ [ "Kit (association football)-12" ] ], [ [ "Black-3" ], "operation" ] ] ] }, { "qid": "54b08f028141c591badd", "term": "Intellectual disability", "description": "Generalized neurodevelopmental disorder", "question": "Is dyslexia the most common intellectual disability in US?", "answer": false, "facts": [ "An intellectual disability is reflected in below-average IQ and a lack of skills needed for daily living.", "Learning disabilities are weaknesses in certain academic skills. usually, Reading, writing and math.", "Dyslexia is characterized by difficulties with accurate and/or fluent word recognition and by poor spelling and decoding abilities.", "Thomas Jefferson, George Washington, and John F. Kennedy were successful presidents while being dyslexic." ], "decomposition": [ "What are the practical effects of an intellectual disability?", "What are the practical effects of dyslexia?", "Is #2 within the scope of #1?" ], "evidence": [ [ [ [ "Intellectual disability-1" ] ], [ [ "Dyslexia-1" ] ], [ [ "Dyslexia-1", "Intellectual disability-1" ] ] ], [ [ [ "Intellectual disability-1" ] ], [ [ "Dyslexia-1" ] ], [ "operation" ] ], [ [ [ "Intellectual disability-1" ], "no_evidence" ], [ [ "Dyslexia-1" ] ], [ "operation" ] ] ] }, { "qid": "27db273aab9c056e306f", "term": "Ada Lovelace", "description": "English mathematician, computer programming pioneer", "question": "Did Ada Lovelace die tragically young for her era?", "answer": false, "facts": [ "Ada Lovelace died at the age of 36 in 1852.", "The life expectancy in the 1800s was between 35 and 39 years old.", "Tuberculosis was one of the leading causes of death in the 1800s and a vaccine was not made until the 1900s." ], "decomposition": [ "How old was Ada Lovelace when she died?", "In what year did Ada Lovelace die?", "What was the average life expectancy range around #2?", "Is #1 not is #3?" 
], "evidence": [ [ [ [ "Ada Lovelace-6" ] ], [ [ "Ada Lovelace-1" ] ], [ [ "Life expectancy-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ada Lovelace-6" ] ], [ [ "Ada Lovelace-6" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Ada Lovelace-20" ] ], [ [ "Ada Lovelace-20" ] ], [ [ "Life expectancy-10" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b172aea303690917153e", "term": "Constitution of the Philippines", "description": "Supreme law of the Republic of the Philippines", "question": "Does the Constitution of the Philippines copy text from the British constitution?", "answer": false, "facts": [ "The Constitution of the Philippines is a document ratified in 1987", "The British constitution is not an actual document, but a collection of legal statutes, precedent, political custom and social convention" ], "decomposition": [ "What was the British Constitution?", "What kind of document was the Constitution of the Philippines?", "Can #1 copy something from #2" ], "evidence": [ [ [ [ "Constitution of the United Kingdom-1" ] ], [ [ "Constitution of the Philippines-1" ] ], [ "operation" ] ], [ [ [ "Constitution of the United Kingdom-1" ] ], [ [ "Constitution of the Philippines-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Constitution of the United Kingdom-5" ] ], [ [ "Constitution of the Philippines-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "497c4922b0552d7c5693", "term": "Argon", "description": "Chemical element with atomic number 18", "question": "Is Argon near Neon on the periodic table of elements?", "answer": true, "facts": [ "Argon is a noble gas.", "Neon is a noble gas. ", "The noble gases are all clumped together on the periodic table of elements." ], "decomposition": [ "What group of the periodic table is argon in?", "What group of the periodic table is neon in?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Noble gas-1" ] ], [ [ "Noble gas-1" ] ], [ "operation" ] ], [ [ [ "Argon-1" ] ], [ [ "Noble gas-1", "Noble gas-2" ] ], [ "operation" ] ], [ [ [ "Argon-1" ] ], [ [ "Neon-21" ] ], [ [ "Group (periodic table)-5" ] ] ] ] }, { "qid": "4994ea9dbe0705df08fd", "term": "Leaf", "description": "organ of a vascular plant, composing its foliage", "question": "Do oak trees have leaves during winter?", "answer": false, "facts": [ "Oak trees are deciduous.", "Deciduous trees lose their leaves during autumn, and they grow back during spring.", "Winter is between autumn and spring." ], "decomposition": [ "When do oak trees lose their leaves?", "When do oak trees leaves grow back", "Is winter not the season between #1 and #2?" ], "evidence": [ [ [ [ "Deciduous-3", "Oak-1" ] ], [ [ "Deciduous-9" ] ], [ "operation" ] ], [ [ [ "Oak-2" ] ], [ [ "Oak-2" ] ], [ [ "Winter-1" ] ] ], [ [ [ "Oak-2" ] ], [ [ "Oak-2" ], "no_evidence" ], [ [ "Winter-6" ], "operation" ] ] ] }, { "qid": "a11f537f67260464d010", "term": "Mitsubishi", "description": "group of autonomous, Japanese multinational companies", "question": "Can someone in Uberlandia work for Mitsubishi?", "answer": true, "facts": [ "Mitsubishi is a Japanese auto manufacturer", "Mitsubishi operates a plant in Catalao, Brazil", "Uberlandia is just under 70 miles from Catalao" ], "decomposition": [ "How far is Uberlandia from Catalao?", "Is #1 within reasonable distance to commute to work?", "Is there a Mitsubishi organization in Catalao?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "Catalão-1", "Uberlândia-1" ], "no_evidence" ], [ "operation" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Catalão-4" ] ], [ "operation" ], [ [ "Catalão-1" ] ], [ "operation" ] ], [ [ [ "Catalão-1", "Uberlândia-1" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Catalão-1" ], "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "18d50392cc241326112b", "term": "Ronda Rousey", "description": "American professional wrestler, actress, author, mixed martial artist and judoka", "question": "Does Ronda Rousey avoid BBQ restaraunts?", "answer": true, "facts": [ "Ronda Rousey is a professional athlete in MMA.", "Ronda Rousey is a vegan.", "BBQ is a style of restaurant that predominantly serves cooked meat.", "Meat consumption is opposed and avoided by vegans.", "Vegans don't eat meat." ], "decomposition": [ "What kind of food is served at BBQ restaurants?", "What dietary restrictions does Ronda Rousey follow?", "Would #2 avoid #1?" ], "evidence": [ [ [ [ "Ribs (food)-4" ] ], [ [ "Ronda Rousey-73" ] ], [ [ "Veganism-24" ] ] ], [ [ [ "Barbecue restaurant-10" ] ], [ [ "Ronda Rousey-73" ] ], [ [ "Paleolithic diet-11" ], "operation" ] ], [ [ [ "Barbecue-3" ] ], [ [ "Ronda Rousey-73" ] ], [ [ "Veganism-1" ], "operation" ] ] ] }, { "qid": "dc3310bf2a61d1a9f2cf", "term": "Amtrak", "description": "Intercity rail operator in the United States", "question": "Does Amtrak run from NYC directly to the Moai location?", "answer": false, "facts": [ "Amtrak is a series of railways that transport people to various locations.", "The Moai are ancient stone statue faces that are a popular tourist destination.", "The Moai are located on Easter Island, an island in the Pacific ocean, near Chile." ], "decomposition": [ "Which major regions does Amtrak's passenger railroad service cover?", "Where are the Moai located?", "Is #2 located within any of #1?" ], "evidence": [ [ [ [ "Amtrak-1" ] ], [ [ "Moai-8" ] ], [ "operation" ] ], [ [ [ "Amtrak-3" ] ], [ [ "Rapa Nui people-9" ] ], [ "operation" ] ], [ [ [ "Amtrak-1" ] ], [ [ "Moai-1" ] ], [ [ "Polynesia-1" ] ] ] ] }, { "qid": "50bf347c2a05b645a0f9", "term": "Supreme Court of the United States", "description": "Highest court in the United States", "question": "Is Supreme Court of the United States analogous to High Courts of Justice of Spain?", "answer": false, "facts": [ "The Supreme Court of the United States is the final court ad has final say in judicial matters.", "The High Courts of Justice in Spain rule over single communities.", "The Supreme Court of Spain is the highest court in Spain and can overrule lesser courts." ], "decomposition": [ "What is the extent of the jurisdiction of The Supreme Court of the United States?", "Do the High courts of justice (Spain) have the same jurisdiction as #1?" 
], "evidence": [ [ [ [ "Supreme Court of the United States-1" ] ], [ [ "High Courts of Justice of Spain-1", "Judiciary of Spain-7" ] ] ], [ [ [ "Supreme Court of the United States-1" ] ], [ [ "High Courts of Justice of Spain-1" ], "operation" ] ], [ [ [ "Supreme Court of the United States-60" ] ], [ "operation" ] ] ] }, { "qid": "dd3cb87f907f758229ec", "term": "Polymath", "description": "Individual whose knowledge spans a significant number of subjects", "question": "Would Tony Stark be considered a polymath?", "answer": true, "facts": [ "A polymath is a person who has knowledge in a wide variety of subjects.", "Tony Stark is considered a genius in mathematics, engineering, computer science, and physics, as well as demonstrating skills in metalworking, engine design, and genetics." ], "decomposition": [ "What does one have to have to be considered a polymath?", "Does Tony Stark have #1?" ], "evidence": [ [ [ [ "Polymath-1" ] ], [ [ "Iron Man-2" ], "operation" ] ], [ [ [ "Polymath-1" ] ], [ [ "Tony Stark (Marvel Cinematic Universe)-1" ] ] ], [ [ [ "Polymath-1" ] ], [ [ "Iron Man-71" ], "operation" ] ] ] }, { "qid": "f1c38036ed44c2422c7b", "term": "Watergate scandal", "description": "Political scandal that occurred in the United States in the 1970s", "question": "Would Hannah Nixon be proud of Richard Nixon following the Watergate scandal?", "answer": false, "facts": [ "Hannah Nixon was the mother of Richard Nixon.", "Richard Nixon resigned due to the unethical actions that he committed during the Watergate scandal.", "Parents are typically not proud of their children when they act immorally or unethically." ], "decomposition": [ "What is Hannah Nixon relation to Richard Nixon?", "What happened to Richard Nixon as a result of the Watergate scandal?", "Why did Richard Nixon have to #2?", "Are #1's usually proud if their child does #3?" ], "evidence": [ [ [ [ "Hannah Milhous Nixon-1" ] ], [ [ "Richard Nixon-94", "Richard Nixon-95" ] ], [ [ "Richard Nixon-94" ], "operation" ], [ "operation" ] ], [ [ [ "Hannah Milhous Nixon-1" ] ], [ [ "Richard Nixon-4" ] ], [ [ "Richard Nixon-4" ] ], [ "operation" ] ], [ [ [ "Hannah Milhous Nixon-1" ] ], [ [ "Watergate scandal-66" ] ], [ [ "Watergate scandal-65" ] ], [ "operation" ] ] ] }, { "qid": "5f49ef79fd5da00548e3", "term": "Rand Paul", "description": "American politician, ophthalmologist, and United States Senator from Kentucky", "question": "Did Rand Paul frequently swim in Lake Michigan during his undergraduate years?", "answer": false, "facts": [ "Rand Paul joined the swim team when he attended Baylor University.", "Baylor University is located in Waco, Texas.", "Lake Michigan is nearly 1,000 miles from Waco, Texas." ], "decomposition": [ "Where did Rand Paul do his undergraduate studies?", "In what state is #1?", "Is Lake Michigan near #2?" ], "evidence": [ [ [ [ "University of Pittsburgh School of Medicine-26" ] ], [ [ "Baylor University-1" ] ], [ "operation" ] ], [ [ [ "Rand Paul-2" ] ], [ [ "Baylor University-1" ] ], [ "operation" ] ], [ [ [ "Rand Paul-9" ] ], [ [ "Baylor University-1" ] ], [ [ "Lake Michigan-2" ] ] ] ] }, { "qid": "00e13b8d03d35929c049", "term": "Retail", "description": "Sale of goods and services from individuals or businesses to the end-user", "question": "Is SnapCap an example of a retail store?", "answer": false, "facts": [ "SnapCap specializes in small business loans.", "Retail stores sell products to individual consumers. ", "Small businesses are not individual consumers." 
], "decomposition": [ "What does SnapCap specialize in?", "Who do #1's sell their products to?", "Who do retail stores sell their products to?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "LendingTree-8" ], "no_evidence" ], [ "no_evidence" ], [ [ "Retail-6" ] ], [ "operation" ] ], [ [ [ "LendingTree-8" ], "no_evidence" ], [ [ "LendingTree-1" ], "no_evidence" ], [ [ "Retail-1" ] ], [ "operation" ] ], [ [ [ "Payday loan-1" ], "no_evidence" ], [ [ "Payday loan-1" ] ], [ [ "Retail-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2c9f3c00412a4018380f", "term": "Hair", "description": "protein filament that grows from follicles found in the dermis, or skin", "question": "Can furniture be made of hair?", "answer": true, "facts": [ "Hair is a protein filament that grows from living bodies.", "Hair is durable when woven together. ", "Furniture cushions can be maid from horse hair. " ], "decomposition": [ "What is hair?", "Can #1 be woven together securely?" ], "evidence": [ [ [ [ "Hair-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Hair-2" ] ], [ [ "Alpha-keratin-4" ], "operation" ] ], [ [ [ "Hair-2" ] ], [ [ "Braid-2", "Cushion-1" ], "no_evidence" ] ] ] }, { "qid": "3a343ae8735fc81f0377", "term": "March", "description": "third month in the Julian and Gregorian calendars", "question": "Is March named after Jupiter's son in Roman mythology?", "answer": true, "facts": [ "March is named after the Roman god Mars.", "Mars was the son of the Roman gods Jupiter and Juno." ], "decomposition": [ "Who are the sons of Jupiter in Roman mythology?", "Who is the month of March named after?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Hercules-1", "Mars (mythology)-1", "Vulcan (mythology)-41" ] ], [ [ "March-1" ] ], [ "operation" ] ], [ [ [ "Jupiter (mythology)-106" ] ], [ [ "Apollo-25" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mars (mythology)-7" ] ], [ [ "Martius (month)-1" ] ], [ "operation" ] ] ] }, { "qid": "0b0dc8dc326765c1540b", "term": "Cultural hegemony", "description": "Marxist notion of cultural dominance", "question": "Can the theory of cultural hegemony explain global warming?", "answer": false, "facts": [ "Cultural hegemony is a theory of social and cultural dominance rooted in Marxism", "Marxism is a philosophy with applications in the social sciences and humanities", "Global warming is a phenomenon dealt with by environmental science" ], "decomposition": [ "The theory of cultural hegemony is rooted in which philosophy?", "Which branch of science does #1 have applications in?", "Which branch of science does global warming concern?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Cultural hegemony-1" ] ], [ [ "Marxism-3" ] ], [ [ "Global warming-71", "Svante Arrhenius-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cultural hegemony-1" ] ], [ [ "Cultural hegemony-2" ], "no_evidence" ], [ [ "Global warming-19", "Scientific consensus on climate change-43" ] ], [ "operation" ] ], [ [ [ "Cultural hegemony-1" ] ], [ [ "Marxist philosophy-1" ] ], [ [ "Atmospheric chemistry-2" ] ], [ "operation" ] ] ] }, { "qid": "2bfe7f37f939ee456600", "term": "Pig Latin", "description": "secret language game", "question": "Is it impossible for pigs to use pig latin?", "answer": true, "facts": [ "Pig latin is a language game played by rearranging parts of words to disguise them", "Pigs are ungulates and incapable of speech using human languages" ], "decomposition": [ "What is referred to as pig latin?", "Which species are capable of using #1?", "Are pigs excluded from #2?" 
], "evidence": [ [ [ [ "Pig Latin-1" ] ], [ [ "English language-1", "Human-1", "Language-15" ] ], [ "operation" ] ], [ [ [ "Pig Latin-1" ] ], [ [ "Language-1" ] ], [ "operation" ] ], [ [ [ "Pig Latin-1" ] ], [ [ "Great ape language-1", "Language-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a7c6f840fb6a8be77625", "term": "J. D. Salinger", "description": "American writer", "question": "Is J.D. Salinger's most successful work influential to killers?", "answer": true, "facts": [ "J.D. Salinger's most popular work was Catcher in the Rye.", "John Hinckley Jr. tried to assassinate Ronald Reagan after reading Catcher in the Rye.", "Mark David Chapman had a copy of Catcher in the Rye when he assassinated John Lennon.", "Robert John Bardo carried a copy of Catcher in the Rye when he murdered actress Rebecca Schaeffer." ], "decomposition": [ "Which of J.D. Salinger's novels was most popular?", "has #1 been associated with inspiring murder?" ], "evidence": [ [ [ [ "J. D. Salinger-1" ] ], [ [ "The Catcher in the Rye-26" ] ] ], [ [ [ "J. D. Salinger-1" ] ], [ [ "The Catcher in the Rye-26" ] ] ], [ [ [ "J. D. Salinger-1" ] ], [ [ "The Catcher in the Rye-26" ] ] ] ] }, { "qid": "b7c897e34556ecccbc47", "term": "Ice", "description": "water frozen into the solid state", "question": "Did Ice make people rich?", "answer": true, "facts": [ "Trading ice was common in the 1800s.", "People created industries harvesting and selling ice.", "Some ice sellers became extremely rich. " ], "decomposition": [ "In the 1800's, what item was commonly traded?", "Did some people become rich off of selling #1?" ], "evidence": [ [ [ [ "Ice trade-1" ] ], [ [ "Ice trade-10" ] ] ], [ [ [ "Ice-48" ], "no_evidence" ], [ [ "Ice-49" ], "operation" ] ], [ [ [ "Ice trade-1" ] ], [ [ "Ice trade-2" ] ] ] ] }, { "qid": "05cce7e36ecb95166a28", "term": "Justin Bieber", "description": "Canadian singer-songwriter and actor", "question": "Does Justin Bieber vote in October?", "answer": true, "facts": [ "Justin Bieber is a Canadian citizen", "Canadian elections are held on the third Monday in October" ], "decomposition": [ "What country is Justin Bieber a citizen of?", "When does #1 hold its national elections?", "Is #2 October?" ], "evidence": [ [ [ [ "Justin Bieber-1", "Justin Bieber-51" ] ], [ [ "Fixed election dates in Canada-13" ] ], [ "operation" ] ], [ [ [ "Justin Bieber-1" ] ], [ [ "Elections in Canada-23" ] ], [ "operation" ] ], [ [ [ "Justin Bieber-1" ] ], [ [ "Elections in Canada-13" ] ], [ "operation" ] ] ] }, { "qid": "a60c5a3a8bd811b26208", "term": "Flying fish", "description": "Family of marine fish that can make powerful, self-propelled leaps out of water", "question": "Do flying fish have good eyesight?", "answer": true, "facts": [ "Flying fish are commonly found in the epipelagic zone, the top layer of the ocean to a depth of about 200 m (656 ft). ", "The epipelagic zone is the illuminated zone at the surface of the sea where enough light is available for photosynthesis. ", "Good eyesight is a necessary survival trait for animals living in well-lit areas." ], "decomposition": [ "Which layer of the ocean are flying fish usually found?", "What are the lighting conditions characteristic of #1?", "Would good eyesight be necessary for organisms in #2 environment?" 
], "evidence": [ [ [ [ "Flying fish-6" ] ], [ [ "Photic zone-1" ] ], [ [ "Photic zone-3" ], "no_evidence" ] ], [ [ [ "Flying fish-6" ] ], [ [ "Photic zone-1" ] ], [ [ "Photic zone-1" ], "operation" ] ], [ [ [ "Flying fish-6" ] ], [ [ "Photic zone-1" ] ], [ "operation" ] ] ] }, { "qid": "b113a9be03fa98305a1a", "term": "Eddie Murphy", "description": "American stand-up comedian and actor", "question": "Could Eddie Murphy dial 911 in a car as a young child?", "answer": false, "facts": [ "Eddie Murphy was born in 1961.", "Car phones did not become commonplace in cars in America until 1984." ], "decomposition": [ "What year was Eddie Murphy born in?", "When did car phones become common in American cars?", "Is #1 after #2?" ], "evidence": [ [ [ [ "Eddie Murphy-1" ] ], [ [ "Car phone-2" ] ], [ "operation" ] ], [ [ [ "Eddie Murphy-1" ] ], [ [ "Car phone-8" ] ], [ "operation" ] ], [ [ [ "Eddie Murphy-1" ] ], [ [ "Car phone-4", "Car phone-6" ] ], [ "operation" ] ], [ [ [ "Eddie Murphy-1" ] ], [ [ "Car phone-8" ] ], [ "operation" ] ] ] }, { "qid": "2e586d3aea95150de42b", "term": "Carl Linnaeus", "description": "Swedish botanist, physician, and zoologist", "question": "Does Carl Linnaeus share the same final resting place as Michael Jackson?", "answer": false, "facts": [ "Carl Linnaeus is buried in the Uppsala Cathedral.", "Michael Jackson is entombed at the Forest Lawn Memorial Park." ], "decomposition": [ "Where is Carl Linnaeus buried?", "Where is Michael Jackson entombed?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Uppsala Cathedral-3" ], "no_evidence" ], [ [ "Forest Lawn Memorial Park (Glendale)-13" ] ], [ "operation" ] ], [ [ [ "Uppsala Cathedral-29" ] ], [ [ "Michael Jackson memorial service-17" ] ], [ [ "Michael Jackson memorial service-17", "Uppsala Cathedral-29" ] ] ], [ [ [ "Uppsala Cathedral-3" ] ], [ [ "Michael Jackson-70" ] ], [ "operation" ] ] ] }, { "qid": "26aefc40b2d04ca6b78b", "term": "Xenophobia", "description": "dislike of that which is perceived to be foreign or strange", "question": "Is xenophobia hypothetically unimportant between Saladin and Ali Askari?", "answer": true, "facts": [ "Xenophobia is the dislike of someone that is foreign or from a different background.", "Saladin was a Kurdish leader that became sultan of Egypt.", "Ali Askari was a Kurdish politician." ], "decomposition": [ "Which relation between two parties could lead bring about xenophobia?", "What was Saladin's ethnicity?", "What was Ali Askari's ethnicity?", "Does the relation between #2 and #3 fail to describe #1?" ], "evidence": [ [ [ [ "Xenophobia-1" ] ], [ [ "Saladin-1" ] ], [ [ "Ali Askari-3" ] ], [ "operation" ] ], [ [ [ "Xenophobia-1" ] ], [ [ "Saladin-1" ] ], [ [ "Ali Askari-2" ] ], [ "operation" ] ], [ [ [ "Xenophobia-1" ] ], [ [ "Saladin-1" ] ], [ [ "Ali Askari-3" ] ], [ [ "In-group and out-group-1" ] ] ] ] }, { "qid": "4cd5b0dd876ab2e528cb", "term": "Diary", "description": "Written record with discrete entries arranged by date", "question": "Can a dolphin keep a diary?", "answer": false, "facts": [ "A diary is a written record.", "Dolphins cannot write." ], "decomposition": [ "What is a diary?", "What does one need to do in order to keep #1?", "Can a dolphin do #2?" 
], "evidence": [ [ [ [ "Diary-1" ] ], [ [ "Writing-1" ] ], [ "operation" ] ], [ [ [ "Diary-1" ] ], [ [ "Hand-1" ] ], [ [ "Dolphin-20" ] ] ], [ [ [ "Diary-1" ] ], [ [ "Diary-19" ] ], [ [ "Dolphin-1" ], "operation" ] ] ] }, { "qid": "ecc4a9173c24ace0215a", "term": "Tourism", "description": "travel for recreational or leisure purposes", "question": "Is the Jurassic era a tourist destination?", "answer": false, "facts": [ "The Jurassic era is a period of time in the past.", "Time travel does not currently exist. " ], "decomposition": [ "When did the Jurassic era occur?", "Can tourist travel to #1?" ], "evidence": [ [ [ [ "Jurassic-1" ] ], [ [ "Time travel-2" ] ] ], [ [ [ "Jurassic-1" ] ], [ "operation" ] ], [ [ [ "Jurassic-1" ] ], [ "operation" ] ] ] }, { "qid": "aed17ee939fc9dbe75fd", "term": "CAPTCHA", "description": "computer test to discriminate human users from spambots", "question": "Are any of the words that CAPTCHA stands for palindromes?", "answer": false, "facts": [ "A palindrome is a word that reads the same backwards and forwards like madam.", "CAPTCHA stands for: Completely Automated Public Turing test to tell Computers and Humans Apart." ], "decomposition": [ "What words does CAPTCHA stand for?", "What is the characteristic of a palindrome?", "Does any word from #1 have the characteristic of #2?" ], "evidence": [ [ [ [ "CAPTCHA-1" ] ], [ [ "Palindrome-1" ] ], [ "operation" ] ], [ [ [ "CAPTCHA-1" ] ], [ [ "Palindrome-1" ] ], [ "operation" ] ], [ [ [ "CAPTCHA-1" ] ], [ [ "Palindrome-1" ] ], [ "operation" ] ] ] }, { "qid": "38a0f2619ec98ba7fbd5", "term": "The Young and the Restless", "description": "television series", "question": "Would a binge watch of entire Young and the Restless take longer than a leap year?", "answer": true, "facts": [ "A leap year has 366 total days.", "As of March 19th, 2018, every episode of the Young and the Restless would take 467 days and 2 hours to watch." ], "decomposition": [ "How many days are in a leap year?", "How long would it take to watch every episode of the Young and the Restless?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Leap year-2" ] ], [ [ "The Young and the Restless-1", "The Young and the Restless-12" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Leap year-1", "Leap year-6" ] ], [ [ "The Young and the Restless-1", "The Young and the Restless-12" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Leap year-2" ] ], [ [ "The Young and the Restless-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d8bd905e243afdcf7f92", "term": "Metroid", "description": "Video game series", "question": "Was the original Metroid groundbreaking for its polygons?", "answer": false, "facts": [ "Metroid was a 1986 video game released on the Nintendo Entertainent System.", "Polygons were a graphics style that became prominent in the 1990s on the Sony Playstation.", "Nintendo Entertainment System games had a 2-D pixel-art style.", "Metroid is hailed as being the first mainstream game with a playable female protagonist." ], "decomposition": [ "When was video game Metroid originally released?", "What does polygons in video gaming represent?", "When did #2 become prominent?", "Is #1 after #3?" 
], "evidence": [ [ [ [ "Metroid (video game)-1" ] ], [ [ "Polygon (computer graphics)-1" ] ], [ [ "Computer graphics-39" ] ], [ "operation" ] ], [ [ [ "Metroid (video game)-1" ] ], [ [ "3D computer graphics-1" ], "no_evidence" ], [ [ "Fifth generation of video game consoles-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Metroid (video game)-13" ] ], [ [ "Polygon (website)-1" ] ], [ [ "Polygon (website)-1" ] ], [ "operation" ] ] ] }, { "qid": "fb0aef4d6278a1623abc", "term": "Preventive healthcare", "description": "Prevent and minimize the occurrence of diseases", "question": "Do you need to schedule separate preventive healthcare and sickness visits? ", "answer": true, "facts": [ "Preventive healthcare options are typically covered at no charge by health insurance.", "Sick visits to the doctor are billed separately from preventive healthcare visits.", "Sick visits and preventive healthcare visits are generally given different time allotments. " ], "decomposition": [ "How are preventive healthcare visits billed to insurance companies?", "How are sick visits to the doctor billed to insurance companies?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "Preventive healthcare-60" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Health care-11" ], "no_evidence" ], [ [ "Health care-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Preventive healthcare-54" ], "no_evidence" ], [ [ "Managed care-38" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "811f06c3d045430a5359", "term": "Black", "description": "The darkest shade, resulting from the absence or complete absorption of light. Like white and grey, it has no hue", "question": "Is Anakin Skywalker from Star Wars associated with the color black?", "answer": true, "facts": [ "As a Jedi during the Clone Wars, Anakin Skywalker often wore black robes.", "After he was burned and transformed into the cyborg Darth Vader, he received a distinctive and famous all-black outfit including a black mask." ], "decomposition": [ "What is the color of most outfits worn by Star Wars' Anakin Skywalker?", "Is #1 the same as black?" ], "evidence": [ [ [ [ "Darth Vader-1", "Darth Vader-15" ] ], [ "operation" ] ], [ [ [ "Darth Vader-15" ] ], [ "operation" ] ], [ [ [ "Darth Vader-1", "Darth Vader-15" ] ], [ "operation" ] ] ] }, { "qid": "b6d02dc46dfc49f32984", "term": "Whole genome sequencing", "description": "A process that determines the complete DNA sequence of an organism's genome at a single time", "question": "Can whole genome sequencing be used for COVID-19?", "answer": false, "facts": [ "Whole genome sequencing is used to analyze DNA", "RNA viruses do not have DNA", "COVID-19 is an RNA virus." ], "decomposition": [ "What does the whole genome sequencing process determine?", "Which virus is responsible for COVID-19?", "Does #2 have #1?" 
], "evidence": [ [ [ [ "Whole genome sequencing-6" ] ], [ [ "Coronavirus-1" ] ], [ [ "Coronavirus-1" ], "operation" ] ], [ [ [ "Whole genome sequencing-1" ] ], [ [ "Coronavirus-1" ] ], [ [ "RNA virus-1", "RNA virus-2" ], "operation" ] ], [ [ [ "Whole genome sequencing-1" ] ], [ [ "Coronavirus disease-8" ] ], [ [ "Positive-sense single-stranded RNA virus-1" ] ] ] ] }, { "qid": "3848c0621fc2a9d1c79f", "term": "Viscosity", "description": "Resistance of a fluid to shear deformation", "question": "Do people with swallowing disorders need high viscosity drinks?", "answer": true, "facts": [ "Swallowing disorders can make thin liquids like water dangerous to drink.", "Liquid thickeners are marketed towards people with difficulty drinking." ], "decomposition": [ "If a person has a swallowing disorder, what types of liquids are dangerous for them to drink?", "Are high viscosity drinks the opposite of #1?" ], "evidence": [ [ [ [ "Thickened fluids-1" ] ], [ [ "Viscosity-1" ], "operation" ] ], [ [ [ "Dysphagia-2" ], "no_evidence" ], [ [ "Viscosity-1" ], "operation" ] ], [ [ [ "Oropharyngeal dysphagia-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7bb579fa94d5f0d58ae1", "term": "Blues", "description": "Musical form and music genre", "question": "Were Depeche Mode heavily influenced by blues music?", "answer": false, "facts": [ "Blues incorporated spirituals, work songs, field hollers, shouts, chants, and rhymed simple narrative ballads and was derived from African-Americans.", "Blues music uses instruments like slide guitar, harmonica, piano, and bass drums.", "Depeche Mode are a British pop synth group.", "Depeche Mode uses computer synthesizers to create their unique sound as well as heavy rock guitars.", "Depeche Mode was influenced by The Cure, and Ultravox, new wave rock bands." ], "decomposition": [ "What kind of songs and instruments are associated with Blues?", "What kind of musical instruments does the Depeche Mode use to create music?", "Is #2 very similar to #1?" ], "evidence": [ [ [ [ "Blues-37" ] ], [ [ "Depeche Mode-35" ] ], [ "operation" ] ], [ [ [ "Blues-1" ] ], [ [ "Depeche Mode-1" ] ], [ "operation" ] ], [ [ [ "Blues-1" ], "no_evidence" ], [ [ "Depeche Mode-6" ] ], [ "operation" ] ] ] }, { "qid": "83e854a290df701ecb8d", "term": "Mail carrier", "description": "employee of the post office or postal service, who delivers mail to residences and businesses", "question": "Was being a mail carrier considered one of the most dangerous jobs?", "answer": true, "facts": [ "The Pony Express was one of the first mail carrier services.", "The Pony Express operated form 1860 to 1861", "Pony Express riders would have to travel hundreds of miles on horse back through extreme weather and terrain. ", "The Pony Express sought to hire young expert horse riders willing to risk death." ], "decomposition": [ "What was the name of the mail carrier service that operated from 1860 to 1861?", "What would riders on #1 have to endure?", "Is #2 considered very dangerous?" 
], "evidence": [ [ [ [ "Pony Express-1" ] ], [ [ "Pony Express-31" ] ], [ "operation" ] ], [ [ [ "Pony Express-1" ] ], [ [ "Pony Express-38", "Pony Express-40" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Pony Express-1" ] ], [ [ "Pony Express-30", "Pony Express-31" ] ], [ [ "Pony Express-30", "Pony Express-31" ] ] ] ] }, { "qid": "2cb0bc060c5fb708a43f", "term": "Leopard seal", "description": "Species of mammal", "question": "Is Sea World hazardous to leopard seal's health?", "answer": true, "facts": [ "Leopard seals have only one natural predator, the killer whale.", "Sea World is an aquatic show that involves many water animals.", "Killer Whales, such as Tilikum, are headliners at Sea World." ], "decomposition": [ "What is the leopard seals's predator?", "Would one find a #1 at Sea World?" ], "evidence": [ [ [ [ "Leopard seal-1" ] ], [ [ "Kamogawa Sea World-15" ] ] ], [ [ [ "Leopard seal-1" ] ], [ [ "Shamu-1" ], "operation" ] ], [ [ [ "Killer whale-7", "Leopard seal-1" ] ], [ [ "SeaWorld-1" ], "operation" ] ] ] }, { "qid": "812774b35638d154d0d1", "term": "Compact disc", "description": "Optical disc for storage and playback of digital audio", "question": "Would a compact disc melt in magma?", "answer": true, "facts": [ "Magma is the molten material beneath the earth's surface.", "The temperature of magma ranges between 1300F and 2400F degrees.", "A compact disc is made of polycarbonate plastic.", "Polycarbonate plastic melts at 311F degrees." ], "decomposition": [ "What material is a compact disc made of?", "At what temperature does #1 melt", "What is the typical temperature range of magma?", "Is #2 less than or within #3" ], "evidence": [ [ [ [ "Compact disc-23" ] ], [ [ "Polycarbonate-25" ], "no_evidence" ], [ [ "Magma-6" ] ], [ "no_evidence", "operation" ] ], [ [ [ "CD-ROM-4" ] ], [ [ "Plastic-19" ] ], [ [ "Magma-6" ] ], [ "operation" ] ], [ [ [ "CD-R-7" ] ], [ [ "Polycarbonate-10" ] ], [ [ "Magma-6" ] ], [ "operation" ] ] ] }, { "qid": "3bba311f7abb8ad4be82", "term": "Monarch", "description": "Person at the head of a monarchy", "question": "Does Canada have a relationship with a monarch?", "answer": true, "facts": [ "Canada is a constitutional monarchy.", "The head of the monarchy that rules Canada is Queen Elizabeth." ], "decomposition": [ "What system of government does Canada follow?", "Who is the head of #1?", "Is #2 a monarch?" ], "evidence": [ [ [ [ "Government of Canada-6" ] ], [ "no_evidence" ], [ [ "Records of heads of state-7" ] ] ], [ [ [ "By the Grace of God-10", "Constitutional monarchy-1" ], "no_evidence" ], [ [ "Monarchy-1" ] ], [ "operation" ] ], [ [ [ "Government of Canada-1" ] ], [ [ "Government of Canada-1" ] ], [ [ "Government of Canada-1" ] ] ] ] }, { "qid": "9b00b215a02a7e4a8aa1", "term": "Edmund Hillary", "description": "New Zealand mountaineer", "question": "Would Mount Wycheproof be a breeze for Edmund Hillary?", "answer": true, "facts": [ "Edmund Hillary was a mountaineer that climbed Mount Everest.", "Mount Everest reaches 29,029 feet in the air.", "Mount Wycheproof is the smallest mountain in the world.", "Mount Wycheproof rises a mere 486 feet above sea level." ], "decomposition": [ "Which famous mountain has Edmund Hillary climbed?", "How tall is #1?", "How tall is Mount Wycheproof?", "Is #3 several thousand feet smaller than #2?" 
], "evidence": [ [ [ [ "Edmund Hillary-1" ] ], [ [ "Mount Everest-2" ] ], [ [ "Mount Wycheproof-1" ] ], [ "operation" ] ], [ [ [ "Edmund Hillary-1" ] ], [ [ "Mount Everest-2" ] ], [ [ "Mount Wycheproof-1" ] ], [ "operation" ] ], [ [ [ "Edmund Hillary-1" ] ], [ [ "Mount Everest-2" ] ], [ [ "Mount Wycheproof-3" ] ], [ "operation" ] ] ] }, { "qid": "1a0dcaf77f1c45f67d25", "term": "Modern Family", "description": "American comedy TV series", "question": "Did Modern Family win a Slammy award?", "answer": false, "facts": [ "Modern Family is a television sitcom", "The Slammy Awards were presented to people involved in professional wrestling" ], "decomposition": [ "What television genre is Modern Family?", "What genre are the Slammy Awards given to?", "Is #1 and #2 the same?" ], "evidence": [ [ [ [ "Modern Family-1" ] ], [ [ "Slammy Award-1" ] ], [ "operation" ] ], [ [ [ "Modern Family-1" ] ], [ [ "Slammy Award-1" ] ], [ "operation" ] ], [ [ [ "Modern Family-1" ] ], [ [ "Slammy Award-1" ] ], [ "operation" ] ] ] }, { "qid": "1ffe6c11a37f8e4e2542", "term": "Grey seal", "description": "species of seal", "question": "Can a grey seal swim in the same water as the subject of Moby Dick?", "answer": true, "facts": [ "The range of gray seals is limited to parts of the northern hemisphere bordered by the Atlantic ocean", "The subject of Moby Dick was a sperm whale", "Sperm whales can be found in the north Atlantic, in addition to most other bodies of water on earth." ], "decomposition": [ "What kind of whale was Moby Dick?", "What is the range of #1?", "What is the range of gray seals?", "Is there an overlap between #2 and #3?" ], "evidence": [ [ [ [ "Moby-Dick-1" ] ], [ [ "Sperm whale-2" ], "no_evidence" ], [ [ "Grey seal-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Moby-Dick-1" ] ], [ [ "Sperm whale-2" ] ], [ [ "Grey seal-1" ] ], [ "operation" ] ], [ [ [ "Moby-Dick-1" ] ], [ [ "Sperm whale-2" ] ], [ [ "Grey seal-7" ] ], [ "operation" ] ] ] }, { "qid": "7447fdc904d63a6f77bf", "term": "C-SPAN", "description": "American pay television network", "question": "Does the name C-SPAN refer to a form of telecommunications that utilizes outer space?", "answer": true, "facts": [ "The S in C-SPAN refers to Satellite.", "Satellite communications require communicating with satellites that orbit the Earth in outer space." ], "decomposition": [ "What does C-SPAN's transmission equipment consist of, according to its full meaning?", "Is any of #1 located in outer space?" ], "evidence": [ [ [ [ "Cable television-2" ] ], [ [ "Satellite-1" ], "operation" ] ], [ [ [ "C-SPAN-14" ] ], [ [ "Technology of television-4" ] ] ], [ [ [ "C-SPAN-1" ] ], [ [ "Satellite-1" ] ] ] ] }, { "qid": "0a84eaea4d26d46c7c30", "term": "Rainbow", "description": "meteorological phenomenon", "question": "Are flag of Gabon colors found in rainbow?", "answer": true, "facts": [ "Rainbows contain the following colors: red, orange, yellow, green, blue, indigo and violet.", "The flag of Gabon is green, yellow, and blue." ], "decomposition": [ "What colors are found in a rainbow?", "What colors are in the flag of the country Gabon?", "Are all the colors in #2 found in #1?" 
], "evidence": [ [ [ [ "ROYGBIV-1" ] ], [ [ "Flag of Gabon-3" ] ], [ "operation" ] ], [ [ [ "ROYGBIV-1" ] ], [ [ "Flag of Gabon-5" ] ], [ "operation" ] ], [ [ [ "ROYGBIV-1" ] ], [ [ "Flag of Gabon-1" ] ], [ "operation" ] ] ] }, { "qid": "898ff9314804144d6f90", "term": "Johann Sebastian Bach", "description": "German composer", "question": "Did Johann Sebastian Bach influence heavy metal?", "answer": true, "facts": [ "Johann Sebastian Bach was a classical German composer born in 1685.", "Lead singer of heavy metal band Skid Row, Sebastian Bach, took his name from German composer Johann Sebastian Bach.", "Heavy Metal band Metallica released a live album with the San Francisco Symphony.", "Deep Purple, n English hard rock/heavy metal band has cited classical musicians as their inspiration.", "Deep Purple's keyboard and guitar solos on \"Highway Star,\" have been called Bach-like in harmonic progression and virtuosic arpeggio figuration." ], "decomposition": [ "Who is the lead singer of \"Skid Row\"?", "Who did #1 name himself after?", "Which classic musician's work have Deep Purple's solo on \"Highway Star\" been compared with?", "Are #2 and #3 Johann Sebastian Bach and both bands heavy metal?" ], "evidence": [ [ [ [ "Sebastian Bach-1" ] ], [ [ "Johann Sebastian Bach-1" ], "no_evidence" ], [ [ "Highway Star (song)-4" ] ], [ [ "Deep Purple-1", "Skid Row (American band) discography-2" ], "operation" ] ], [ [ [ "Skid Row (American band)-1" ] ], [ [ "Johann Sebastian Bach-1" ] ], [ [ "Highway Star (song)-3" ] ], [ "operation" ] ], [ [ [ "Sebastian Bach-1" ] ], [ [ "Johann Sebastian Bach-1" ] ], [ [ "Highway Star (song)-3" ] ], [ [ "Deep Purple-1", "Skid Row (American band)-1" ], "operation" ] ] ] }, { "qid": "756f99fb69f47193fe35", "term": "Hello", "description": "salutation or greeting", "question": "Can your psychologist say hello to you while you are out at the supermarket?", "answer": false, "facts": [ "Therapists are bound by confidentiality in all areas of their work. ", "It would violate the standard set by the APA for a therapist to acknowledge any client outside of a therapeutic setting." ], "decomposition": [ "What kind of code are therapists bound by in all areas of their work?", "Would it be in conformity to #1 to acknowledge a client outside a therapeutic setting?" ], "evidence": [ [ [ [ "Psychotherapy-16" ] ], [ "no_evidence" ] ], [ [ [ "Hippocratic Oath-1" ] ], [ [ "Confidentiality-13" ], "operation" ] ], [ [ [ "APA Ethics Code-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e2e70b3a5a5221852041", "term": "Dustin Hoffman", "description": "American actor and director", "question": "Will Dustin Hoffman likely vote for Trump in 2020?", "answer": false, "facts": [ "Dustin Hoffman is a liberal and has long supported the Democratic Party and Ralph Nader.", "Donald Trump belongs to the Republican Party." ], "decomposition": [ "What is Dustin Hoffman's political party affiliation?", "What party is Donald Trump affiliated with?", "Is someone from #1 likely to vote for a candidate from #2?" 
], "evidence": [ [ [ [ "Dustin Hoffman-81" ] ], [ [ "Donald Trump-3" ] ], [ [ "Political parties in the United States-4" ], "no_evidence" ] ], [ [ [ "Dustin Hoffman-81" ] ], [ [ "Donald Trump-3" ] ], [ [ "Dustin Hoffman-81", "Two-party system-4" ] ] ], [ [ [ "Dustin Hoffman-81" ] ], [ [ "Donald Trump-3" ] ], [ "operation" ] ] ] }, { "qid": "05ab64dd540d095d9c62", "term": "Mood disorder", "description": "(psychology) Any of various disorders characterised by disturbance in an individual's mood", "question": "Do people with mood disorders need permanent institutionalization?", "answer": false, "facts": [ "Most mood disorders can be treated in the outpatient setting.", "Many people with mood disorders do not get diagnosed at all." ], "decomposition": [ "Do most mood disorders need the patient to leave their homes to get treatment?" ], "evidence": [ [ [ [ "Mood disorder-21" ], "operation" ] ], [ [ [ "Mood disorder-21" ], "no_evidence" ] ], [ [ [ "Bipolar disorder-44", "Mood (psychology)-17" ], "no_evidence" ] ] ] }, { "qid": "5d0248445c0be4f1dc13", "term": "Sahara", "description": "desert in Africa", "question": "Can Poland Spring make money in the Sahara?", "answer": true, "facts": [ "The Sahara is the largest hot desert", "Deserts are dry regions that receive little precipitation", "Poland Spring sells bottled water" ], "decomposition": [ "What does Poland Spring produce and sell?", "What is the weather condition in the Sahara?", "Would #2 make #1 highly desirable?" ], "evidence": [ [ [ [ "Poland Spring-1" ] ], [ [ "Sahara-14", "Sahara-15" ] ], [ [ "Thirst-1" ] ] ], [ [ [ "Poland Spring-2" ] ], [ [ "Sahara desert (ecoregion)-5" ] ], [ [ "Sahara-12" ] ] ], [ [ [ "Poland Spring-1" ] ], [ [ "Sahara-1", "Sahara-20" ] ], [ "operation" ] ] ] }, { "qid": "c4b44208283d5d405c43", "term": "Ivan the Terrible", "description": "Grand Prince of Moscow and 1st Tsar of Russia", "question": "Was 847 Pope Leo same iteration of his name as Ivan the Terrible?", "answer": true, "facts": [ "Pope Leo in 847 AD was the fourth Leo to have that name and was called Leo IV.", "Ivan the Terrible was the 4th Tsar to have the name Ivan and was known as Ivan IV Vasilyevich." ], "decomposition": [ "Which Pope Leo is associated with the year 847 AD?", "How many similarly named popes were before #1?", "What was Ivan the Terrible's title as a ruler?", "How many similarly named #3 ruled before him?", "Is #2 equal to #4?" ], "evidence": [ [ [ [ "Pope Leo IV-1" ] ], [ [ "Pope Leo I-1", "Pope Leo II-1", "Pope Leo III-1" ] ], [ [ "Ivan the Terrible-1" ] ], [ [ "Ivan I of Moscow-1", "Ivan II of Moscow-1", "Ivan III of Russia-1" ] ], [ "operation" ] ], [ [ [ "Pope Leo IV-1" ] ], [ "operation" ], [ [ "Ivan the Terrible-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Pope Leo IV-1" ] ], [ "operation" ], [ [ "Ivan the Terrible-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "06b9ed3f803e3d5796ed", "term": "The Powerpuff Girls", "description": "American animated television series", "question": "Could the Powepuff Girls make the background to the Azerbaijani flag?", "answer": true, "facts": [ "The national flag of the Republic of Azerbaijan is a horizontal tricolour featuring three equally sized fesses of blue, red, and green", "Each of the Powerpuff Girls creates a trail of a different color when she flies: Bubbles makes blue, Blossom makes red, and Buttercup makes green." ], "decomposition": [ "What colors are present on the Azerbaijani flag?", "What colors are the Powerpuff Girls?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Flag of Azerbaijan-1" ] ], [ [ "The Powerpuff Girls-5" ] ], [ "operation" ] ], [ [ [ "Flag of Azerbaijan-1" ] ], [ [ "The Powerpuff Girls-9" ] ], [ "operation" ] ], [ [ [ "Flag of Azerbaijan-1" ] ], [ [ "The Powerpuff Girls-5" ] ], [ "operation" ] ] ] }, { "qid": "ae25e990129848141330", "term": "Florence", "description": "Capital and most populous city of the Italian region of Tuscany", "question": "Is there a Harry Potter character named after Florence?", "answer": true, "facts": [ "Firenze is the native Italian form of the name Florence.", "There is a centaur who appars as a minor character in the Harry Potter series named Firenze.", "Firenze appears in three of the Harry Potter books but only one movie." ], "decomposition": [ "What is the native Italian form for the name Florence?", "What is the name of the centaur who appears in the Harry Potter series?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Florence (given name)-5" ] ], [ [ "Magical creatures in Harry Potter-65" ] ], [ [ "Florence (given name)-5", "Magical creatures in Harry Potter-65" ], "operation" ] ], [ [ [ "Florence-1" ] ], [ [ "Magical creatures in Harry Potter-65" ] ], [ "operation" ] ], [ [ [ "Florence-1" ] ], [ [ "Magical creatures in Harry Potter-65" ] ], [ "operation" ] ] ] }, { "qid": "1a50ff1647077d670841", "term": "Snakebite", "description": "Injury caused by a bite from a snake", "question": "Would a snakebite hypothetically be a threat to T-1000?", "answer": false, "facts": [ "Snakebites are dangerous because they inject venom into blood streams.", "The T-1000 is an android from the movie series Terminator.", "Androids are machines made of wires and computer parts." ], "decomposition": [ "Where does the injurous action of a snakebite happen?", "What kind of entity is a T-1000?", "Does a #2 have a #1?" ], "evidence": [ [ [ [ "Snakebite-32" ], "no_evidence" ], [ [ "T-1000-2" ] ], [ [ "T-1000-7" ], "no_evidence", "operation" ] ], [ [ [ "Skin-1" ] ], [ [ "T-1000-10" ] ], [ "operation" ] ], [ [ [ "Venomous snake-1" ] ], [ [ "T-1000-2" ] ], [ "operation" ] ] ] }, { "qid": "3ba4e4cd6db5e42e06b0", "term": "Ham", "description": "Pork from a leg cut that has been preserved by wet or dry curing, with or without smoking", "question": "Did Malcolm X avoid eating ham?", "answer": true, "facts": [ "Malcolm X was a practicing Muslim", "Muslims are prohibited from eating foods derived from pigs" ], "decomposition": [ "What religion did Malcolm X practice?", "Does #1 forbid its believers eating pig products?" ], "evidence": [ [ [ [ "Malcolm X-1" ] ], [ "operation" ] ], [ [ [ "Malcolm X-1" ] ], [ [ "Islamic culture-45" ] ] ], [ [ [ "Malcolm X-50" ] ], [ [ "Islamic culture-45" ] ] ] ] }, { "qid": "56ce10fe8bfede3f56ba", "term": "New Year's Eve", "description": "holiday celebrated on 31 December", "question": "Would New Year's Eve hypothetically be Bacchus's favorite holiday?", "answer": true, "facts": [ "Bacchus was the Roman god of wine and revelry.", "One of the main New Year's Eve traditions is drinking a toast to the new year.", "New Year’s Eve is the biggest day of the year for liquor stores in terms of sales." ], "decomposition": [ "What was Bacchus the Roman god of?", "Do people tend to celebrate with #1 on New Year's Eve?" 
], "evidence": [ [ [ [ "Dionysus-1", "Dionysus-2" ] ], [ [ "New Year's Eve-15" ], "operation" ] ], [ [ [ "Dionysus-1", "Dionysus-2" ] ], [ [ "New Year's Eve-136", "New Year's Eve-97" ] ] ], [ [ [ "Dionysus-1" ] ], [ [ "New Year's Eve-15" ], "operation" ] ] ] }, { "qid": "42025ba75ec5d0f0f291", "term": "Zebra", "description": "Black and white striped animals in the horse family", "question": "Are black and white prison uniforms made to resemble a zebra?", "answer": false, "facts": [ "Prison stripes are made of parallel lines.", "Zebra stripes are jagged in appearance. " ], "decomposition": [ "What is the design on a prison uniform?", "What is the pattern on a zebra?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Prison uniform-28" ], "no_evidence" ], [ [ "Plains zebra-13" ] ], [ "operation" ] ], [ [ [ "Prison uniform-2" ], "no_evidence" ], [ [ "Zebra-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Prison uniform-24", "Prison uniform-26" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c3d270ad2ac26017e3b7", "term": "Jack Dempsey", "description": "American boxer", "question": "Did Jack Dempsey ever witness Conor McGregor's fights?", "answer": false, "facts": [ "Jack Dempsey died in 1983.", "Conor McGregor's first MMA fight was in 2008." ], "decomposition": [ "In what year did Jack Dempsey die?", "In what year was Conor McGregor's first fight?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Jack Dempsey-1" ] ], [ [ "Conor McGregor-7" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-1" ] ], [ [ "Conor McGregor-2" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-1" ] ], [ [ "Conor McGregor-7" ] ], [ "operation" ] ] ] }, { "qid": "a44c3d9161f5e5c3f41d", "term": "Immersion (virtual reality)", "description": "perception of being physically present in a non-physical world", "question": "Is immersion in virtual reality testable on cnidarians before humans?", "answer": false, "facts": [ "Perceptions are interpreted in the brain via the nervous system", "Hypothetical ways of interfacing with the central nervous system are being explored for immersive virtual reality", "Cnidarians have a nervous system, but not a central nervous system" ], "decomposition": [ "Which system of the human body will immersive virtual reality interact with?", "Do cnidarians have #1?" ], "evidence": [ [ [ [ "Motion sickness-8", "Virtual reality-58" ], "no_evidence" ], [ [ "Cnidaria-25" ], "operation" ] ], [ [ [ "Immersion (virtual reality)-9" ] ], [ [ "Ctenophora-5" ] ] ], [ [ [ "Immersion (virtual reality)-16" ] ], [ [ "Cnidaria-3" ], "no_evidence", "operation" ] ] ] }, { "qid": "ddf1a475bda7523ab5a9", "term": "Holy Land", "description": "Term used by Jews, Christians, and Muslims to describe the Land of Israel and Palestine", "question": "Is the Holy Land important to Eastern religions?", "answer": false, "facts": [ "Eastern religions include Hinduism, Buddhism, and Shintoism.", "Hinduism recognizes seven Holy Cities which are Ayodhya, Mathura, Haridwar, Varanasi, Kanchipuram, Dvaraka and Ujjain.", "Bodh Gaya: (in the current Mahabodhi Temple, Bihar, India), is the most important religious site and place of pilgrimage for Buddhists.", "The most sacred Shinto shrine is located in the city of Ise, within the Shima Peninsula of Japan." ], "decomposition": [ "What are some typical Eastern religions?", "Which place is referred to as the Holy Land?", "Which places do some of #1 consider sacred or holy?", "Is #2 included in #3?" 
], "evidence": [ [ [ [ "Eastern religions-1" ] ], [ [ "Holy Land-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Eastern religions-1" ] ], [ [ "Holy Land-1" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Eastern religions-1" ] ], [ [ "Holy Land-1", "Holy Land-4" ] ], [ [ "Ganga in Hinduism-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "502d4dae6f08e73a5569", "term": "Crucifixion", "description": "Method of capital punishment in which the victim is tied or nailed to a large wooden beam and left to hang until eventual death", "question": "Is Home Depot a one stop shop for crucifixion supplies?", "answer": true, "facts": [ "A one stop shop is a store where multiple items are supplied.", "Crucifixion is a form of punishment in which a person is nailed to a wooden cross.", "Home Depot sells numerous supplies including: hammers, nails, and wood." ], "decomposition": [ "What is the definition of a one stop shop?", "What tools are necessary for Crucifixion?", "Is Home Depot a #1 for all of #2?" ], "evidence": [ [ [ [ "One stop shop-1" ] ], [ [ "Crucifixion-1" ] ], [ "operation" ] ], [ [ [ "One stop shop-1" ] ], [ [ "Descriptions in antiquity of the execution cross-6" ], "no_evidence" ], [ [ "The Home Depot-1" ], "operation" ] ], [ [ [ "One stop shop-1" ] ], [ [ "Crucifixion-1" ] ], [ [ "The Home Depot-1" ], "operation" ] ] ] }, { "qid": "9ce9d7604f3da80b07be", "term": "Chrysler", "description": "Automotive brand manufacturing subsidiary of Fiat Chrysler Automobiles", "question": "Can you carry a Chrysler in a laptop bag?", "answer": false, "facts": [ "Chrysler manufactures automobiles, which weigh several thousand pounds", "Laptop bags are designed to hold laptop computers, which typically weigh under ten pounds" ], "decomposition": [ "What kind of object is \"Chrysler\" referring to?", "How much do #1's typically weigh?", "What object is a laptop bag designed to carry?", "How much does #3 typically weigh?", "Is #4 greater than or equal to #2?" ], "evidence": [ [ [ [ "Chrysler-1" ] ], [ [ "Car-40" ], "no_evidence" ], [ [ "Laptop-43" ] ], [ [ "Laptop-13" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chrysler-1" ] ], [ "no_evidence" ], [ [ "Laptop-48" ], "no_evidence" ], [ [ "Laptop-13" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chrysler-1" ] ], [ [ "Car-40" ] ], [ [ "Backpack-23" ] ], [ [ "Laptop-7" ] ], [ "operation" ] ] ] }, { "qid": "c2bbc9f09c8ac750e8bd", "term": "Bodybuilding", "description": "use of progressive resistance exercise to control and develop musculature", "question": "Would a bodybuilder enjoy wearing a cast for several weeks?", "answer": false, "facts": [ "Casts encase a limb and prevent it from moving.", "Movement of limbs under resistance promote muscle growth.", "An absence of limb movement will result in decreased muscle size.", "The goal of bodybuilding is to increase the size of your muscles.", "Individuals are not happy when they are prevented from pursuing their goals." ], "decomposition": [ "What does a bodybuilder need to do on a daily basis?", "What does a cast limit freedom of?", "Does the limit on #2 make #1 possible?" 
], "evidence": [ [ [ [ "Bodybuilding-1" ] ], [ [ "Orthopedic cast-1" ] ], [ [ "Muscle atrophy-1", "Muscle atrophy-7" ] ] ], [ [ [ "Bodybuilding-1" ] ], [ [ "Orthopedic cast-1" ] ], [ "no_evidence" ] ], [ [ [ "Bodybuilding-37" ], "no_evidence" ], [ [ "Orthopedic cast-1" ] ], [ "operation" ] ] ] }, { "qid": "6f6cc7ad88e1a4bd1880", "term": "Charlemagne", "description": "King of the Franks, King of Italy, and Holy Roman Emperor", "question": "Would Temujin hypothetically be jealous of Charlemagne's conquests?", "answer": false, "facts": [ "Temujin was the birth name of Genghis Khan.", "Genghis Khan founded the Mongol Empire which was the largest land empire in world history.", "Charlemagne, King of the Franks, conquered most of Western Europe.", "At its peak, the Mongol Empire had 110 million people.", "Charlemagne's empire had around 20 million people at its height." ], "decomposition": [ "Temujin was the name of which leader?", "How many people did #1's empire have at its peak?", "How many people did Charlemagne's empire have at its peak?", "Is #3 greater than #2?" ], "evidence": [ [ [ [ "Genghis Khan-1" ] ], [ "no_evidence" ], [ [ "Carolingian Empire-1", "Carolingian Empire-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Genghis Khan-1" ] ], [ [ "Mongol Empire-1" ], "no_evidence" ], [ [ "Carolingian Empire-3", "Charlemagne-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Genghis Khan-1" ] ], [ [ "Mongol Empire-109" ], "no_evidence" ], [ [ "Carolingian Empire-1", "Carolingian Empire-3" ] ], [ "operation" ] ] ] }, { "qid": "042101b10af2b128df10", "term": "Brooklyn", "description": "Borough in New York City and county in New York state, United States", "question": "Can DRL Racer X drone get across Brooklyn Bridge in 18 seconds?", "answer": false, "facts": [ "The Brooklyn Bridge is 1.1 miles long.", "The DRL Racer X drone can fly at a top speed of 179.6 MPH.", "The DRL Racer X drone can cover around 3 miles a minute." ], "decomposition": [ "What is the top speed of the DRL Racer X drone?", "How long is the Brooklyn Bridge?", "What is #2 multiplied by 60 and then divided by #1?", "is #3 less than or equal to 18?" ], "evidence": [ [ [ [ "Drone Racing League-22" ] ], [ [ "Brooklyn Bridge-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Drone Racing League-22" ] ], [ [ "Brooklyn Bridge-5", "Mile-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Drone Racing League-22" ] ], [ [ "Brooklyn Bridge-1" ] ], [ "no_evidence", "operation" ], [ "operation" ] ] ] }, { "qid": "ff848539d05ca985ed4f", "term": "Hyena", "description": "family of mammal", "question": "Would a human following a hyena diet be unwelcome at a vegan festival?", "answer": true, "facts": [ "A hyena is a carnivorous mammal that feeds on the flesh of other animals.", "Vegans are people that stick to a strict diet that does not include animals or animal products." ], "decomposition": [ "What does the hyena diet consist mainly of?", "What do people on a vegan diet eat?", "Is there an overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Hyena-1", "Hyena-3" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ], [ [ [ "Hyena-21" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ], [ [ [ "Striped hyena-15" ] ], [ [ "Veganism-31" ] ], [ "operation" ] ] ] }, { "qid": "35b27eb99d85b7dc674c", "term": "Red panda", "description": "Mammal of the family Ailuridae", "question": "Is it normal to see a red panda in Shanghai outside of a zoo?", "answer": false, "facts": [ "The red panda is endemic to the temperate forests of the Himalayas, and ranges from the foothills of western Nepal to China in the east.", "The red panda lives between 2,200 and 4,800 m (7,200 and 15,700 ft) altitude, inhabiting areas of moderate temperature between 10 and 25 °C (50 and 77 °F) with little annual change.", "Shanghai is located on an alluvial plain, as such the vast majority of its land area is flat, with an average elevation of 4 m (13 ft)." ], "decomposition": [ "At what elevations are red pandas found?", "What is the elevation of Shanghai?", "Is there an overlap between #1 and #2?" ], "evidence": [ [ [ [ "Red panda-7" ] ], [ [ "Shanghai-27" ] ], [ "operation" ] ], [ [ [ "Red panda-8" ], "no_evidence" ], [ [ "Shanghai-27" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Red panda-7" ] ], [ [ "Shanghai-27" ] ], [ "operation" ] ] ] }, { "qid": "c144bc5e23d0944b4f1c", "term": "Mayor", "description": "head of municipal government such as a town or city", "question": "Are Mayors safe from harm from the federal government?", "answer": false, "facts": [ "The Mayor of Portland is Ted Wheeler.", "Ted Wheeler was tear-gassed by federal troops sent to his state." ], "decomposition": [ "Who is the mayor of Portland?", "Has #1 been able to avoid harm when federal troops were sent to his state" ], "evidence": [ [ [ [ "Ted Wheeler-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Government of Portland, Oregon-3" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Ted Wheeler-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "63de2dfa4f52f3dbf28e", "term": "Snickers", "description": "brand name chocolate bar made by Mars, Incorporated", "question": "Would 2019 Natalie Portman avoid a Snickers bar due to her diet?", "answer": true, "facts": [ "Actress Natalie Portman resumed her vegan diet in 2018 after giving birth, and has been vegan ever since.", "Vegans do not eat animal products.", "Snickers contains egg whites as an ingredient." ], "decomposition": [ "What foods has Natalie Portman avoided since 2018?", "What are the ingredients in a Snickers bar?", "Is anything from #2 also in #1?" ], "evidence": [ [ [ [ "Natalie Portman-30", "Veganism-1" ] ], [ [ "Snickers-1" ] ], [ [ "Nougat-1", "Types of chocolate-6" ], "operation" ] ], [ [ [ "Natalie Portman-27", "Veganism-1" ] ], [ [ "Snickers-1" ] ], [ "operation" ] ], [ [ [ "Natalie Portman-27" ], "no_evidence" ], [ [ "Snickers-7" ] ], [ "operation" ] ] ] }, { "qid": "05fe6e26626c69b65ac1", "term": "Albatross", "description": "Large seabirds in the order Procellariiformes found in the Southern Ocean and the North Pacific", "question": "Do mollymawks live where albatrosses cannot?", "answer": false, "facts": [ "A mollymawk is a type of albatross", "Any place inaccessible to albatrosses in general is inaccessible to specific types of albatross" ], "decomposition": [ "Mollymawks are a type of which animal?", "Is #1 different from an albatross?" 
], "evidence": [ [ [ [ "Mollymawk-1" ] ], [ "operation" ] ], [ [ [ "Mollymawk-4" ] ], [ [ "Mollymawk-4" ] ] ], [ [ [ "Mollymawk-1" ] ], [ "operation" ] ] ] }, { "qid": "4ba19380524bb1f05786", "term": "Year", "description": "Orbital period of the Earth around the Sun", "question": "Can you listen to the entire Itunes song catalog in one year?", "answer": false, "facts": [ "Itunes has around 43 million songs as of 2017.", "The average length of a song is 3 minutes.", "There are 525,600 minutes in a year." ], "decomposition": [ "How many songs are on iTunes?", "What is the average song length?", "What is #1 multiplies by #2?", "How many minutes are in a year?", "Is #4 greater than #3?" ], "evidence": [ [ [ [ "ITunes Store-2" ] ], [ [ "Popular music-19" ] ], [ "operation" ], [ [ "Year-57" ] ], [ "operation" ] ], [ [ [ "ITunes-20" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Seasons of Love-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "ITunes Store-2" ] ], [ [ "Justin Bieber-29" ], "no_evidence" ], [ "operation" ], [ [ "Year-19" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "6574229a8596e013f8e6", "term": "Sesame", "description": "species of plant", "question": "Could white rice go rancid before sesame seeds?", "answer": false, "facts": [ "Sesame seeds should last 6-12 months unopened.", "White rice can last 4-5 years in a pantry." ], "decomposition": [ "What is the shelf life of sesame seeds?", "What is the shelf life of white rice?", "Is #2 shorter than #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "White rice-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sesame-17" ], "no_evidence" ], [ [ "Rice-84" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ec77d443ba555a906316", "term": "Taco Bell", "description": "American fast-food chain", "question": "Does the Taco Bell kitchen contain cinnamon?", "answer": true, "facts": [ "Taco Bell serves churros.", "Cinnamon is an ingredient in churros." ], "decomposition": [ "What dough pastry based snack does Taco Bell serve?", "Does #1 contain Cinnamon?" ], "evidence": [ [ [ [ "Taco Bell-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Taco Bell-21" ] ], [ [ "Cinnabon-3" ] ] ], [ [ [ "Taco Bell-21" ] ], [ [ "Cinnabon-3" ], "no_evidence" ] ] ] }, { "qid": "2bec732d14cf2b289435", "term": "Santa Claus", "description": "Folkloric figure, said to deliver gifts to children on Christmas Eve", "question": "Are most mall Santa Claus actors white?", "answer": true, "facts": [ "In 2016, a black man playing Santa Claus at the Mall of America made national headlines.", "There are map websites dedicated to locating black Santa Claus mall actors." ], "decomposition": [ "What is the ethnicity of the man who made headlines for playing Santa Claus at the Mall of America in 2016?", "Does #1 imply that black Santas are a rare occurrence?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Santa Claus-2" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Santa Claus-50" ], "no_evidence", "operation" ] ] ] }, { "qid": "a253f98e01008297d4b3", "term": "Lip", "description": "Visible body part at the mouth", "question": "Does having lip piercings lead to more expensive dental bills?", "answer": true, "facts": [ "Lip piercings can rub the enamel on your teeth and can cause tissue damage to the gums.", "Tooth enamel protects the teeth from decay." 
], "decomposition": [ "What is the function of Tooth Enamel?", "Can Lip piercing cause damage to #1", "Will #2 cost you more expensive dental bills?" ], "evidence": [ [ [ [ "Tooth enamel-1" ] ], [ [ "Tooth enamel-26" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Tooth enamel-1" ] ], [ [ "Lip piercing-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Tooth enamel-21" ] ], [ [ "Body piercing-42", "Lip piercing-4" ] ], [ [ "Tooth enamel-39" ], "no_evidence" ] ] ] }, { "qid": "47b4c3e06e1a2f42b91a", "term": "Bipolar disorder", "description": "mental disorder that causes periods of depression and abnormally elevated mood", "question": "Are you more likely to find bipolar disorder in a crowd than diabetes?", "answer": false, "facts": [ "Bipolar disorder is a condition that effects around 1% of the population.", "It is estimated that around 10% of the population suffers from diabetes." ], "decomposition": [ "What percent of the population has bipolar disorder?", "What percent of the population has diabetes?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Bipolar disorder-4" ] ], [ [ "Diabetes-4" ] ], [ [ "Diabetes-4" ] ] ], [ [ [ "Bipolar disorder-4" ] ], [ [ "Diabetes-4" ] ], [ "operation" ] ], [ [ [ "Bipolar disorder-4" ] ], [ [ "Diabetes-4" ] ], [ "operation" ] ] ] }, { "qid": "9384dfc0aabc73026925", "term": "Heart", "description": "organ for the circulation of blood in animal circulatory systems", "question": "Do anatomical and symbolic hearts look remarkably different?", "answer": true, "facts": [ "Symbolic hearts are sharply pointed at the bottom and feature a sharp valley between the bumps at the top.", "Anatomical hearts are rounded, have numerous vascular tubes entering and exiting them, and do not feature sharp angles." ], "decomposition": [ "What are the dimensions of the heart symbol?", "Do anatomical hearts lack the dimensions of #1?" ], "evidence": [ [ [ [ "Heart symbol-1" ] ], [ [ "Heart-6", "Heart-7" ] ] ], [ [ [ "Heart symbol-1" ], "no_evidence" ], [ [ "Heart-2" ], "no_evidence", "operation" ] ], [ [ [ "Heart symbol-3" ] ], [ [ "Heart-8" ] ] ] ] }, { "qid": "0e19518a97864d4babba", "term": "Gulf of Mexico", "description": "An Atlantic Ocean basin extending into southern North America", "question": "Are fossil fuels reducing jobs in the Gulf of Mexico?", "answer": true, "facts": [ "An oil spill is still polluting the Gulf of Mexico", "Workers such as fishermen are out of work due to pollution" ], "decomposition": [ "What are the consequences of fossil fuel presence in the Gulf of Mexico?", "Is #1 putting some people out of job?" ], "evidence": [ [ [ [ "Deepwater Horizon oil spill-2", "Taylor oil spill-2" ] ], [ [ "Deepwater Horizon oil spill-71", "Deepwater Horizon oil spill-72" ], "operation" ] ], [ [ [ "Gulf of Mexico-38" ] ], [ [ "Gulf of Mexico-38" ], "no_evidence" ] ], [ [ [ "Gulf of Mexico-42" ] ], [ "no_evidence" ] ], [ [ [ "Gulf of Mexico-42" ] ], [ [ "Gulf of Mexico-36" ], "no_evidence", "operation" ] ] ] }, { "qid": "39e49e586745cbe07870", "term": "Fair trade", "description": "form of trade", "question": "Did Medieval English lords engage in fair trade with peasants?", "answer": false, "facts": [ "Fair trade is a system in which fair prices are paid to the producers of a product.", "English lords had peasants working on their manors and the peasants were indentured servants.", "The peasants had few rights, were unpaid, and had to even ask their lord for permission to marry." 
], "decomposition": [ "What is fair trade?", "Are peasants able to participate in #1 with Lords?" ], "evidence": [ [ [ [ "Fair trade-1" ] ], [ [ "Peasant-1" ], "no_evidence" ] ], [ [ [ "Fair trade-1" ], "no_evidence" ], [ [ "Peasant-8" ], "no_evidence", "operation" ] ], [ [ [ "Fair trade-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c6adebffcdabb5ba7063", "term": "Small Solar System body", "description": "object in the Solar System that is neither a planet, nor a dwarf planet, nor a satellite", "question": "Is the name of a mythical creature also the name of a Small Solar System body?", "answer": true, "facts": [ "A centaur is a kind of Small Solar System body with characteristics of both asteroids and comets.", "A centaur is also a mythical creature that has the body of a horse and the head of a man." ], "decomposition": [ "What is a mythical creature with the body of a horse and the head of a man called?", "Is any Small Solar System named after #1?" ], "evidence": [ [ [ [ "Centaur-1" ] ], [ [ "Centaur (small Solar System body)-1" ], "operation" ] ], [ [ [ "Centaur-5" ] ], [ [ "Centaurus-18" ] ] ], [ [ [ "Centaur-1" ] ], [ [ "Centaur (small Solar System body)-1" ] ] ] ] }, { "qid": "c7ca98c8ff55eb2dc75a", "term": "Astronomer", "description": "Scientist who studies celestial bodies", "question": "Do astronomers write horoscopes?", "answer": false, "facts": [ "Astronomer study the actual science of the stars.", "Horoscopes are written by astrologers, not astronomers." ], "decomposition": [ "Which field of science do horoscopes fall under?", "Which science field do astronomers study?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Astrology-1" ] ], [ [ "Astronomy-1" ] ], [ "operation" ] ], [ [ [ "Astrology-1" ] ], [ [ "Astronomer-1" ] ], [ "operation" ] ], [ [ [ "Horoscope-1" ] ], [ [ "Astronomer-1" ] ], [ "operation" ] ] ] }, { "qid": "7c3759cc1da78e9fbd79", "term": "Crustacean", "description": "subphylum of arthropods", "question": "Could a Diwali celebration feature a crustacean?", "answer": true, "facts": [ "Diwali is an important Hindu holiday which includes feasting.", "Crustaceans are shelled, mainly aquatic animals that include shrimp, lobster, and crabs.", "Hindus are allowed to eat any food besides beef, since cows are sacred.", "Hindus are allowed to eat " ], "decomposition": [ "What religion celebrates Diwali with feasting?", "What foods are on #1 s forbidden list?", "Is lobster part of #2?" ], "evidence": [ [ [ [ "Diwali-4" ] ], [ [ "Buddhist cuisine-9" ] ], [ "operation" ] ], [ [ [ "Diwali-1" ] ], [ [ "Hinduism-94" ] ], [ [ "Lobster-2" ] ] ], [ [ [ "Diwali-15" ] ], [ [ "Diet in Hinduism-27" ] ], [ "operation" ] ] ] }, { "qid": "946d0b97a56bbe7acca2", "term": "Chives", "description": "edible species of plant", "question": "Are there any chives hypothetically good for battling vampires?", "answer": true, "facts": [ "Vampires in folklore have a weakness to garlic.", "Chives, an edible plant species, come in a number of varieties.", "Garlic chives are a variant of chives first found in China thousands of years ago." ], "decomposition": [ "What items are used to ward off vampires according to folklore?", "What are the varieties of chives that exist?", "Is any of #1 included in #2?" 
], "evidence": [ [ [ [ "Garlic-61" ] ], [ [ "Garlic-1" ] ], [ [ "Garlic-1" ], "operation" ] ], [ [ [ "Vampire-16" ] ], [ [ "Chives-1" ] ], [ "operation" ] ], [ [ [ "Garlic-61" ] ], [ [ "Allium-1" ] ], [ "operation" ] ] ] }, { "qid": "e304827eeb573bb2612e", "term": "Golden Gate Bridge", "description": "suspension bridge on the San Francisco Bay", "question": "Can you make an MP3 from the Golden Gate Bridge?", "answer": true, "facts": [ "MP3 is a file compression format for audio recordings", "The Golden Gate Bridge has been reported emitting sounds when the wind passes through its sidewalk railing slats" ], "decomposition": [ "What is an MP3 a compressed file of?", "Does the Golden Gate Bridge produce #1?" ], "evidence": [ [ [ [ "MP3-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "MP3-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "MP3-1" ] ], [ [ "Golden Gate Bridge-12" ] ] ] ] }, { "qid": "3301cea644c68aa24894", "term": "Ubuntu", "description": "Linux distribution based on Debian", "question": "If you were at an Apple store, would most of the computers be running Ubuntu?", "answer": false, "facts": [ "Apple stores stock only Mac brand computers.", "Mac computers come preinstalled with the latest iOS." ], "decomposition": [ "Which operating system do Apple computers run on?", "Is #1 the same as Ubuntu?" ], "evidence": [ [ [ [ "Operating system-40" ] ], [ [ "Ubuntu-1" ] ] ], [ [ [ "MacOS-1" ] ], [ "operation" ] ], [ [ [ "MacOS-1" ] ], [ "operation" ] ] ] }, { "qid": "1f4eecdce7604d783890", "term": "Charles Darwin", "description": "\"British naturalist, author of \"\"On the Origin of Species, by Means of Natural Selection\"\"\"", "question": "Did Lamarck and Darwin agree about the origin of species diversity?", "answer": false, "facts": [ "Darwin theorized that evolution was driven by the fittest animals surviving and passing their genes on.", "Lamarck theorized that animals' responses to needs in their life would influence the growth of their offspring." ], "decomposition": [ "What was Darwin's theory about the origins of species diversity?", "What was the theory of Lamarck regarding the origins of different species?", "Are the theories of #1 and #2 the same?" ], "evidence": [ [ [ [ "On the Origin of Species-1" ] ], [ [ "Lamarckism-1" ] ], [ "operation" ] ], [ [ [ "Charles Darwin-1" ] ], [ [ "Lamarckism-1" ] ], [ "operation" ] ], [ [ [ "Charles Darwin-2" ] ], [ [ "Jean-Baptiste Lamarck-1" ] ], [ "operation" ] ] ] }, { "qid": "6b3601b4c251046cbef4", "term": "Saint Vincent and the Grenadines", "description": "Country in the Caribbean", "question": "Was Saint Vincent and the Grenadines named by an Italian explorer?", "answer": true, "facts": [ "Christopher Columbus, an Italian explorer, was the first European to discover the islands.", "He named them after St. Vincent because he first saw the island on the saint's feast day, and the Spanish city of Granada." ], "decomposition": [ "Who discovered Saint Vincent and the Grenadines?", "Was #1 from Italy?" 
], "evidence": [ [ [ [ "Saint Vincent and the Grenadines-7" ] ], [ [ "Christopher Columbus-1" ], "operation" ] ], [ [ [ "Saint Vincent and the Grenadines-7" ] ], [ [ "Christopher Columbus-1" ] ] ], [ [ [ "Saint Vincent and the Grenadines-7" ] ], [ [ "Christopher Columbus-1" ] ] ] ] }, { "qid": "289c2f15051024b8b27f", "term": "Alice in Wonderland (1951 film)", "description": "1951 American animated musical fantasy film produced by Walt Disney Productions", "question": "Was milliner in Alice in Wonderland (1951 film) likely in need of succimer?", "answer": true, "facts": [ "A milliner is someone who makes hats.", "The character of the Mad Hatter was a milliner in the 1951 Alice in Wonderland film.", "The phrase, Mad as a Hatter, comes from the fact that hat makers used mercury to line their hats and often suffered mercury poisoning.", "Succimer is a chemical that is used to treat lead, mercury, and arsenic poisoning." ], "decomposition": [ "What does a milliner do?", "Which Alice in Wonderland (1951 film) character did #1?", "Which element did #2 use for work that could be harmful to their mental health?", "Is succimer useful for treatment of the effects of #3?" ], "evidence": [ [ [ [ "Hatmaking-1" ] ], [ [ "Alice in Wonderland (1951 film)-7" ] ], [ [ "Erethism-1" ] ], [ [ "Dimercaptosuccinic acid-1" ], "operation" ] ], [ [ [ "Hatmaking-1" ] ], [ [ "Hatter (Alice's Adventures in Wonderland)-1" ] ], [ [ "Hatter (Alice's Adventures in Wonderland)-5" ] ], [ [ "Dimercaptosuccinic acid-1" ] ] ], [ [ [ "Hatmaking-1" ] ], [ [ "Hatter (Alice's Adventures in Wonderland)-1" ] ], [ [ "Hatter (Alice's Adventures in Wonderland)-5" ] ], [ [ "Dimercaptosuccinic acid-1" ] ] ] ] }, { "qid": "8e35a948ae9e0bf019f4", "term": "Curling", "description": "Team sport played on ice", "question": "Is a curling iron necessary in curling?", "answer": false, "facts": [ "A curling iron is a tool used to make the hair curly using heat.", "The sport of curling requires curling brooms, stones (rocks), and curling shoes.", "Changing the structure of your hair has no practical benefit to the sport of curling." ], "decomposition": [ "What equipment is used in the sport of curling?", "Is a curling iron included in #1?" ], "evidence": [ [ [ [ "Curling-31" ] ], [ "operation" ] ], [ [ [ "Curling-1" ] ], [ [ "Hair iron-1" ], "operation" ] ], [ [ [ "Curling-15", "Curling-21", "Curling-28", "Curling-32" ] ], [ "operation" ] ] ] }, { "qid": "6c6d1853b7e97e66ef46", "term": "Red hair", "description": "Hair color", "question": "If you have black hair and want red hair, do you need bleach?", "answer": true, "facts": [ "You cannot dye hair to be lighter than the starting color.", "To make hair a color lighter than the starting color, you need to bleach the hair." ], "decomposition": [ "Why would someone need bleach when dying their hair?", "Is red hair #1 than black hair?" ], "evidence": [ [ [ [ "Hair coloring-11" ] ], [ [ "Hair coloring-11" ] ] ], [ [ [ "Bleach-22" ], "no_evidence" ], [ [ "Red hair-2" ], "operation" ] ], [ [ [ "Hair coloring-26" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "281290ed644b03d1b789", "term": "Reddit", "description": "Online news aggregator", "question": "Can you buy Reddit at Walmart?", "answer": false, "facts": [ "Reddit is an online social networking forum and community", "Walmart sells tangible goods and services" ], "decomposition": [ "What is Reddit?", "Is #1 tangible?", "Does Walmart sell tangible items??", "Are #2 and #3 the same?" 
], "evidence": [ [ [ [ "Reddit-1" ] ], [ "operation" ], [ [ "Walmart-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Reddit-5" ] ], [ [ "Reddit-5" ] ], [ [ "Walmart-5" ] ], [ [ "Walmart-5" ] ] ], [ [ [ "Reddit-1" ] ], [ "operation" ], [ [ "Walmart-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "141e0ea8af89ad9c91af", "term": "Management", "description": "Coordinating the efforts of people", "question": "In order to work in district management, does one need a car?", "answer": true, "facts": [ "District managers are responsible for supervising many stores within an area.", "District managers must travel to the various stores they supervise to ensure peak performance." ], "decomposition": [ "What is the main responsibility of district managers?", "In order to do #1 efficiently, is a car needed? " ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Account manager-1" ], "no_evidence" ], [ [ "Account manager-9" ], "operation" ] ], [ [ [ "District Programme Manager-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ff3811735ededd8ec3a7", "term": "Asparagus", "description": "species of plant", "question": "Are slime lilies in a different scientific family than asparagus?", "answer": false, "facts": [ "Asparagus is a species of plants of the Asparagaceae family.", "Slime lilies are the common name for the flowering albuca plant.", "The albuca plant belongs to the scientific family of Asparagaceae." ], "decomposition": [ "Which family does the asparagus belong to?", "Which plants are commonly referred to as slime lilies?", "Which family does #2 belong to?", "Is #1 different from #3?" ], "evidence": [ [ [ [ "Asparagaceae-1" ] ], [ [ "Albuca-1" ] ], [ [ "Albuca-1" ] ], [ "operation" ] ], [ [ [ "Asparagus-2" ] ], [ [ "Albuca-1" ] ], [ [ "Albuca-1" ] ], [ "operation" ] ], [ [ [ "Asparagaceae-1" ] ], [ [ "Albuca-1" ] ], [ [ "Albuca-1" ] ], [ "operation" ] ] ] }, { "qid": "184d0b1306899a9365ce", "term": "Forbidden City", "description": "Art museum, Imperial Palace, Historic site in Beijing, China", "question": "Is the Forbidden City host to a wooden rollercoaster?", "answer": false, "facts": [ "Wooden rollercoasters are relatively modern.", "The Forbidden City is an ancient historic site." ], "decomposition": [ "When were wooden rollercoasters first built?", "What is the Forbidden City?", "When was #2 built?", "Did #3 come before #1?" ], "evidence": [ [ [ [ "Wooden roller coaster-3" ] ], [ [ "Forbidden City-1" ] ], [ [ "Forbidden City-2" ] ], [ "operation" ] ], [ [ [ "Wooden roller coaster-3" ] ], [ [ "Forbidden City-1" ] ], [ [ "Forbidden City-2" ] ], [ "operation" ] ], [ [ [ "History of the roller coaster-8" ] ], [ [ "Forbidden City-1" ] ], [ [ "Forbidden City-2" ] ], [ "operation" ] ] ] }, { "qid": "73df0f851259af9ea647", "term": "Emu", "description": "Large flightless bird endemic to Australia", "question": "Are emus related to elks?", "answer": false, "facts": [ "Emus are a type of flightless bird.", "Elks are deer, which are mammals." ], "decomposition": [ "What type of animal are Emus?", "What type of animals are Elks?", "Are #1 and #2 the same?" 
], "evidence": [ [ [ [ "Emu-1" ] ], [ [ "Elk-1" ] ], [ "operation" ] ], [ [ [ "Emu-2" ] ], [ [ "Elk-1" ] ], [ "operation" ] ], [ [ [ "Emu-1" ] ], [ [ "Elk-1" ] ], [ "operation" ] ] ] }, { "qid": "2e8e9e51f0f290e32d27", "term": "Celery", "description": "species of plant", "question": "Did any cultures associate celery with death?", "answer": true, "facts": [ "Ancient Greeks used garlands of celery leafs to bury their dead.", "Ancient Greece was considered a culture. " ], "decomposition": [ "What are the various forms of cultural depictions of celery?", "Is any of #1 associated with death?" ], "evidence": [ [ [ [ "Celery-44" ] ], [ "operation" ] ], [ [ [ "Celery-44" ] ], [ "operation" ] ], [ [ [ "Celery-44" ] ], [ [ "Celery-44" ] ] ] ] }, { "qid": "692119e8ebcbf634d224", "term": "Butter", "description": "dairy product", "question": "Would toast for a vegan have margarine instead of butter?", "answer": true, "facts": [ "Margarine is typically made without the use of dairy ingredients.", "Vegans do not eat any animal products, including dairy and eggs." ], "decomposition": [ "Which products are avoided in vegan diet?", "Is margarine free of #1?" ], "evidence": [ [ [ [ "Veganism-1" ] ], [ [ "Margarine-36" ] ] ], [ [ [ "Veganism-1" ] ], [ [ "Margarine-2" ], "operation" ] ], [ [ [ "Veganism-1" ] ], [ [ "Margarine-2" ] ] ] ] }, { "qid": "026a34caa057f988a881", "term": "Michael Crichton", "description": "American author, screenwriter, film director", "question": "Was Michael Crichton ever in danger of flunking out of Harvard as an undergraduate?", "answer": false, "facts": [ "Scholastic probation or academic dismissal, sometimes known as flunking out, is the termination of students at a higher educational institution as the result of poor academic achievement.", "Michael Crichton obtained his bachelor's degree in biological anthropology summa cum laude in 1964.", "Summa cum laude is the highest distinction a person can achieve in college for academic success.", "Someone who achieves summa cum laude cannot have even a single semester of poor grades." ], "decomposition": [ "What grade is considered flunking in US colleges?", "What honors did Michael Crichton graduate with?", "Can someone achieve #2 with grades of #1?" ], "evidence": [ [ [ [ "Academic grading in the United States-18" ] ], [ [ "Michael Crichton-7" ] ], [ [ "Academic grading in the United States-18", "Michael Crichton-7" ] ] ], [ [ [ "Grading systems by country-216" ], "no_evidence" ], [ [ "Michael Crichton-7" ] ], [ [ "Latin honors-5" ], "operation" ] ], [ [ [ "Grading systems by country-216" ] ], [ [ "Michael Crichton-7" ] ], [ [ "Latin honors-5" ], "operation" ] ] ] }, { "qid": "b3c8f537cfb900ba92e5", "term": "Salsa (sauce)", "description": "Sauce", "question": "Would the chef at La Grenouille find salsa to be a strange request?", "answer": true, "facts": [ "La Grenouille is a classic French cuisine restaurant in NYC.", "Salsa is a staple food in Mexican cuisine." ], "decomposition": [ "What type of cuisine does La Grenouille serve?", "Would you typically find salsa in #1?" 
], "evidence": [ [ [ [ "La Grenouille (restaurant)-3" ], "operation" ], [ "no_evidence" ] ], [ [ [ "La Grenouille (restaurant)-1" ] ], [ [ "Mexican cuisine-28" ] ] ], [ [ [ "La Grenouille (restaurant)-3" ] ], [ [ "La Grenouille (restaurant)-1", "Salsa-1" ] ] ] ] }, { "qid": "42ea53f5d153e3d5e9ec", "term": "Pyrenees", "description": "Range of mountains in southwest Europe", "question": "Would Jolly Green Giant's largest monument look impressive next to Pyrenees?", "answer": false, "facts": [ "The Jolly Green Giant monument in Blue Earth, Minnesota is 55.5 feet tall.", "The Pyrenees mountains are 11,168 feet high.", "The Pyrenees mountains are 305 miles wide." ], "decomposition": [ "How tall is the tallest monument to the Jolly Green Giant?", "How high is the tallest of the Pyrenees?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Green Giant-10" ] ], [ [ "Pyrenees-1" ] ], [ "operation" ] ], [ [ [ "Green Giant-10" ] ], [ [ "Pyrenees-1" ] ], [ "operation" ] ], [ [ [ "Green Giant-10" ] ], [ [ "Pyrenees-13" ] ], [ "operation" ] ] ] }, { "qid": "979b4b0fa0a8606bfcae", "term": "Lecturer", "description": "tenure-track or tenured position at a university or similar institution", "question": "Would Quiet from Metal Gear be a poor hypothetical choice for lecturer at Haub?", "answer": true, "facts": [ "Quiet is an assassin from the Metal Gear video game series that does not speak.", "Haub is a school at Pace University that has annual lectures.", "Haub is a law school that has annual lectures on topics in the law field." ], "decomposition": [ "Who is Quiet?", "What is #1 unable to do?", "How does one convey information as a lecturer?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Quiet (Metal Gear)-1" ] ], [ [ "Quiet (Metal Gear)-5" ] ], [ [ "Lecture-1" ] ], [ "operation" ] ], [ [ [ "Quiet (Metal Gear)-1" ] ], [ [ "Quiet (Metal Gear)-7" ] ], [ [ "Lecture-1" ] ], [ "operation" ] ], [ [ [ "Quiet (Metal Gear)-1" ] ], [ [ "Quiet (Metal Gear)-7" ] ], [ [ "Lecture-1" ] ], [ "operation" ] ] ] }, { "qid": "24fc2cfc65f5230e2cd4", "term": "Morphine", "description": "Pain medication of the opiate family", "question": "Could morphine cure HIV?", "answer": false, "facts": [ "Morphine is an opioid that is used to treat pain.", "HIV is a virus that has no known cure, but can be treated with anti-retroviral drugs." ], "decomposition": [ "What is morphine used to treat?", "What type of system is affected by contraction of HIV?", "Will treatment of #1 cure HIV-infected #2?" ], "evidence": [ [ [ [ "Morphine-1" ] ], [ [ "HIV-2" ] ], [ [ "HIV-49" ], "operation" ] ], [ [ [ "Morphine-1" ] ], [ [ "HIV-1" ] ], [ "operation" ] ], [ [ [ "Morphine-1" ] ], [ [ "HIV-2" ] ], [ [ "HIV-49" ], "operation" ] ] ] }, { "qid": "1ad2148ea8c553c849c2", "term": "1960", "description": "Year", "question": "Could you buy Hershey's Kisses in red foil with farthings after 1960?", "answer": false, "facts": [ "The British farthing was made obsolete at the end of 1960", "In 1962, Hershey's Kisses began to be sold in colored wrappers (such as red foil)" ], "decomposition": [ "When was the British farthing made obsolete?", "When did Hershey's Kisses begin selling candy sold in colored wrappers?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Farthing (British coin)-1" ] ], [ [ "Hershey's Kisses-10" ] ], [ "operation" ] ], [ [ [ "Farthing (British coin)-1" ] ], [ [ "Hershey's Kisses-10" ] ], [ "operation" ] ], [ [ [ "Farthing (British coin)-1" ] ], [ [ "Hershey's Kisses-11" ] ], [ "operation" ] ] ] }, { "qid": "1b6fbe4fe4254a8e6f6e", "term": "Ammonia", "description": "Chemical compound of nitrogen and hydrogen", "question": "Is an ammonia fighting cleaner good for pet owners?", "answer": true, "facts": [ "Ammonia is a component in pet urine.", "Ammonia has a very pungent and unpleasant odor." ], "decomposition": [ "What unsanitary substances contain ammonia?", "Is animal waste included in #1?" ], "evidence": [ [ [ [ "Ammonia-32" ] ], [ "operation" ] ], [ [ [ "Ammonia-32" ] ], [ [ "Urination-3" ] ] ], [ [ [ "Ammonia-32" ] ], [ [ "Ammonia-32" ] ] ] ] }, { "qid": "95b3654439ebaf17f4da", "term": "Orange County, California", "description": "County in California, United States", "question": "Did the founders of the biggest city in Orange County, California speak Italian?", "answer": false, "facts": [ "Anaheim is the biggest city in Orange County, California", "Anaheim was founded by fifty German families", "People from Germany speak German" ], "decomposition": [ "What is the biggest city in Orange County, California?", "Who founded #1?", "Did #2's speak Italian? " ], "evidence": [ [ [ [ "Anaheim, California-1" ] ], [ [ "Anaheim, California-5" ] ], [ [ "Anaheim, California-5" ] ] ], [ [ [ "Anaheim, California-1" ], "no_evidence" ], [ [ "Anaheim, California-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Anaheim, California-1" ] ], [ [ "Anaheim, California-5" ] ], [ [ "German language-1", "Italian language-1" ], "operation" ] ] ] }, { "qid": "38819f5a64ab543fbd90", "term": "Isaac", "description": "Biblical character", "question": "Did Isaac's father almost commit similar crime as Marvin Gay Sr.?", "answer": true, "facts": [ "Filicide is the act of killing a son or a daughter.", "Marvin Gay Sr. committed filicide in 1984 when he shot his son, singer Marvin Gaye.", "Isaac's father Abraham, was commanded by God to sacrifice his son Isaac, but was spared by an angel." ], "decomposition": [ "What crime did Marvin Gay Sr commit?", "Was the Biblical Abraham going to commit #1?" ], "evidence": [ [ [ [ "Marvin Gaye-36" ] ], [ [ "Binding of Isaac-3" ] ] ], [ [ [ "Marvin Gay Sr.-1" ] ], [ [ "Binding of Isaac-1" ] ] ], [ [ [ "Marvin Gay Sr.-7" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "905f2b35daf680a54787", "term": "The Jackson 5", "description": "American pop music family group", "question": "Could the Jackson 5 play a full game of rugby with each other?", "answer": false, "facts": [ "The Jackson 5 consisted of five members.", "A full game of rugby is played between 2 teams of 15 players each." ], "decomposition": [ "How many members are in the Jackson 5?", "How many players are there in a full game of rugby?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "The Jackson 5-1" ] ], [ [ "Rugby union-1" ] ], [ "operation" ] ], [ [ [ "The Jackson 5-1" ] ], [ [ "Rugby union-1" ] ], [ "operation" ] ], [ [ [ "The Jackson 5-1" ] ], [ [ "Rugby league positions-1" ] ], [ "operation" ] ] ] }, { "qid": "23e54033c083f4358ea6", "term": "Snickers", "description": "brand name chocolate bar made by Mars, Incorporated", "question": "Is Snickers helpful for weight loss?", "answer": false, "facts": [ "Weight loss is best achieved through watching the calories and sugar in the food you eat.", "Snickers is high in fat, sugar, and calories, while being low in nutritional value." ], "decomposition": [ "What must you avoid to best achieve weight loss?", "Are snickers avoid of those #1?" ], "evidence": [ [ [ [ "Dieting-1" ] ], [ [ "Snickers-8" ], "operation" ] ], [ [ [ "Dieting-1" ] ], [ [ "Snickers-1", "Snickers-11" ], "no_evidence", "operation" ] ], [ [ [ "Weight loss-12" ] ], [ [ "Snickers-10" ], "operation" ] ] ] }, { "qid": "1eeb5b135e7120037e70", "term": "Fair trade", "description": "form of trade", "question": "Is the United States the largest exporter of Fair Trade products?", "answer": false, "facts": [ "Fair trade is an arrangement designed to help producers in developing countries achieve good trading.", "The United States is not considered a developing country." ], "decomposition": [ "What countries can use the designation \"fair trade\" for their goods? ", "Does the US have the designation in #1?" ], "evidence": [ [ [ [ "European Fair Trade Association-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Fair trade-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Fair trade-1" ] ], [ [ "Developed country-3", "Developing country-1" ] ] ] ] }, { "qid": "3295844627a9bd1b9135", "term": "The Matrix", "description": "1999 science fiction action film directed by the Wachowskis", "question": "Was Harry Potter a better investment than The Matrix for Warner Bros.?", "answer": true, "facts": [ "Warner Bros. distributes several movie franchises including The Matrix, Harry Potter, and The Dark Knight.", "The Matrix had 2 sequels.", "Harry Potter had 7 sequels and several spin-offs.", "Harry Potter and the Deathly Hallows – Part 2 is Warner Bros. highest grossing film worldwide with a box office gross of $1,342,932,398." ], "decomposition": [ "How much did the Harry Potter (film series) gross?", "How much did the The Matrix (franchise) gross?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Harry Potter (film series)-4" ] ], [ [ "The Matrix (franchise)-4" ] ], [ "operation" ] ], [ [ [ "Harry Potter (film series)-4" ] ], [ [ "The Matrix (franchise)-4" ] ], [ "operation" ] ], [ [ [ "Harry Potter-3" ], "no_evidence" ], [ [ "The Matrix-36" ] ], [ "operation" ] ] ] }, { "qid": "73c52134ab2a903d86db", "term": "Lemon", "description": "citrus fruit", "question": "Does Lemon enhance the flavor of milk?", "answer": false, "facts": [ "When milk becomes acidic, the water and fats separate from each other.", "When the water and fats separate in milk, it becomes clumpy and has a bad texture.", "Lemon is highly acidic." ], "decomposition": [ "What is the effect of acid on milk?", "Does #1 make milk more desirable?", "Is Lemon acidic?", "Is #2 or #3 negative?" 
], "evidence": [ [ [ [ "Curdling-2" ], "no_evidence" ], [ "operation" ], [ [ "Lemon-2" ] ], [ "operation" ] ], [ [ [ "Soured milk-1" ] ], [ [ "Soured milk-1" ] ], [ [ "Lemon-21" ] ], [ "operation" ] ], [ [ [ "Curdling-2", "Curdling-3" ] ], [ [ "Curdling-2" ], "no_evidence" ], [ [ "Lemon-13", "Lemon-15" ] ], [ "operation" ] ] ] }, { "qid": "bd734e6cb6674e25dbe8", "term": "Tom Cruise", "description": "American actor and producer", "question": "Would Tom Cruise ever insult L. Ron Hubbard?", "answer": false, "facts": [ "Tom Cruise is an outspoken advocate for the Church of Scientology and its associated social programs.", "The Church of Scientology was founded by L. Ron Hubbard.", "L. Ron Hubbard is a revered and god-like figure in The Church of Scientology." ], "decomposition": [ "What was founded by Ron Hubbard? ", "Would Tom Cruise ever insult #1" ], "evidence": [ [ [ [ "L. Ron Hubbard-86" ] ], [ [ "Tom Cruise-36" ] ] ], [ [ [ "L. Ron Hubbard-1" ] ], [ [ "Tom Cruise-4" ] ] ], [ [ [ "L. Ron Hubbard-1" ] ], [ [ "Tom Cruise-4" ] ] ] ] }, { "qid": "87acce77a8b6362f4f96", "term": "Toyota Supra", "description": "A sports car and grand tourer manufactured by Toyota Motor Corporation", "question": "Would 2020 Toyota Supra lag behind at a Nascar rally?", "answer": true, "facts": [ "The 2020 Toyota Supra has a top speed of 155 MPH.", "Nascar stock cars routinely exceed 200 MPH." ], "decomposition": [ "What speeds do stock cars in a NASCAR race routinely attain?", "What is the top speed of a Toyota Supra?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Stock car racing-3" ] ], [ [ "Toyota Supra-61" ] ], [ "operation" ] ], [ [ [ "Stock car racing-2" ] ], [ [ "Toyota Supra-77" ] ], [ [ "Stock car racing-2", "Toyota Supra-77" ] ] ], [ [ [ "Stock car racing-65" ] ], [ [ "Toyota Supra-61" ] ], [ "operation" ] ] ] }, { "qid": "7b0a9a729b42f74e95aa", "term": "Reiki", "description": "Pseudoscientific healing technique", "question": "Can Reiki be stored in a bottle?", "answer": false, "facts": [ "Reiki practitioners use a technique called palm healing or hands-on healing through which a \"universal energy\" is said to be transferred through the palms of the practitioner to the patient in order to encourage emotional or physical healing.", "Medications are typically stored in pill bottles." ], "decomposition": [ "What basic property must a thing have to be able to be stored in a bottle?", "By definition, Reiki is a pseudoscientific healing what?", "Do #2's have the property stated in #1?" ], "evidence": [ [ [ [ "Bottle-1" ] ], [ [ "Reiki-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Reiki-1" ] ], [ "operation" ] ], [ [ [ "Bottle-1" ] ], [ [ "Reiki-1" ] ], [ "operation" ] ] ] }, { "qid": "7f6a3fac5a419d53e5f2", "term": "Düsseldorf", "description": "Place in North Rhine-Westphalia, Germany", "question": "Can you see Stonehenge from a window in Dusseldorf?", "answer": false, "facts": [ "Dusseldorf is a city in Germany.", "Stonehenge is a prehistoric monument in Wiltshire, England.", "Wiltshire England is around seven hours away from Dusseldorf Germany by car." ], "decomposition": [ "Where is Stonehenge located?", "Where is Düsseldorf located?", "Is #1 geographically close to #2?" 
], "evidence": [ [ [ [ "Stonehenge-1" ] ], [ [ "Düsseldorf-1" ] ], [ "no_evidence" ] ], [ [ [ "Stonehenge-69" ], "operation" ], [ [ "Düsseldorf-36" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Stonehenge-1" ] ], [ [ "Düsseldorf-1" ] ], [ "operation" ] ] ] }, { "qid": "c34998385575553d090f", "term": "Lorem ipsum", "description": "Placeholder text used in publishing and graphic design", "question": "Does Lorem ipsum backwards fail to demonstrate alliteration?", "answer": false, "facts": [ "Lorem ipsum backwards is Muspi merol.", "Alliteration is the occurrence of the same letter or sound at the beginning of adjacent or closely connected words.", "Examples of alliteration are phrases like: Mixed messages, and big bang." ], "decomposition": [ "What is Lorem ipsum spelled backwards?", "What property makes a group of words alliterative?", "Is #2 not present in #1?" ], "evidence": [ [ [ "operation" ], [ [ "Alliteration-1" ] ], [ "operation" ] ], [ [ "operation" ], [ [ "Alliteration-3" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Alliteration-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "1e8d7da6c0e0fea74a09", "term": "War", "description": "Intense violent conflict between states", "question": "Could casualties from deadliest war rival France's population?", "answer": false, "facts": [ "The deadliest war in history was World War II.", "Over 56 million people died during World War II.", "The population of France as of 2019 is 66 million." ], "decomposition": [ "What is the population of France?", "What was the deadliest war?", "How many people died in #2?", "Is #3 greater than #1?" ], "evidence": [ [ [ [ "France-1" ] ], [ [ "World War II-1" ] ], [ [ "World War II-103" ] ], [ "operation" ] ], [ [ [ "France-1" ] ], [ [ "World War II casualties-1" ] ], [ [ "World War II casualties-1" ] ], [ "operation" ] ], [ [ [ "France-1" ] ], [ [ "World War II-1" ] ], [ [ "World War II-1" ] ], [ "operation" ] ] ] }, { "qid": "fe9207eff7fdc6606843", "term": "Abortion", "description": "Intentionally ending pregnancy", "question": "Do pediatricians perform abortions?", "answer": false, "facts": [ "Pediatricians specialize in the treatment of children and adolescents. ", "Training in abortions is not provided to pediatricians in their schooling." ], "decomposition": [ "What is a Pediatrician's job duties?", "Is abortion in #1?" ], "evidence": [ [ [ [ "Pediatrics-1" ] ], [ [ "Pediatrics-1" ] ] ], [ [ [ "Pediatrics-1" ] ], [ "operation" ] ], [ [ [ "Pediatrics-1" ] ], [ [ "Abortion-1" ], "operation" ] ] ] }, { "qid": "3413ef0978d5b3f55b27", "term": "Olive", "description": "Species of plant", "question": "If you're reducing salt intake, are olives a healthy snack?", "answer": false, "facts": [ "The average amount of sodium a healthy person should have is 2,300mg daily.", "A half cup serving of olives has an average of 735mg sodium in it." ], "decomposition": [ "What is the average amount of sodium a healthy person should have a day?", "How much sodium is in a serving of olives?", "Is #2 a small percentage of #1?" 
], "evidence": [ [ [ [ "Health effects of salt-15", "Salt-5" ] ], [ [ "Olive-83" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Health effects of salt-15" ] ], [ [ "Olive-83" ] ], [ [ "Health effects of salt-15", "Olive-83" ] ] ], [ [ [ "Sodium in biology-3" ] ], [ [ "Olive-83" ] ], [ "operation" ] ] ] }, { "qid": "52e67de035f5274e26f3", "term": "Israelis", "description": "Ethnic group", "question": "Have the Israelis played the Hammerstein Ballroom?", "answer": false, "facts": [ "The Israelis are an ethnic group", "The Hammerstein Ballroom is a venue for concerts and musical performances" ], "decomposition": [ "What kind of groups play in the Hammerstein Ballroom?", "What kind of a group is the Israelis?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Hammerstein Ballroom-1", "Hammerstein Ballroom-4" ] ], [ [ "Israelis-1" ] ], [ "operation" ] ], [ [ [ "Hammerstein Ballroom-4" ] ], [ [ "Israelis-4" ] ], [ "operation" ] ], [ [ [ "Hammerstein Ballroom-1" ] ], [ [ "Israelis-1" ] ], [ "operation" ] ] ] }, { "qid": "d717f5321d1e1d3d5e37", "term": "Attack on Pearl Harbor", "description": "Surprise attack by the Imperial Japanese Navy on the U.S. Pacific Fleet in Pearl Harbor in Hawaii", "question": "Did the Pearl Harbor attack occur during autumn?", "answer": true, "facts": [ "Autumn runs from about September 20 to about December 20.", "Pearl Harbor was attacked on December 7, 1941." ], "decomposition": [ "When did the Pearl Harbor attack happen?", "Where is Pearl Harbor located?", "When is it autumn in #2?", "Does #1 fall within the range of #3?" ], "evidence": [ [ [ [ "Attack on Pearl Harbor-13" ] ], [ [ "Pearl Harbor-1" ] ], [ [ "Autumn-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Attack on Pearl Harbor-1" ] ], [ [ "Pearl Harbor-1" ] ], [ [ "Climate of Hawaii-11" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Attack on Pearl Harbor-1" ] ], [ [ "Pearl Harbor-1" ] ], [ [ "Autumn-3" ] ], [ "operation" ] ] ] }, { "qid": "6ae9ff023bf054219f6f", "term": "Al-Farabi", "description": "Philosopher in 10th century Central Asia", "question": "Did Al-Farabi ever meet Mohammed?", "answer": false, "facts": [ "Al-Farabi was born in 872 AD.", "Mohammed died in 832 AD." ], "decomposition": [ "How long ago did Mohammed die?", "When was Al-Farabi born?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Muhammad-63" ] ], [ [ "Al-Farabi-1" ] ], [ "operation" ] ], [ [ [ "Muhammad-1" ] ], [ [ "Al-Farabi-1" ] ], [ "operation" ] ], [ [ [ "Mohammed ibn Mohammed Alami-1" ] ], [ [ "Al-Farabi-9" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "caddf0bcf06640844a4c", "term": "NASCAR Cup Series", "description": "Top tier auto racing division within NASCAR", "question": "Could William Franklyn-Miller win a 2020 Nascar Cup Series race?", "answer": false, "facts": [ "William Franklyn-Miller is an actor known for the TV series Medici: The Magnificent.", "William Franklyn-Miller turned 16 in March of 2020.", "Nascar Cup Series races have a minimum age of 18." ], "decomposition": [ "Who is William Franklyn-Miller?", "How old is #1?", "What is the minimum age to join the Nascar Cup Series?", "Is #2 larger than #3?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Driver's licenses in the United States-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Medici (TV series)-15" ], "no_evidence" ], [ "no_evidence" ], [ [ "Learner's permit-19" ], "no_evidence" ], [ "operation" ] ], [ [ [ "William Franklyn-1" ], "operation" ], [ "no_evidence" ], [ [ "NASCAR Cup Series-11" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "d8bed090b4755e2f7b67", "term": "Knight", "description": "An award of an honorary title for past or future service with its roots in chivalry in the Middle Ages", "question": "Are the knights in the Medieval Times show not authentic knights?", "answer": true, "facts": [ "The Medieval Times show is popular in the United States.", "The United States does not actually bestow knighthood on its citizens.", "Authentic knights are citizens of certain countries, mainly the United Kingdom, who are given the honorary title by their country." ], "decomposition": [ "Which country is the Medieval Times show hosted in?", "Does #1 not confer knighthood on its citizen?" ], "evidence": [ [ [ [ "Medieval Times-2" ] ], [ "no_evidence" ] ], [ [ [ "Medieval Times-2" ] ], [ [ "Order of the British Empire-12", "Order of the British Empire-3" ], "operation" ] ], [ [ [ "Medieval Times-1" ] ], [ [ "Knight-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "21dd2c3906362e8860d3", "term": "Macbeth", "description": "play by William Shakespeare", "question": "Would costumes with robes and pointy hats be helpful for Macbeth?", "answer": true, "facts": [ "Macbeth features scenes with three witches throughout the play. ", "Witches are often displayed with pointy hats and long black robes." ], "decomposition": [ "What characters are in Macbeth?", "What characters wear pointy hats and robes?", "Would any of #1 wear #2?" ], "evidence": [ [ [ [ "Macbeth-2" ], "no_evidence" ], [ [ "Cloak-10", "Pointed hat-5" ] ], [ "operation" ] ], [ [ [ "Macbeth-5" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Macbeth-2" ] ], [ [ "Witch hat-1" ] ], [ "operation" ] ] ] }, { "qid": "0e4f1dbccd54ac5b8c32", "term": "Lord Voldemort", "description": "Fictional character of Harry Potter series", "question": "Was Lord Voldemort taught by Professor Dumbledore?", "answer": true, "facts": [ "Lord Voldemort grew up as the orphan boy Tom Riddle.", "Tom Riddle was brought to Hogwarts by Professor Dumbledore.", "Dumbledore was one of Riddle's teachers during his time as a student." ], "decomposition": [ "Who did Lord Voldemort grow up as?", "Where was #1 brought to by a professor as a child?", "Was Professor Dumbledore a teacher at #2 when Tom Riddle was there?" 
], "evidence": [ [ [ [ "Lord Voldemort-23" ] ], [ [ "Lord Voldemort-12" ] ], [ "operation" ] ], [ [ [ "Lord Voldemort-5" ] ], [ [ "Lord Voldemort-12" ] ], [ [ "Lord Voldemort-12" ] ] ], [ [ [ "Lord Voldemort-1" ] ], [ [ "Lord Voldemort-12" ] ], [ "no_evidence" ] ] ] }, { "qid": "616e4f23506bcd9c6ef1", "term": "Satanism", "description": "group of ideological and philosophical beliefs based on Satan", "question": "Is Christianity better for global warming than Satanism?", "answer": true, "facts": [ "Global warming happens because CO2 is released into the atmosphere and warms it up.", "Humans release two tons of carbon dioxide a year.", "Reducing the population will lead to a reduction of CO2 in the atmosphere.", "The Christian Crusades caused the deaths of nearly three million people.", "In 1980s and 1990s there were allegations of Satanic ritual deaths, though only a few cases were substantiated." ], "decomposition": [ "How much population reduction is needed for there to be an impact on carbon dioxide levels in the atmosphere?", "How many people have been killed in the name of Christianity?", "How many people have been killed in the name of Satanism?", "Is #2 closer to #1 than #3 is?" ], "evidence": [ [ [ [ "Carbon footprint-18" ], "no_evidence" ], [ [ "Christianity and violence-25", "Crusades-1", "Persecution of Muslims-33" ], "no_evidence" ], [ [ "Satanic ritual abuse-32" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Global warming-32", "Global warming-54" ], "no_evidence" ], [ [ "Crusades-1" ], "no_evidence" ], [ [ "Satanism-79" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c72aa50ebf75e16dfac5", "term": "Purple", "description": "Range of colors with the hues between blue and red", "question": "Were mollusks an ingredient in the color purple?", "answer": true, "facts": [ "Mollusks are a group of invertebrate animals including snails and slugs.", "Purple dye was used in the early 15th century BC to color clothing.", "The purple dye used in ancient Tyre was made from a liquid extracted from a gland in snails." ], "decomposition": [ "Which animal was used in making purple glands in ancient Tyre?", "Is #1 a mollusk?" ], "evidence": [ [ [ [ "Tyrian purple-1" ] ], [ [ "Sea snail-1" ], "operation" ] ], [ [ [ "Tyrian purple-1" ] ], [ [ "Sea snail-1" ] ] ], [ [ [ "Purple-5" ] ], [ [ "Bolinus brandaris-1" ], "operation" ] ] ] }, { "qid": "690fdd980370f3145504", "term": "Artisan", "description": "skilled craft worker who makes or creates things by hand", "question": "Are twinkies considered artisan made products?", "answer": false, "facts": [ "Twinkies are mass produced in multiple commercial bakeries.", "In bakeries, Twinkies are made by machines." ], "decomposition": [ "How are artisan-made products made?", "How are Twinkies produced?", "Does #2 match with #1?" 
], "evidence": [ [ [ [ "Artisanal food-1" ] ], [ [ "Twinkie-2" ] ], [ "operation" ] ], [ [ [ "Artisanal food-1" ] ], [ [ "Twinkie-12", "Twinkie-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Artisan-2" ] ], [ [ "Hostess CupCake-4" ] ], [ "operation" ] ] ] }, { "qid": "6c5e2f76bd94dfb33396", "term": "Sable", "description": "Species of marten", "question": "Was the sable depicted in Marvel comics anthropomorphic?", "answer": false, "facts": [ "Anthropomorphism is the process of giving human characteristics to an animal or non human object.", "A sable is a carnivorous mammal of the weasel family.", "Silver Sable was a female character that first appeared in Marvel Comics in 1985.", "Silver Sable was a human mercenary that wore a silver suit and used hand to hand combat and weaponry to battle war criminals." ], "decomposition": [ "What characteristics do anthropomorphic characters have? ", "What Marvel character is based on a sable?", "Does #2 fit the criteria for #1?" ], "evidence": [ [ [ [ "Talking animals in fiction-1" ] ], [ [ "Silver Sable-1" ] ], [ "operation" ] ], [ [ [ "Anthropomorphism-1" ] ], [ [ "Silver Sable-1" ] ], [ "operation" ] ], [ [ [ "Anthropomorphism-1" ] ], [ [ "Silver Sable-1" ] ], [ "operation" ] ] ] }, { "qid": "1c56958c04a98ccd5d1e", "term": "Mercenary", "description": "Soldier who fights for hire", "question": "Did mercenaries fight for England in the Glorious Revolution?", "answer": false, "facts": [ "Mercenaries are soldiers for hire", "There was no military conflict in England in the Glorious Revolution" ], "decomposition": [ "What happened in England during the Glorious Revolution?", "Did #1 involve combat?" ], "evidence": [ [ [ [ "Glorious Revolution-1" ] ], [ [ "Wincanton Skirmish-1" ] ] ], [ [ [ "Glorious Revolution-1", "Glorious Revolution-38" ] ], [ "operation" ] ], [ [ [ "Glorious Revolution-1" ] ], [ [ "Glorious Revolution-1" ] ] ] ] }, { "qid": "b3234ad7249fde05a126", "term": "Groundhog Day", "description": "Traditional method of weather prediction", "question": "Is groundhog day used as a global season indicator? ", "answer": false, "facts": [ "Groundhog Day is an American tradition that occurs on February 2nd. ", "Groundhog Day derives from a superstition that if a groundhog sees it's shadow it will mean there are six more weeks of winter.", "People living in the southern hemisphere of the world experience summer while the people in the north experience winter.", "Different global cultures define the dates of seasons differently." ], "decomposition": [ "Where is Groundhog Day celebrated?", "Is #1 in both the northern and southern hemisphere?" ], "evidence": [ [ [ [ "Groundhog Day-1" ] ], [ [ "Southern Hemisphere-1" ], "operation" ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "North America-1", "North America-10", "North America-11" ] ] ], [ [ [ "Groundhog Day-1" ] ], [ [ "Winter-1" ], "operation" ] ] ] }, { "qid": "8ce466f6f1430a309edf", "term": "Yeti", "description": "Folkloric ape-like creature from Asia", "question": "Would a Yeti be likely to have prehensile limbs?", "answer": true, "facts": [ "The animals that Yetis are said to look similar to are able to use their hands or toes to grasp items", "The ability to grasp with hands or other limbs is to be prehensile. " ], "decomposition": [ "What does it mean to be prehensile?", "What animals are Yetis said to look like?", "Would #2 be considered #1?" 
], "evidence": [ [ [ [ "Prehensile feet-1" ] ], [ [ "Yeti-28" ] ], [ "operation" ] ], [ [ [ "Prehensile feet-1" ] ], [ [ "Yeti-4" ] ], [ [ "Yeti-4" ] ] ], [ [ [ "Prehensility-1" ] ], [ [ "Yeti-1" ] ], [ "operation" ] ] ] }, { "qid": "0d6eda5f7e8e806c4d78", "term": "Asteroid", "description": "Minor planet that is not a comet", "question": "Can an asteroid be linked with virginity?", "answer": true, "facts": [ "An asteroid discovered in 1807 was named Vesta", "Vesta is the Roman virgin goddess of hearth, home and family" ], "decomposition": [ "What was the name of the asteroid that was discovered in 1807?", "What did #1 stand for as a Roman goddess?", "Is #2 related to virginity?" ], "evidence": [ [ [ [ "4 Vesta-1" ] ], [ [ "Vesta (mythology)-1" ] ], [ [ "Vesta (mythology)-8" ] ] ], [ [ [ "4 Vesta-1" ] ], [ [ "Vesta (mythology)-1" ] ], [ "operation" ] ], [ [ [ "4 Vesta-1" ] ], [ [ "Vesta (mythology)-1" ] ], [ "operation" ] ] ] }, { "qid": "283e6170947e33043c1b", "term": "Sugar Ray Robinson", "description": "American boxer", "question": "Did Sugar Ray Robinson win a fight against Canelo Alvarez?", "answer": false, "facts": [ "Sugar Ray Robinson died in 1989", "Canelo Alvarez was born in 1990" ], "decomposition": [ "In what year did Sugar Ray Robinson die?", "In what year was Canelo Alvarez born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Sugar Ray Robinson-28" ] ], [ [ "Canelo Álvarez-1" ] ], [ "operation" ] ], [ [ [ "Sugar Ray Robinson-1" ] ], [ [ "Canelo Álvarez-1" ] ], [ "operation" ] ], [ [ [ "Sugar Ray Robinson-1" ] ], [ [ "Canelo Álvarez-1" ] ], [ "operation" ] ] ] }, { "qid": "9d1e49ac4adff55b1b5c", "term": "Royal Observatory, Greenwich", "description": "observatory in Greenwich, London, UK", "question": "In geometry terms, is the Royal Observatory in Greenwich similar to a yield sign?", "answer": false, "facts": [ "The main building of the Royal Observatory is the Octagon Room.", "A yield sign is shaped like a rounded triangle.", "Two figures are similar if they have the same shape but not necessarily the same size." ], "decomposition": [ "What is the shape of the Royal Observatory in Greenwich?", "What is the shape of a yield sign?", "Is #1 geometrically similar to #2?" ], "evidence": [ [ [ [ "Royal Observatory, Greenwich-31" ] ], [ [ "Yield sign-3" ] ], [ "operation" ] ], [ [ [ "Royal Observatory, Greenwich-1" ], "no_evidence" ], [ [ "Yield sign-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Royal Observatory, Greenwich-7" ] ], [ [ "Yield sign-5" ] ], [ "operation" ] ] ] }, { "qid": "dfe89f86d25ea26ab85d", "term": "Monty Python's Flying Circus", "description": "British sketch comedy television series", "question": "Did Monty Python write the Who's on First sketch?", "answer": false, "facts": [ "Who's on First debuted in 1945.", "Monty Python's first show was in 1969." ], "decomposition": [ "When was the Who's on First sketch first performed?", "When was the debut of the Monty Python show?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Who's on First?-2" ] ], [ [ "Monty Python-1" ] ], [ "operation" ] ], [ [ [ "Abbott and Costello-25", "Who's on First?-3" ] ], [ [ "Monty Python's Flying Circus-16" ] ], [ "operation" ] ], [ [ [ "Who's on First?-2", "Who's on First?-3" ] ], [ [ "Monty Python-1" ] ], [ "operation" ] ] ] }, { "qid": "5fad3ffe51394cac5d37", "term": "Charlemagne", "description": "King of the Franks, King of Italy, and Holy Roman Emperor", "question": "Did Charlemagne have a bar mitzvah?", "answer": false, "facts": [ "Charlemagne was a Roman Catholic", "Bar mitzvah is a coming of age ceremony in Judaism" ], "decomposition": [ "What was Charlemagne's religion?", "In what religion is a bar mitzvah celebrated?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Charlemagne-50" ] ], [ [ "Bar and bat mitzvah-1" ] ], [ "operation" ] ], [ [ [ "Charlemagne-7" ] ], [ [ "Bar and bat mitzvah-2" ] ], [ "operation" ] ], [ [ [ "Charlemagne-2" ] ], [ [ "Bar and bat mitzvah-1" ] ], [ "operation" ] ] ] }, { "qid": "f778fcf4fce27aa1f7ae", "term": "Common Era", "description": "alternative (and religiously neutral) naming of the traditional calendar era, Anno Domini", "question": "Is entire Common Era minuscule to lifespan of some trees?", "answer": true, "facts": [ "The Common Era has lasted for over 2,000 years as of 2020.", "A tree named Methuselah, from California's White Mountains, is almost 5,000 years old." ], "decomposition": [ "How long has the Common Era lasted?", "How old is the oldest known tree?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Common Era-1" ] ], [ [ "Methuselah (tree)-3" ] ], [ [ "Methuselah (tree)-3" ], "operation" ] ], [ [ [ "21st century-1" ] ], [ [ "Methuselah (tree)-1" ] ], [ "operation" ] ], [ [ [ "Common Era-1" ], "no_evidence" ], [ [ "Pinus longaeva-1" ] ], [ "operation" ] ] ] }, { "qid": "98c69a646b5901ee10ce", "term": "Kaffir lime", "description": "A citrus fruit native to tropical Southeast Asia and southern China", "question": "Would kaffir lime be good in a White Russian?", "answer": false, "facts": [ "A White Russian is a drink containing cream, vodka, and Kahlua.", "Mixing lime and cream results in curdled milk, which is not good to drink." ], "decomposition": [ "What are the ingredients of a White Russian?", "Do any of #1 curdle when mixed with lime?" ], "evidence": [ [ [ [ "White Russian (cocktail)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "White Russian (cocktail)-1" ] ], [ [ "Citric acid-1", "Curdling-2" ], "no_evidence", "operation" ] ], [ [ [ "White Russian (cocktail)-1" ] ], [ [ "Curdling-3" ], "operation" ] ] ] }, { "qid": "1969318e30a31e49e5ca", "term": "Chinook salmon", "description": "species of fish", "question": "Could eating Chinook salmon help Ryan Reynolds?", "answer": true, "facts": [ "Chinook salmon is high in omega-3 fatty acids.", "Omega-3 fatty acids can aid treatment of depression. ", "Ryan Reynolds has struggled with depression." ], "decomposition": [ "What mental disorder did Ryan Reynolds suffer from?", "What nutrient may be able to aid in treatment of #1?", "Is chinook salmon high in #2?" 
], "evidence": [ [ [ [ "Ryan Reynolds-24" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Ryan Reynolds-24" ] ], [ [ "Anxiety disorder-29", "Anxiety-58" ], "no_evidence" ], [ [ "Chinook salmon-2" ], "no_evidence" ] ], [ [ [ "Ryan Reynolds-24" ] ], [ [ "Generalized anxiety disorder-20" ] ], [ "operation" ] ] ] }, { "qid": "35cc615c2a3356d8ba26", "term": "Depression (mood)", "description": "state of low mood and fatigue", "question": "In teenagers and young adults with depression, are SSRI medications less safe than they are for adults?", "answer": true, "facts": [ "In teens, SSRI medications may increase the risk of suicidal thinking.", "In adults over 25, SSRI medications are regarded as generally safe." ], "decomposition": [ "What are potential side effects of SSRIs for adults?", "What are the potential side effects of SSRIs for teenagers?", "Are the hazards in #2 worse than the hazards in #1?" ], "evidence": [ [ [ [ "Selective serotonin reuptake inhibitor-37" ] ], [ [ "Selective serotonin reuptake inhibitor-33" ] ], [ "operation" ] ], [ [ [ "Development and discovery of SSRI drugs-10" ], "no_evidence" ], [ [ "Development and discovery of SSRI drugs-10" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Selective serotonin reuptake inhibitor-21" ], "no_evidence" ], [ [ "Selective serotonin reuptake inhibitor-33" ] ], [ "operation" ] ] ] }, { "qid": "566d1ac4f66f33b71aee", "term": "Anorexia nervosa", "description": "Eating disorder characterized by refusal to maintain a healthy body weight, and fear of gaining weight due to a distorted self image", "question": "Are red legs a sign of failing health in those with Anorexia Nervosa?", "answer": true, "facts": [ "Heart failure or disease can lead to the legs becoming red or pink in color.", "Anorexia Nervosa can lead to heart failure and death." ], "decomposition": [ "What is a complication associated with Anorexia Nervosa that affects the heart?", "What happens to a person's legs when #1 occurs?", "Is #2 a sign of failing health?" ], "evidence": [ [ [ [ "Anorexia nervosa-58" ], "no_evidence" ], [ [ "Rash-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Anorexia nervosa-1" ], "no_evidence" ], [ [ "Cardiovascular disease-6" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Anorexia nervosa-53" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "46a30058a6a81a3b08df", "term": "Cheshire", "description": "County of England", "question": "Do citizens of Cheshire sing La Marseillaise?", "answer": false, "facts": [ "Cheshire is a county located in England in the United Kingdom", "La Marseillaise is the national anthem of France" ], "decomposition": [ "Which song is referred to as 'La Marseillaise'?", "#1 is usually sung by the citizens of which country?", "Which country is Cheshire located in?", "Is #2 the same as #3?" 
], "evidence": [ [ [ [ "La Marseillaise-3" ] ], [ [ "La Marseillaise-3" ] ], [ [ "Cheshire-53" ] ], [ [ "Cheshire-54" ], "operation" ] ], [ [ [ "La Marseillaise-1" ] ], [ [ "La Marseillaise-1" ] ], [ [ "Cheshire-1" ] ], [ "operation" ] ], [ [ [ "La Marseillaise-1" ] ], [ [ "La Marseillaise-1" ] ], [ [ "Cheshire-1" ] ], [ "operation" ] ] ] }, { "qid": "593e66cbdec8e3a15852", "term": "Ancient Greek", "description": "Version of the Greek language used from roughly the 9th century BCE to the 6th century CE", "question": "Are seasons of Survivor surpassed by number of Ancient Greek letters?", "answer": false, "facts": [ "The reality show Survivor has aired 40 seasons as of 2020.", "The Ancient Greek alphabet consisted of 24 letters." ], "decomposition": [ "How many seasons of Survivor have there been as of 2020?", "How many letters were in the Ancient Greek alphabet?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Survivor (American TV series)-4" ] ], [ [ "Greek alphabet-2" ] ], [ "operation" ] ], [ [ [ "Survivor (American TV series)-4" ] ], [ [ "Greek alphabet-2" ] ], [ "operation" ] ], [ [ [ "Survivor: Winners at War-1" ] ], [ [ "Greek alphabet-2" ] ], [ "operation" ] ] ] }, { "qid": "b039b0d6e263ee8fb81a", "term": "Jumping spider", "description": "family of arachnids", "question": "Would a jumping spider need over half a dozen contact lenses?", "answer": true, "facts": [ "Jumping spiders have eight eyes.", "Half a dozen objects is equal to six objects.", "One contact lens is worn per eye." ], "decomposition": [ "How many eyes do jumping spiders have?", "How much is half a dozen?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Jumping spider-3" ] ], [ [ "Dozen-3" ] ], [ [ "Jumping spider-3" ] ] ], [ [ [ "Jumping spider-1" ] ], [ [ "Dozen-1" ], "operation" ], [ "operation" ] ], [ [ [ "Jumping spider-1" ] ], [ [ "Dozen-3", "One half-1" ] ], [ "operation" ] ] ] }, { "qid": "45ff478a039792001dbc", "term": "Harvey Milk", "description": "American politician who became a martyr in the gay community", "question": "Would Harvey Milk have approved of Obama?", "answer": true, "facts": [ "Obama awarded Harvey Milk a posthumous Medal of Freedom. ", "Obama was known for supporting marriage equality and LGBT rights. " ], "decomposition": [ "What was Harvey Milk known for?", "Did Obama support #1?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Harvey Milk-1" ] ], [ [ "Barack Obama-4" ] ], [ "operation" ] ], [ [ [ "Harvey Milk-1" ] ], [ [ "Barack Obama-4" ] ], [ "operation" ] ], [ [ [ "Harvey Milk-4" ] ], [ [ "Stuart Milk-4" ] ], [ "operation" ] ] ] }, { "qid": "1510637210330d817e74", "term": "Dual-energy X-ray absorptiometry", "description": "diagnostic test for bone mineral density testing", "question": "Would dual-energy X-ray absorptiometry be useful if performed on a crab?", "answer": false, "facts": [ "Dual-energy X-ray absorptiometry is typically used to diagnose and follow osteoporosis.", "Osteoporosis is a disease in which bone weakening increases the risk of a broken bone.", "Crabs are invertebrates.", "Invertebrates do not have bones." ], "decomposition": [ "What condition is diagnosed with dual-energy X-ray absorptiometry?", "What body parts are affected by #1?", "Do crabs have #2?" 
], "evidence": [ [ [ [ "Dual-energy X-ray absorptiometry-1" ] ], [ [ "Dual-energy X-ray absorptiometry-2", "Osteoporosis-1" ] ], [ [ "Crab-2" ], "operation" ] ], [ [ [ "Dual-energy X-ray absorptiometry-2" ] ], [ [ "Osteoporosis-1" ] ], [ [ "Crab-1" ] ] ], [ [ [ "Dual-energy X-ray absorptiometry-2" ] ], [ [ "Osteoporosis-1" ] ], [ [ "Crab-1", "Invertebrate-1" ] ] ] ] }, { "qid": "367088abcacf2268edac", "term": "Aloe vera", "description": "Species of plant", "question": "Would a house full of aloe vera hypothetically be ideal for Unsinkable Sam?", "answer": false, "facts": [ "Aloe vera is a plant species that is toxic to cats.", "Unsinkable Sam was a cat that supposedly served during World War II." ], "decomposition": [ "What kind of animal was Unsinkable Sam?", "What is aloe vera?", "Is #2 safe for #1 to be around?" ], "evidence": [ [ [ [ "Unsinkable Sam-1" ] ], [ [ "Aloe vera-1" ] ], [ "no_evidence" ] ], [ [ [ "Unsinkable Sam-1" ] ], [ [ "Aloe vera-1" ] ], [ [ "Aloe vera-20", "Aloe vera-21" ] ] ], [ [ [ "Unsinkable Sam-1" ] ], [ [ "Aloe vera-1" ] ], [ [ "Aloe vera-21" ], "no_evidence", "operation" ] ] ] }, { "qid": "b208e5d641fbf43bc647", "term": "Hour", "description": "unit of time", "question": "Can a human eat an entire 12-lb roast turkey in an hour? ", "answer": false, "facts": [ "A serving of roast turkey is about 1 pound of uncooked turkey.", "A 12-lb roast turkey would contain about 12 servings of cooked turkey meat.", "One human cannot eat 12 1-lb servings of turkey in one sitting." ], "decomposition": [ "What is the most food a person has eaten in one hour?", "A 12 pound uncooked turkey provides how much cooked meat?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Hot dog-21", "Nathan's Hot Dog Eating Contest-2", "Nathan's Hot Dog Eating Contest-5", "Pound (mass)-31" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Competitive eating-2", "Competitive eating-6" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "669b3c6a48f494a5d74e", "term": "Kurt Cobain", "description": "American singer, composer, and musician", "question": "Was Kurt Cobain's death indirectly caused by Daniel LeFever?", "answer": true, "facts": [ "Kurt Cobain committed suicide with a shotgun.", "Daniel LeFever was the inventor of the American hammerless shotgun." ], "decomposition": [ "What object caused the death of Kurt Cobain?", "Was #1 invented by Daniel LeFever?" ], "evidence": [ [ [ [ "Suicide of Kurt Cobain-1" ] ], [ "operation" ] ], [ [ [ "Kurt Cobain-3" ] ], [ [ "Daniel Myron LeFever-1" ] ] ], [ [ [ "Kurt Cobain-55" ] ], [ [ "Shotgun-38" ] ] ] ] }, { "qid": "302819b66045c59ea78f", "term": "Charles Manson", "description": "American criminal, cult leader", "question": "Was Charles Manson's body unwanted?", "answer": false, "facts": [ "Charles Manson's body was debated over for so long that he had to stay on ice.", "Charles Manson had four people fighting over his corpse." ], "decomposition": [ "How many people tried to claim Charles Manson's body?", "Is #1 equal to zero?" 
], "evidence": [ [ [ [ "Charles Manson-66" ] ], [ "operation" ] ], [ [ [ "Charles Manson-66" ] ], [ "operation" ] ], [ [ [ "Charles Manson-66" ] ], [ "operation" ] ] ] }, { "qid": "d8ba1dd2575be52d80be", "term": "Peach", "description": "species of fruit tree (for the fruit use Q13411121)", "question": "Are peaches best eaten when firm?", "answer": false, "facts": [ "Peaches are sweeter and easier to digest when they are soft to the touch.", "People tend to let their peaches sit until they soften. " ], "decomposition": [ "When a peach is considered best to be eaten, what characteristics does it have?", "When a peach is firm, does it have most of the characteristics listed in #1?" ], "evidence": [ [ [ [ "Peach-38" ], "no_evidence" ], [ [ "Peach-38" ], "no_evidence" ] ], [ [ [ "Peach-38" ] ], [ [ "Peach (fruit)-5" ] ] ], [ [ [ "Peach (fruit)-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c77b8685911cb05e618b", "term": "Jack Black", "description": "American actor, comedian, musician, music producer and youtuber.", "question": "Is Jack Black's height enough to satisfy Coronavirus distancing?", "answer": false, "facts": [ "Jack Black is 5'6\" tall.", "The CDC recommends people stay 6 feet apart." ], "decomposition": [ "How tall is Jack Black?", "What is the minimum recommended length for social distancing?", "Is #1 at least #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Social distancing-9" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Social distancing-9" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Jack Black-1" ], "no_evidence" ], [ [ "Social distancing-9" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0aba6ec3a1e6fa166b32", "term": "Soldier", "description": "one who serves as part of an organized armed force", "question": "Can children be soldiers in the US Army?", "answer": false, "facts": [ "A soldier is a member of the armed forces.", "The US Army is the land warfare branch of the United States.", "The minimum age for enlistment in the US Army is 18 years old. ", "A child is considered is considered to be anyone under the age of 18 years old." ], "decomposition": [ "What is the minimum age to enlist in the US Army?", "Is a child's age above #1?" ], "evidence": [ [ [ [ "Children in the military-68" ] ], [ [ "Children in the military-1" ], "operation" ] ], [ [ [ "United States Armed Forces-3" ] ], [ [ "Child-4" ], "operation" ] ], [ [ [ "United States Armed Forces-3" ], "no_evidence" ], [ [ "Child-4" ] ] ] ] }, { "qid": "f83ea7846fa2bfb0739c", "term": "Hotel manager", "description": "person managing a hotel", "question": "Could Charlie Bucket be a hotel manager?", "answer": false, "facts": [ "Charlie Bucket is a fictional character from \"Charlie and the Chocolate Factory\", portrayed as a child.", "Children cannot be hotel managers." ], "decomposition": [ "How was Charlie Bucket portrayed in Charlie and the Chocolate Factory?", "Is #1 as an adult?" 
], "evidence": [ [ [ [ "Charlie and the Chocolate Factory (film)-6" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Charlie and the Chocolate Factory-4" ] ], [ "operation" ] ], [ [ [ "Charlie and the Chocolate Factory-27" ] ], [ [ "Boy-1" ], "operation" ] ] ] }, { "qid": "3836171bc71856136668", "term": "White", "description": "color", "question": "Can paresthesia be caused by a white pigment?", "answer": true, "facts": [ "Tingling in the hands or feet is a type of paresthesia", "Lead white exposure can lead to lead poisoning", "Symptoms of lead poisoning include tingling in the hands and feet" ], "decomposition": [ "What kinds of white pigment have adverse health effects?", "What are the symptoms of paresthesia?", "Can any of #1 cause #2?" ], "evidence": [ [ [ [ "Lead paint-7" ] ], [ [ "Paresthesia-1" ] ], [ [ "Lead poisoning-1" ], "operation" ] ], [ [ "no_evidence" ], [ [ "Paresthesia-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Powder-11" ], "no_evidence" ], [ [ "Paresthesia-1" ] ], [ "operation" ] ] ] }, { "qid": "589eb85285b438a0c59f", "term": "Astrology", "description": "Pseudoscience claiming celestial objects influence human affairs", "question": "Would Elon Musk be more likely to know about astrology than physics?", "answer": false, "facts": [ "Elon Musk is a businessman and engineer with a bachelor's degree and unfinished Ph.D. in physics", "Engineering is based on principles of applied physics", "Astrology is not a form of science or applied science" ], "decomposition": [ "Which field(s) of study did Elon Musk specialize in?", "Is Astrology closely related to (any of) #1?" ], "evidence": [ [ [ [ "Elon Musk-10" ] ], [ [ "Astrology-34", "Physics-2" ], "operation" ] ], [ [ [ "Elon Musk-2" ] ], [ [ "Astrology-1" ] ] ], [ [ [ "Elon Musk-10" ] ], [ [ "Astrology-1" ], "operation" ] ] ] }, { "qid": "987260ffef60cb2c5439", "term": "Pearl Harbor", "description": "Harbor on the island of Oahu, Hawaii", "question": "Is Pearl Harbor the mythical home of a shark goddess?", "answer": true, "facts": [ "The native Hawaiian people believed Pearl Harbor was the home of Kaʻahupahau.", "Kaʻahupahau is a shark goddess in Hawaiian legends. " ], "decomposition": [ "What did the native Hawaiian people believe Pearl Harbor was home to?", "What was #1?", "IS #2 the same as a shark goddess?" ], "evidence": [ [ [ [ "Pearl Harbor-2" ] ], [ [ "Pearl Harbor-2" ] ], [ [ "Pearl Harbor-2" ] ] ], [ [ [ "Pearl Harbor-2" ] ], [ [ "Pearl Harbor-2" ] ], [ "operation" ] ], [ [ [ "Pearl Harbor-2" ] ], [ [ "Pearl Harbor-2" ] ], [ "operation" ] ] ] }, { "qid": "557b6230413fc65f6a16", "term": "Ocean sunfish", "description": "species of fish", "question": "Would it be impossible to keep an ocean sunfish and a goldfish in the same tank?", "answer": true, "facts": [ "Ocean sunfish live in salt water environments.", "Goldfish live in fresh water environments.", "Putting a fish into the wrong water type can cause them to die." 
], "decomposition": [ "What kind of water habitat does the ocean sunfish live in?", "What kind of water habitat do goldfish live in?", "Is #1 interchangeable with #2" ], "evidence": [ [ [ [ "Ocean sunfish-21", "Ocean-2" ] ], [ [ "Goldfish-1" ] ], [ "operation" ] ], [ [ [ "Ocean sunfish-1" ] ], [ [ "Goldfish-1" ] ], [ "operation" ] ], [ [ [ "Ocean-11", "Saltwater fish-3" ] ], [ [ "Freshwater fish-3", "Goldfish-1" ] ], [ [ "Freshwater fish-3", "Saltwater fish-3" ] ] ] ] }, { "qid": "2294d2f030b7fd6f77e3", "term": "Maize", "description": "Cereal grain", "question": "Did Demi Lovato's ancestors help turn maize into popcorn?", "answer": true, "facts": [ "Demi Lovato's father is Mexican.", "Maize is another word for corn.", "Corn was first domesticated in southern Mexico about 10,000 years ago.", "Popcorn is made from kernels of corn." ], "decomposition": [ "Where is popcorn from originally?", "Where is Demi Lovato's ancestors from?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Popcorn-5" ] ], [ [ "Demi Lovato-5" ] ], [ "operation" ] ], [ [ [ "Popcorn-5" ] ], [ [ "Demi Lovato-5" ] ], [ "operation" ] ], [ [ [ "Popcorn-5" ] ], [ [ "Demi Lovato-5" ] ], [ "operation" ] ] ] }, { "qid": "027a7b964c31a0540f9c", "term": "Baptism", "description": "Christian rite of admission and adoption, almost invariably with the use of water", "question": "Was Alexander the Great baptized?", "answer": false, "facts": [ "Baptism is a symbolic Christian rite using water.", "Christianity started in the first century AD.", "Alexander the Great lived from 356 BC- 323 BC." ], "decomposition": [ "Baptism is a rite in which religion?", "When did #1 develop?", "When did Alexander the Great die?", "Is #2 before #3?" ], "evidence": [ [ [ [ "Baptism-1" ] ], [ [ "Christianity in the 1st century-2" ] ], [ [ "Alexander the Great-62" ] ], [ "operation" ] ], [ [ [ "Baptism-1" ] ], [ [ "Baptism-8" ] ], [ [ "Alexander the Great-62" ] ], [ [ "Alexander the Great-62", "Baptism-8" ], "operation" ] ], [ [ [ "Baptism-1" ] ], [ [ "Christianity-3" ] ], [ [ "Alexander the Great-1" ] ], [ "operation" ] ] ] }, { "qid": "4a0e604f174af36e3ace", "term": "Globalization", "description": "process of international integration arising from the interchange of world views, products, ideas, and other aspects of culture", "question": "Are System of a Down opposed to globalization?", "answer": true, "facts": [ "In Boom!, System of a Down condemns globalization.", "The lead vocalist of the band System of a Down is outspoken against globalization. " ], "decomposition": [ "What is globalization?", "Is the lead vocalist of the band System of a Down against #1?" ], "evidence": [ [ [ [ "Globalization-1" ] ], [ [ "Serj Tankian-38" ], "no_evidence", "operation" ] ], [ [ [ "Globalization-1" ] ], [ [ "Serj Tankian-1", "Serj Tankian-16" ], "no_evidence", "operation" ] ], [ [ [ "Globalization-1" ] ], [ [ "System of a Down-32" ] ] ] ] }, { "qid": "c89862fb8a7e09c34f47", "term": "Scottish people", "description": "ethnic inhabitants of Scotland", "question": "Are Scottish people Albidosi?", "answer": true, "facts": [ "The Scottish people emerged from an amalgamation of two Celtic-speaking peoples, the Picts and Gaels, who founded the Kingdom of Scotland.", "What the Picts called themselves is unknown. It has been proposed that they called themselves Albidosi.", "The Kingdom of Scotland is also known as the Kingdom of Alba." ], "decomposition": [ "Which tribes did the Scottish people emerge from?", "Have any of #1 been referred to as Albidosi?" 
], "evidence": [ [ [ [ "Scottish people-1" ] ], [ [ "Picts-6" ] ] ], [ [ [ "Scottish people-1" ] ], [ [ "Picts-6" ] ] ], [ [ [ "Scottish people-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "24b3b5e476a4c7b4824e", "term": "Aldi", "description": "Germany-based supermarket chain", "question": "Would you spend less on your food at Aldi than at Whole Foods?", "answer": true, "facts": [ "Whole Foods is known for costing 10-20% more than other stores.", "Aldi is known for having deeply discounted food and home supplies." ], "decomposition": [ "What is Aldi mainly known for?", "Compared to other stores, how do Whole Foods prices compare?", "Would #1 have goods that cost less than #2?" ], "evidence": [ [ [ [ "Aldi-1" ] ], [ [ "Whole Foods Market-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Aldi-1" ] ], [ [ "Whole Foods Market-1", "Whole Foods Market-24" ], "no_evidence" ], [ [ "Discount store-1" ], "operation" ] ], [ [ [ "Aldi-1" ] ], [ [ "Wild Oats Markets-11" ] ], [ "operation" ] ] ] }, { "qid": "7459eacff621fe539afc", "term": "Los Angeles Memorial Sports Arena", "description": "Former arena in California, United States", "question": "Was Los Angeles Memorial Sports Arena hypothetically inadequate for hosting Coachella?", "answer": true, "facts": [ "The Los Angeles Memorial Sports Arena had a capacity of 16,740 people.", "Coachella has had attendance numbers in excess of 99.000 people.", "Coachella relies on an outdoor set up to accommodate the massive crowds." ], "decomposition": [ "How many people can the Los Angeles Memorial Sports Arena hold?", "How many people usually attend Coachella?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Los Angeles Memorial Sports Arena-15" ] ], [ [ "Coachella Valley Music and Arts Festival-3" ] ], [ [ "Coachella Valley Music and Arts Festival-3", "Los Angeles Memorial Sports Arena-15" ], "operation" ] ], [ [ [ "Los Angeles Memorial Sports Arena-15" ] ], [ [ "Coachella Valley Music and Arts Festival-3" ] ], [ "operation" ] ], [ [ [ "Los Angeles Memorial Sports Arena-15" ] ], [ [ "Coachella Valley Music and Arts Festival-20" ] ], [ [ "Coachella Valley Music and Arts Festival-20" ], "operation" ] ] ] }, { "qid": "b8de21dae500bfa66b7b", "term": "Viscosity", "description": "Resistance of a fluid to shear deformation", "question": "Does water have viscosity?", "answer": false, "facts": [ "Viscosity is resistance of fluid to deformation.", "Water is not resistant to deformation." ], "decomposition": [ "What is viscosity?", "Is water #1?" ], "evidence": [ [ [ [ "Viscosity-1" ] ], [ "operation" ] ], [ [ [ "Viscosity-1" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Viscosity-1" ] ], [ "operation" ] ] ] }, { "qid": "dfdc7f7197f90ec78844", "term": "Pharmacology", "description": "Branch of biology concerning drugs", "question": "Did Julius Caesar read books on Pharmacology?", "answer": false, "facts": [ "Pharmacology has its origins in the Middle Ages.", "The Middle Ages took place from 476 AD-1453 AD.", "Julius Caesar lived from 100 BC-44 BC." ], "decomposition": [ "When did Julius Caesar die?", "When did Pharmacology emerge as a field of study?", "Is #1 after or within #2?" 
], "evidence": [ [ [ [ "Assassination of Julius Caesar-1" ] ], [ [ "Pharmacology-7" ] ], [ "operation" ] ], [ [ [ "Julius Caesar-1" ] ], [ [ "Pharmacology-4" ] ], [ "operation" ] ], [ [ [ "Julius Caesar-1" ] ], [ [ "Pharmacology-7" ] ], [ "operation" ] ] ] }, { "qid": "27e9e6d368cab11438c4", "term": "NATO", "description": "Intergovernmental military alliance of Western states", "question": "NATO doesn't recognize double triangle flag countries?", "answer": true, "facts": [ "NATO is a members only alliance of several countries.", "Nepal has a double triangle flag.", "Nepal has not been recognized as a member of NATO." ], "decomposition": [ "What country has a flag with double triangles on it?", "Which countries are part of NATO?", "Is #1 not included in #2?" ], "evidence": [ [ [ [ "Flag of Nepal-5" ] ], [ [ "Member states of NATO-3" ] ], [ "operation" ] ], [ [ [ "Flag of Nepal-5" ] ], [ [ "Member states of NATO-2" ] ], [ "operation" ] ], [ [ [ "Flag of Nepal-1" ] ], [ [ "NATO-30" ] ], [ "operation" ] ] ] }, { "qid": "d0ed74f42be54dc99ec5", "term": "Chlorine", "description": "Chemical element with atomic number 17", "question": "Could a dichromat probably easily distinguish chlorine gas from neon gas?", "answer": false, "facts": [ "A dichromat is someone with color blindness that can have difficulty distinguishing red and green", "Chlorine gas is green or yellow-green", "Neon gas is red" ], "decomposition": [ "What two colors does a dichromat struggle to distinguish between?", "What color is Chlorine gas?", "What color is Neon Gas?", "Is #1 different from #2 and #3?" ], "evidence": [ [ [ [ "Dichromacy-1" ] ], [ [ "Chlorine-1" ] ], [ [ "Neon-3" ] ], [ "operation" ] ], [ [ [ "Gene therapy for color blindness-15" ] ], [ [ "Chlorine-2" ] ], [ [ "Neon lamp-25" ] ], [ "operation" ] ], [ [ [ "Dichromacy-1" ] ], [ [ "Chlorine-11" ] ], [ [ "Neon-1", "Neon-3" ] ], [ "operation" ] ] ] }, { "qid": "e12fef0504a959e49b23", "term": "New England", "description": "Region in the northeastern United States", "question": "Can someone from New England profit by growing coffee?", "answer": false, "facts": [ "Coffee can only be grown in subtropical and equatorial climates", "New England is located in a humid continental climate" ], "decomposition": [ "What climates does coffee grow in?", "What kind of climate does New England have?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Coffee-30" ] ], [ [ "Climate of New England-2" ] ], [ "operation" ] ], [ [ [ "Coffee-28" ] ], [ [ "Climate of New England-4" ] ], [ "operation" ] ], [ [ [ "Coffee bean-9" ] ], [ [ "England-43" ] ], [ "operation" ] ] ] }, { "qid": "da6af47f5a3bc2afb4fb", "term": "Saltwater crocodile", "description": "species of reptile", "question": "Is the saltwater crocodile less endangered than the European otter?", "answer": true, "facts": [ "The saltwater crocodile is listed as \"least concern\" on the International Union for the Conservation of Nature Red List.", "The European otter is listed as \"near threatened\" on the International Union for the Conservation of Nature Red List.", "The International Union for the Conservation of Nature Red List starts with \"least concern\", then \"near threatened\", \"vulnerable\", \"endangered\", \"critically endangered\", \"extinct in the wild\", and \"extinct\"." ], "decomposition": [ "What is the saltwater crocodile's conservation status on the IUCN red list?", "What is the European otter's conservation status on the IUCN red list?", "Is #1 less severe than #2?" 
], "evidence": [ [ [ [ "Saltwater crocodile-1" ] ], [ [ "Eurasian otter-8" ] ], [ [ "Least-concern species-1" ], "operation" ] ], [ [ [ "Saltwater crocodile-1" ] ], [ [ "Eurasian otter-8" ] ], [ [ "IUCN Red List-11" ], "no_evidence", "operation" ] ], [ [ [ "Saltwater crocodile-1" ] ], [ [ "Otter-12" ] ], [ "operation" ] ] ] }, { "qid": "ca93c9ed9f7914eaa963", "term": "Muslim world", "description": "Muslim-majority countries, states, districts, or towns", "question": "Is the Muslim world hostile to Israel?", "answer": true, "facts": [ "Israel, a small Middle Eastern nation which is considered the Jewish holy land, contains the ancient city of Jerusalem and other ancient holy sites.", "Jerusalem is the third most holy site for Muslims, after Mecca and Medina.", "Prior to English occupation in the 18th-20th centuries, the Muslim-based Ottoman Empire controlled Jerusalem.", "The Muslims want to reclaim Jerusalem and the surrounding holy lands." ], "decomposition": [ "What is the religious significance of Israel's historic cities to Muslims?", "Are the Muslims presently in control of Israel?", "Considering #1, do the Muslims wish to change the situation of #2?" ], "evidence": [ [ [ [ "Jerusalem-3" ], "no_evidence" ], [ [ "Israel-68" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Religious significance of Jerusalem-15" ] ], [ [ "Islam in Israel-1" ] ], [ [ "Muslim supporters of Israel-23" ] ] ], [ [ [ "Holy Land-1" ] ], [ [ "Israel-33" ], "no_evidence" ], [ [ "2006 Lebanon War-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "9e18fca673458935cbe8", "term": "Carnation Revolution", "description": "revolution", "question": "Was the Carnation Revolution the deadliest revolution in Europe?", "answer": false, "facts": [ "The Carnation Revolution was initially a 25 April 1974 military coup in Lisbon which overthrew the authoritarian Estado Novo regime.", "Its name arose from the fact that almost no shots were fired, and Celeste Caeiro offered carnations to the soldiers when the population took to the streets to celebrate the end of the dictatorship; other demonstrators followed suit, and carnations were placed in the muzzles of guns and on the soldiers' uniforms.", "Portugal is a country located mostly on the Iberian Peninsula, in southwestern Europe." ], "decomposition": [ "Why was the Carnation Revolution so named?", "Does #1 imply that no lives were lost?", "Did the Revolution take place in Europe?", "Is #2 or #3 negative?" ], "evidence": [ [ [ [ "Carnation Revolution-2" ] ], [ [ "Carnation Revolution-7" ] ], [ [ "Carnation Revolution-1", "Portugal-1" ] ], [ "operation" ] ], [ [ [ "Carnation Revolution-2" ] ], [ "operation" ], [ [ "Carnation Revolution-1", "Lisbon-1" ] ], [ "operation" ] ], [ [ [ "Carnation Revolution-2" ] ], [ "operation" ], [ [ "Carnation Revolution-1" ] ], [ "operation" ] ] ] }, { "qid": "c7d6bfa777382af6c5bb", "term": "Table tennis", "description": "Racket sport", "question": "Does table tennis use prime numbers?", "answer": true, "facts": [ "A table tennis game is won by the player first scoring 11 points.", "11 is a prime number." ], "decomposition": [ "What are the scores that can be awarded in a game of table tennis?", "Is any of #1 a prime number?" 
], "evidence": [ [ [ [ "Table tennis-9" ] ], [ [ "Prime number-13" ], "operation" ] ], [ [ [ "Table tennis-24" ] ], [ "operation" ] ], [ [ [ "Table tennis-24" ] ], [ "operation" ] ] ] }, { "qid": "594aac879ebd2a4123cb", "term": "Surveillance", "description": "monitoring of behavior, activities, or other changing information", "question": "Can you conduct surveillance from a teddy bear?", "answer": true, "facts": [ "Surveillance is the act of monitoring or observation", "Nanny cams are used for surveillance of behavior when a family leaves their home and/or children in the care of a third party", "Nanny cams are often placed in common household objects like teddy bears" ], "decomposition": [ "In what kind of context/environment are nanny cams used for surveillance?", "Would a teddy bear accommodate a nanny can and be commonly found in #1?" ], "evidence": [ [ [ [ "Hidden camera-8" ] ], [ [ "Hidden camera-8" ] ] ], [ [ [ "Hidden camera-1" ], "no_evidence" ], [ [ "Teddy bear-1" ], "no_evidence", "operation" ] ], [ [ [ "Hidden camera-8" ] ], [ "no_evidence" ] ] ] }, { "qid": "5e4b7582144e93c49c44", "term": "Osama bin Laden", "description": "Co-founder of al-Qaeda", "question": "Was a Tiny House ceiling out of Osama bin Laden's reach?", "answer": false, "facts": [ "Osama bin Laden was 6'5\" tall.", "Tiny Houses have ceilings that are between 7 and 8 feet tall.", "The standing reach of the average 6 foot man is around 8 feet." ], "decomposition": [ "How tall is Osama bin Laden?", "What is the standing reach of someone who is #1?", "How tall is the ceiling of a typical tiny house?", "Is #2 less than #3?" ], "evidence": [ [ [ [ "Osama bin Laden-14" ] ], [ "operation" ], [ [ "Tiny house movement-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Osama bin Laden-14" ] ], [ "no_evidence" ], [ [ "Tiny house movement-25" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Osama bin Laden-14" ] ], [ [ "NBA Draft Combine-3" ], "no_evidence" ], [ [ "Tiny house movement-25" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "f4a336168f5165de7f0d", "term": "Northern fur seal", "description": "The largest fur seal in the northern hemisphere", "question": "Is a northern fur seal needing emergency surgery in July likely a safe anesthesia candidate?", "answer": true, "facts": [ "Northern fur seals fast throughout the mating season", "It is recommended that patients, including animals, fast for a time before surgery that requires anesthesia ", "Peak mating season for northern fur seals occurs in June and July" ], "decomposition": [ "What is recommended for patients needing anesthesia?", "What do northern fur seals do in July?", "Does #2 include #1?" ], "evidence": [ [ [ [ "Anesthesia-12" ] ], [ [ "Northern fur seal-17" ] ], [ "operation" ] ], [ [ [ "Anesthesia-7" ] ], [ [ "Northern fur seal-17" ] ], [ "operation" ] ], [ [ [ "Anesthesia-34" ], "no_evidence" ], [ [ "Northern fur seal-17" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3fc6ba608677b8314b77", "term": "Red Sea", "description": "Arm of the Indian Ocean between Arabia and Africa", "question": "Would it be very difficult for Nuno Gomes to dive to the Red Sea's deepest point?", "answer": true, "facts": [ "The Red Sea has a maximum depth of 3,040 m (9,970 ft).", "Nuno Gomes' deepest dive in the Red Sea to date is 318 metres (1,043 ft)." ], "decomposition": [ "How deep is the Red Sea's maximum depth?", "What is the deepest Nuno Gomes can dive?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Red Sea-2" ] ], [ [ "Nuno Gomes (diver)-2", "Nuno Gomes (diver)-4" ] ], [ "operation" ] ], [ [ [ "Red Sea-2" ] ], [ [ "Nuno Gomes (diver)-4" ] ], [ "operation" ] ], [ [ [ "Red Sea-2" ], "no_evidence" ], [ [ "Nuno Gomes (diver)-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f7b1cc5b3fab95aa1be4", "term": "Breast cancer", "description": "cancer that originates in the mammary gland", "question": "Is breast cancer associated with a ribbon?", "answer": true, "facts": [ "Breast cancer is one of many diseases associated with a specific color of ribbon.", "Breast cancer's ribbon is pink." ], "decomposition": [ "Which diseases are associated with a (certain color of) ribbon?", "Is breast cancer included in #1?" ], "evidence": [ [ [ [ "Awareness ribbon-3" ] ], [ "operation" ] ], [ [ [ "Awareness ribbon-3" ] ], [ "operation" ] ], [ [ [ "Awareness ribbon-15" ] ], [ [ "Awareness ribbon-15" ] ] ] ] }, { "qid": "c4a4c8fb77384ecaa472", "term": "Common warthog", "description": "Wild member of the pig family", "question": "Could common warthog be useful for scrimshaw?", "answer": true, "facts": [ "Scrimshaw is the process of carving designs or symbols into materials such as ivory, whalebone, and tusks.", "The common warthog has two sets of long tusks.", "The common warthog has large teeth that are harnessed for ivory.", "The common warthog is not an endangered species." ], "decomposition": [ "What materials can be used in scrimshaw?", "Do warthogs have any of the things in #1?" ], "evidence": [ [ [ [ "Scrimshaw-1" ] ], [ [ "Phacochoerus-2" ], "operation" ] ], [ [ [ "Scrimshaw-1" ] ], [ [ "Phacochoerus-2" ] ] ], [ [ [ "Scrimshaw-1" ] ], [ [ "Common warthog-3" ] ] ] ] }, { "qid": "ebbf0b9ffffd08d812e5", "term": "Spice Girls", "description": "British girl group", "question": "Tata Hexa can accomodate every Spice Girl?", "answer": true, "facts": [ "The Spice Girls is a five woman musical group from Britain.", "The Tata Hexa is a car with 6 and 7 seat capacities." ], "decomposition": [ "How many women are in the Spice Girls group?", "How many people can the Tata Hexa seat?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Spice Girls-1" ] ], [ [ "Tata Aria-2", "Tata Aria-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ [ "Tata Hexa-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "af024277ec12d7845001", "term": "Church of Satan", "description": "international organization dedicated to the religion of Satanism", "question": "Is being 5 year Capital One Venture member more cost effective than being in Church of Satan?", "answer": false, "facts": [ "The Capital One Venture card has an annual fee of around $95.", "The Church of Satan has a one time lifetime membership fee of $225." ], "decomposition": [ "What is the Church of Satan's lifetime membership fee?", "How much does being a Capital One Venture member cost per year?", "What is #2 times 5?", "Is #3 less than #1?" 
], "evidence": [ [ [ [ "Church of Satan-23" ] ], [ "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Church of Satan-23" ] ], [ [ "Credit card-71" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Church of Satan-23" ] ], [ [ "Capital One-6" ], "no_evidence" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "ed91d5142b8037b07abb", "term": "Dosa", "description": "Thin pancakes originating from South India", "question": "Would lumberjacks get full after eating three dosa?", "answer": false, "facts": [ "Dosa are thin rice pancakes from South India.", "One dosa is approximately 110 calories.", "The average lumberjack would eat 8000 calories per day." ], "decomposition": [ "What is a Dosa?", "How many calories are in #1?", "How many calories does a lumberjack need per day?", "Is 3 times #2 a significant amount of #3?" ], "evidence": [ [ [ [ "Dosa-1" ] ], [ "no_evidence" ], [ [ "Food energy-14" ] ], [ "operation" ] ], [ [ [ "Dosa-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Dosa-1" ] ], [ "no_evidence" ], [ [ "Food energy-14", "Lumberjack-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "cb9eae3948f28e236477", "term": "Gorilla", "description": "Genus of mammals", "question": "Do gorillas fight with panda bears?", "answer": false, "facts": [ "Gorilla distribution is limited to areas of the continent of Africa", "Panda bears are found in the wild only in China" ], "decomposition": [ "What is the range for Gorillas?", "What is the range for pandas?", "Does #1 overlap #2?" ], "evidence": [ [ [ [ "Gorilla-2" ] ], [ [ "Giant panda-2" ] ], [ "operation" ] ], [ [ [ "Gorilla-1" ] ], [ [ "Giant panda-1" ] ], [ "operation" ] ], [ [ [ "Gorilla-2" ] ], [ [ "Giant panda-2" ] ], [ "operation" ] ] ] }, { "qid": "5fe8b705fc7bb1783a61", "term": "Foreign and Commonwealth Office", "description": "Ministry of Foreign Affairs of the United Kingdom", "question": "Is the Foreign and Commonwealth Office a European political agency?", "answer": true, "facts": [ "The Office is part of the government of the United Kingdom.", "The United Kingdom is in Europe." ], "decomposition": [ "What country is the Foreign and Commonwealth Office part of?", "Is #1 located in Europe?" ], "evidence": [ [ [ [ "Foreign and Commonwealth Office-1" ] ], [ [ "Outline of the United Kingdom-1" ], "operation" ] ], [ [ [ "Foreign and Commonwealth Office-4" ] ], [ [ "Turkey–United Kingdom relations-13" ] ] ], [ [ [ "Foreign and Commonwealth Office-1" ] ], [ [ "Outline of the United Kingdom-1" ] ] ] ] }, { "qid": "787aedef693b5860aac5", "term": "Knight", "description": "An award of an honorary title for past or future service with its roots in chivalry in the Middle Ages", "question": "Are there any official American knights?", "answer": false, "facts": [ "The English monarchy bestows the title of knighthood upon deserving English citizens.", "They only knight English people.", "The American government does not do knightings of its own." ], "decomposition": [ "Which kind government bestows knighthood on its citizens?", "Would #1 confer knighthood on a citizen of another country?", "Is the American government an example of #1?", "Is #2 or #3 positive?" 
], "evidence": [ [ [ [ "Knight-1" ] ], [ [ "Knight-3" ], "no_evidence" ], [ [ "Federal government of the United States-1" ] ], [ "operation" ] ], [ [ [ "Knight-1", "Orders, decorations, and medals of the United Kingdom-1" ], "no_evidence" ], [ [ "Order of the British Empire-6" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Order of the British Empire-2" ] ], [ [ "Order of the British Empire-3" ] ], [ [ "Federal government of the United States-1" ], "operation" ], [ "operation" ] ] ] }, { "qid": "8116aa0a9157b809ac9b", "term": "Hypertension", "description": "Long term medical condition", "question": "Are two cans of Campbell's Soup a day good for hypertension?", "answer": false, "facts": [ "Hypertension is a medical condition in which the blood pressure is high.", "Salt increases blood pressure.", "Campbell's Soup has an average of 1400 to 1800 mg of sodium (salt).", "The FDA states that sodium intake per day should not exceed 2300 mg." ], "decomposition": [ "Which substance has generated controversy about Campbell's canned soups regarding health concerns?", "Is excess of #1 good for people with hypertension?" ], "evidence": [ [ [ [ "Chicken soup-45" ] ], [ [ "Hypertension-24" ] ] ], [ [ [ "Soup-11" ] ], [ [ "Hypertension-16" ] ] ], [ [ [ "Campbell Soup Company-29" ] ], [ [ "Sodium-36" ] ] ] ] }, { "qid": "5c4b5e0487eaa93775cc", "term": "Underworld", "description": "The mythic Relm of the Dead, located far underground (aka, Hades; Underworld)", "question": "Can Kit & Kaboodle hypothetically help someone past the Underworld gates?", "answer": false, "facts": [ "The Underworld is guarded by a beast known as Cerberus.", "Cerberus is a three-headed dog.", "Cerberus eats the raw flesh of anyone that tries to escape the Underworld.", "Kit & Kaboodle is a brand of cat food. " ], "decomposition": [ "What guards the gates of the Underworld?", "What kind of creature is #1?", "What kind of animal is Kit & Kaboodle meant for?", "Is #3 the same as #2?" ], "evidence": [ [ [ [ "Cerberus-1" ] ], [ [ "Cerberus-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Cerberus-1" ] ], [ [ "Cerberus-1" ] ], [ [ "Jaclyn Linetsky-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Cerberus-1" ] ], [ [ "Cerberus-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7f4effbc97ab2b5fd4a7", "term": "British cuisine", "description": "culinary traditions of the United Kingdom", "question": "Could an American confuse breakfast in British cuisine for dinner?", "answer": true, "facts": [ "In British Cuisine, baked beans are served with toast for breakfast.", "In the US, baked beans are served alongside barbecue dinners.", "British 'Full breakfasts' include grilled vegetables like mushrooms and whole cherry tomatoes.", "Grilled mushrooms and tomatoes are used more often in evening dishes in the US." ], "decomposition": [ "What foods are part of a traditional British breakfast?", "What foods are part of a traditional American dinner?", "Is there overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Breakfast-63" ] ], [ [ "Meal-16" ] ], [ "operation" ] ], [ [ [ "Breakfast-63" ] ], [ [ "Burger King breakfast sandwiches-6" ] ], [ "operation" ] ], [ [ [ "Full breakfast-1" ] ], [ [ "Mushroom-28", "Tomato-85" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "399d5e61740a8a93dbe7", "term": "Reproduction", "description": "Biological process by which new organisms are generated from one or more parent organisms", "question": "Is it true that gay male couples cannot naturally reproduce?", "answer": false, "facts": [ "Gay men can have any of the various sex organs that humans have.", "Trans men will sometimes become pregnant with their significant other before transitioning medically. " ], "decomposition": [ "What defines a male gender?", "What organs are needed to impregnate someone?", "What organs are needed to carry a pregnancy?", "Does #1 exclude persons with either of #2 or #3?" ], "evidence": [ [ [ [ "Male-4" ] ], [ [ "Sex organ-1" ] ], [ [ "Uterus-1" ] ], [ "operation" ] ], [ [ [ "Gender-1" ] ], [ [ "Male reproductive system-2" ] ], [ [ "Female reproductive system-1" ] ], [ "operation" ] ], [ [ [ "Male-1" ] ], [ [ "Male reproductive system-2" ] ], [ [ "Female reproductive system-1", "Pregnancy-1" ] ], [ "operation" ] ] ] }, { "qid": "43198079fc7c683780fb", "term": "Electronic dance music", "description": "broad category of electronic music", "question": "Did Beethoven enjoy listening to EDM?", "answer": false, "facts": [ "Ludwig van Beethoven died in 1827.", "EDM originated in the mid-to-late 20th century." ], "decomposition": [ "When did Ludwig van Beethoven die?", "When did EDM originate?", "Is #1 after #2?" ], "evidence": [ [ [ [ "Death of Ludwig van Beethoven-15" ] ], [ [ "Trap music (EDM)-1" ] ], [ "operation" ] ], [ [ [ "Ludwig van Beethoven-61" ] ], [ [ "Electronic dance music-2" ] ], [ "operation" ] ], [ [ [ "Ludwig van Beethoven-1" ] ], [ [ "Electronic dance music-2" ] ], [ "operation" ] ] ] }, { "qid": "9a1bf03a1359fa94eb7c", "term": "Cheeseburger", "description": "hamburger topped with cheese", "question": "Is a krabby patty similar to a cheeseburger?", "answer": true, "facts": [ "A krabby patty is a fictional sandwich featuring a patty on a split bun with toppings like lettuce, onion, and tomato.", "A hamburger is typically served on a bun and offers toppings like lettuce, onion, and tomato." ], "decomposition": [ "What are the ingredients of a Krabby Patty?", "What are the ingredients of a cheeseburger?", "Is there significant overlap between #1 and #2?" ], "evidence": [ [ [ [ "Krabby Patty-4" ] ], [ [ "Cheeseburger-10" ] ], [ "operation" ] ], [ [ [ "Krabby Patty-4" ] ], [ [ "Cheeseburger-1" ] ], [ "operation" ] ], [ [ [ "Krabby Patty-4" ] ], [ [ "Cheeseburger-1" ] ], [ "operation" ] ] ] }, { "qid": "ed92da8e7f6ee25cb3ca", "term": "Ben & Jerry's", "description": "American ice cream company", "question": "Are both founders of Ben & Jerry's still involved in the company?", "answer": false, "facts": [ "Ben & Jerry's was founded by Ben Cohen and Jerry Greenfield.", "The founders sold the company to Unilever in 2000." ], "decomposition": [ "Who were the founders of Ben & Jerry's ice cream?", "Who owns Ben & Jerry's now?", "Is #2 the same as #1?" 
], "evidence": [ [ [ [ "Ben & Jerry's-2" ] ], [ [ "Ben & Jerry's-1" ] ], [ "operation" ] ], [ [ [ "Ben & Jerry's-2" ] ], [ [ "Ben & Jerry's-10" ] ], [ "operation" ] ], [ [ [ "Ben & Jerry's-2" ] ], [ [ "Ben & Jerry's-10" ] ], [ "operation" ] ] ] }, { "qid": "7a0e419ffb6009156828", "term": "Apollo 13", "description": "A failed crewed mission to land on the Moon", "question": "Was ship that recovered Apollo 13 named after a World War II battle?", "answer": true, "facts": [ "Apollo 13 was recovered by the USS Iwo Jima.", "Iwo Jima was captured from the Imperial Japanese Army during World War II by the US in a conflict called the Battle of Iwo Jima." ], "decomposition": [ "Which ship recovered Apollo 13 crew?", "What was #1 named for?", "Did #2 occur during World War II?" ], "evidence": [ [ [ [ "USS Iwo Jima (LPH-2)-13" ] ], [ [ "USS Iwo Jima (LPH-2)-1" ] ], [ [ "Battle of Iwo Jima-1" ] ] ], [ [ [ "Apollo 13-55" ] ], [ [ "USS Iwo Jima (LPH-2)-1" ] ], [ "operation" ] ], [ [ [ "Apollo 13-55" ] ], [ [ "Iwo Jima-3" ] ], [ [ "Iwo Jima-19" ] ] ] ] }, { "qid": "668a4e03534608476faf", "term": "Ringo Starr", "description": "British musician, drummer of the Beatles", "question": "Would Ringo Starr avoid the pot roast at a restaurant?", "answer": true, "facts": [ "Ringo Starr is a vegetarian.", "Vegetarianism is the practice of abstaining from the consumption of meat.", "Pot roast is a braised beef dish made by browning a roast-sized piece of beef before slow cooking the meat in a covered dish, sometimes with vegetables, in or over liquid." ], "decomposition": [ "What dietary system does Ringo Starr follow?", "What type of foods are not allowed to be eaten by someone following #1?", "What is pot roast made of?", "Is #3 part of #2?" ], "evidence": [ [ [ [ "Ringo Starr-71" ] ], [ [ "Vegetarianism-1" ] ], [ [ "Pot roast-1" ] ], [ "operation" ] ], [ [ [ "Ringo Starr-71" ] ], [ [ "Vegetarianism-1" ] ], [ [ "Pot roast-1" ] ], [ [ "Beef-1" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "daa78a40ef9922db732d", "term": "Hamas", "description": "Palestinian Sunni-Islamist fundamentalist organization", "question": "Is starving Hamas agent eating pig bad?", "answer": false, "facts": [ "Hamas is a Sunni-Islam fundamentalist group that strictly prohibits the eating of a pig.", "Sunni Islam has a concept called the Law of Necessity.", "The Law of Necessity states that, \"That which is necessary makes the forbidden permissible.\"" ], "decomposition": [ "What religion do members of Hamas follow?", "What foods are forbidden in #1?", "Are there no exceptions to #2?" ], "evidence": [ [ [ [ "Hamas-2" ] ], [ [ "Islamic dietary laws-14" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Hamas-1" ] ], [ [ "Islam-50" ] ], [ [ "Islamic dietary laws-3" ], "operation" ] ], [ [ [ "Hamas-2" ] ], [ [ "Halal-6" ] ], [ [ "Halal-6" ] ] ] ] }, { "qid": "2549ab4ce062ef762c4c", "term": "Bern", "description": "Place in Switzerland", "question": "Are Citizens of Bern Switzerland are descendants of Genghis Khan?", "answer": true, "facts": [ "Genghis Khan had sixteen children.", "1 in 200 men are direct descendants of Genghis Khan.", "Switzerland has a large Asian immigration population which was around 19,000 in 2018." ], "decomposition": [ "What ethnic groups contain much of Genghis Khan's descendants?", "Is there a large population of any of #1 in Bern?" 
], "evidence": [ [ [ [ "Descent from Genghis Khan-2" ], "no_evidence" ], [ [ "Bern-39" ], "no_evidence" ] ], [ [ [ "Descent from Genghis Khan-2", "Descent from Genghis Khan-22" ] ], [ "no_evidence" ] ], [ [ [ "Genghis Khan-2" ], "no_evidence" ], [ [ "Bern-39" ], "no_evidence", "operation" ] ] ] }, { "qid": "8c3ca12decfff256eddb", "term": "Lord Voldemort", "description": "Fictional character of Harry Potter series", "question": "Is Lord Voldemort associated with a staff member of Durmstrang?", "answer": true, "facts": [ "Igor Karkaroff is the headmaster of Durmstrang school.", "Karkaroff is a former Death Eater.", "The Death Eaters were Voldemort's minions." ], "decomposition": [ "Who is the headmaster of Durmstrang school?", "What did #1 part of in the past?", "Is #2 related to Lord Voldemort?" ], "evidence": [ [ [ [ "Places in Harry Potter-31" ], "no_evidence" ], [ [ "Places in Harry Potter-32" ] ], [ "no_evidence" ] ], [ [ [ "Death Eater-30" ] ], [ [ "Death Eater-31" ] ], [ [ "Death Eater-1" ] ] ], [ [ [ "Places in Harry Potter-31" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3bd6c0cff096123cc207", "term": "Will Ferrell", "description": "American actor, comedian, producer, writer and businessman", "question": "Does Dean Cain have less days to birthday than Will Ferrell every 4th of July?", "answer": false, "facts": [ "Will Ferrell was born on July 16th.", "Dean Cain was born on July 31st." ], "decomposition": [ "What day of the year was Will Ferrell born?", "What day of the year was Dean Cain born?", "How many days away from July fourth is #1?", "How many days away from July fourth is #2?", "Is #4 less than #3?" ], "evidence": [ [ [ [ "Will Ferrell-1" ] ], [ [ "Dean Cain-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Will Ferrell-1" ] ], [ [ "Dean Cain-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Will Ferrell-1" ] ], [ [ "Dean Cain-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "4c72798de7aca9e0954e", "term": "Metallica", "description": "American heavy metal band", "question": "Is Metallica protective over their music?", "answer": true, "facts": [ "Napster was a P2P music sharing service.", "Metallica sued Napster in order to remove their songs from the program, as they were not getting profit from it." ], "decomposition": [ "What did Metallica do in response to Napster hosting their songs?", "Did #1 involve legal action?" ], "evidence": [ [ [ [ "Metallica v. Napster, Inc.-1" ] ], [ [ "Lawsuit-1" ], "operation" ] ], [ [ [ "Metallica-3" ] ], [ "operation" ] ], [ [ [ "Metallica-3" ] ], [ [ "Metallica-29" ] ] ] ] }, { "qid": "1061271843722fb7a225", "term": "Rahul Dravid", "description": "Indian cricketer", "question": "Does Rahul Dravid belong to the family Gryllidae?", "answer": false, "facts": [ "Crickets (also known as \"true crickets\"), of the family Gryllidae, are insects related to bush crickets, and, more distantly, to grasshoppers.", "Cricket is a bat-and-ball game played between two teams of eleven players on a field at the centre of which is a 20-metre (22-yard) pitch with a wicket at each end, each comprising two bails balanced on three stumps.", "Human beings belong to the family Hominidae." ], "decomposition": [ "What kind of creature is Rahul Dravid?", "Which family does #1 belong to?", "Is #2 the same as Gryllidae?" 
], "evidence": [ [ [ [ "Rahul Dravid-1" ] ], [ [ "Human-6" ] ], [ "operation" ] ], [ [ [ "Homo sapiens-1", "Rahul Dravid-1" ] ], [ [ "Hominidae-1" ] ], [ "operation" ] ], [ [ [ "Rahul Dravid-1" ] ], [ [ "Hominidae-20" ] ], [ "operation" ] ] ] }, { "qid": "449e539f14ff346b8f03", "term": "United States presidential election", "description": "type of election in the United States", "question": "Will bumblebees derail the United States presidential election?", "answer": false, "facts": [ "The United States presidential election is held in November of each year.", "Bees die off during the winter months except for the queen bee.", "During November, bees go into their hives and hibernate." ], "decomposition": [ "When is the presidential election?", "What is a necessary characteristic for something to disrupt the election? ", "Are bees #2 in #1?" ], "evidence": [ [ [ [ "United States presidential election-3" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Election Day (United States)-1" ] ], [ "no_evidence" ], [ [ "Bumblebee-30" ], "operation" ] ], [ [ [ "2020 United States presidential election-2" ] ], [ [ "Electrical disruptions caused by squirrels-9" ] ], [ "operation" ] ] ] }, { "qid": "33858996585a4d2ab95e", "term": "Ludacris", "description": "American rapper and actor", "question": "Does Ludacris have Greek heritage?", "answer": true, "facts": [ "Ludacris's real name is Christopher Brian Bridges", "Christopher is a name derived from Greek origins" ], "decomposition": [ "What is Ludacris's real name?", "Where is #1 derived from?" ], "evidence": [ [ [ [ "Ludacris-1" ] ], [ "no_evidence" ] ], [ [ [ "Ludacris-1" ] ], [ [ "Christopher-1" ] ] ], [ [ [ "Ludacris-1" ] ], [ [ "Ludacris-3" ], "operation" ] ] ] }, { "qid": "f5e5ec91462ee970cf86", "term": "Statue of Freedom", "description": "19th-century statue by Thomas Crawford on top of the US Capitol", "question": "Can you see the Statue of Freedom from the Statue of Liberty?", "answer": false, "facts": [ "The Statue of Freedom is in Washington, D.C. on the Capitol Building", "The Statue of Liberty is in New York City" ], "decomposition": [ "Where is the Statue of Freedom located?", "Where is the Statue of Liberty located?", "Is #1 within reasonable range of visibility from #2?" ], "evidence": [ [ [ [ "Statue of Freedom-2" ] ], [ [ "Statue of Liberty-1" ] ], [ [ "Statue of Freedom-2" ] ] ], [ [ [ "Statue of Freedom-1" ] ], [ [ "Statue of Liberty-1" ] ], [ "operation" ] ], [ [ [ "Statue of Freedom-1" ] ], [ [ "Statue of Liberty-1" ] ], [ "operation" ] ] ] }, { "qid": "01b8f2656a604fd71549", "term": "Spinach", "description": "species of plant", "question": "Has spinach been a source of power in a comic movie?", "answer": true, "facts": [ "The comic character Popeye uses spinach as a source of power.", "A movie was made about Popeye.", "Popeye consumes spinach as a source of power in the movie." ], "decomposition": [ "Which movie was made for comic character Popeye?", "What was Popeye's source of power in #1", "Is #2 spinach?" 
], "evidence": [ [ [ [ "Popeye-53" ] ], [ [ "Popeye-68" ] ], [ [ "Popeye-68" ], "operation" ] ], [ [ [ "Popeye (film)-1" ] ], [ [ "Popeye-6" ] ], [ "operation" ] ], [ [ [ "Popeye the Sailor (film)-1" ] ], [ [ "Popeye the Sailor (film)-4" ] ], [ "operation" ] ] ] }, { "qid": "9608fbb33f01b799a816", "term": "Chlorophyll", "description": "group of chemical compounds", "question": "For Hostas to look their best, do they need lots of chlorophyll?", "answer": true, "facts": [ "Hostas are characterized by large green striped leaves.", "The green color in plants is attributed to chlorophyll. " ], "decomposition": [ "What color is a visually appealing hosta?", "Do the get #1 from chlorophyll?" ], "evidence": [ [ [ [ "Hosta-2" ] ], [ [ "Chlorophyll-2" ], "operation" ] ], [ [ [ "Hosta-6" ], "no_evidence" ], [ [ "Chloroplast-1", "Hosta-2" ], "operation" ] ], [ [ [ "Hosta-2" ] ], [ [ "Chlorophyll-2" ], "operation" ] ] ] }, { "qid": "ab5cd41c7f7a74451bdf", "term": "Jeremy Irons", "description": "English actor", "question": "Did Jeremy Irons master sweep picking as a child?", "answer": false, "facts": [ "Jeremy Irons was the drummer and harmonica player in a four-man school band called the Four Pillars of Wisdom.", "Sweep picking is a guitar playing technique." ], "decomposition": [ "What kind of musical instrument involves sweet picking?", "What musical instruments did Jeremy Irons play in the school band Four Pillars of Wisdom?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Sweep picking-1" ] ], [ [ "Jeremy Irons-5" ] ], [ "operation" ] ], [ [ [ "Sweep picking-1" ] ], [ [ "Jeremy Irons-5" ] ], [ "operation" ] ], [ [ [ "Guitar picking-14" ] ], [ [ "Jeremy Irons-5" ] ], [ "operation" ] ] ] }, { "qid": "f251e2b2a577f7dccc0e", "term": "Cookie Monster", "description": "character from the television series Sesame Street", "question": "Would the Cookie Monster decline an offer of free Keebler products?", "answer": false, "facts": [ "The Cookie Monster has an enormous appetite and craving for cookies.", "The Keebler Company is an American cookie and cracker manufacturer." ], "decomposition": [ "What type of food does the Cookie Monster enjoy the most?", "What types of food are produced by the Keebler Company?", "Is #1 not included in #2?" ], "evidence": [ [ [ [ "Cookie-22" ] ], [ [ "Keebler Company-1" ] ], [ "operation" ] ], [ [ [ "Cookie Monster-1" ] ], [ [ "Keebler Company-1" ] ], [ "operation" ] ], [ [ [ "Cookie Monster-2" ] ], [ [ "Keebler Company-9" ] ], [ "operation" ] ] ] }, { "qid": "49c06b1626c68ca4a8d6", "term": "Winemaking", "description": "the production of wine, starting with the selection of the fruit, its fermentation into alcohol, and the bottling of the finished liquid", "question": "Are grapes essential to winemaking?", "answer": false, "facts": [ "Winemaking involves a process known as fermentation where energy is extracted from carbohydrates.", "A cup of cherries has about 19 grams of carbohydrates.", "Cherry wine does not contain any grapes." ], "decomposition": [ "In winemaking, what is the process in which energy is drawn?", "In #1, where does the energy come from?", "Are grapes the only thing that contains #2?" 
], "evidence": [ [ [ [ "Winemaking-19" ] ], [ [ "Winemaking-19" ] ], [ "no_evidence" ] ], [ [ [ "Ethanol fermentation-1" ], "no_evidence" ], [ [ "Winemaking-1", "Yeast-27" ], "no_evidence" ], [ [ "Fruit brandy-1" ], "operation" ] ], [ [ [ "Winemaking-32" ] ], [ [ "Winemaking-7" ] ], [ [ "Fruit-36" ] ] ] ] }, { "qid": "192bea4bc6d8b65a513a", "term": "QWERTY", "description": "keyboard layout where the first line is \"QWERTYUIOP\"", "question": "Can the majority of vowels be typed on the first line of a QWERTY keyboard?", "answer": true, "facts": [ "In English the vowels consist of A, E, I, O, U, and sometimes Y.", "The first line of the QWERTY keyboard contains the vowels E, I, O, U, and Y.", "A majority means more than half of the total." ], "decomposition": [ "What letters are vowels in the English language?", "What are the letters on the first line of a Qwerty keyboard?", "Is more than half of the letters listed in #1 also listed in #2?" ], "evidence": [ [ [ [ "English alphabet-20" ] ], [ [ "QWERTY-1", "Ray Tomlinson-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Vowel-49" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Vowel-49" ] ], [ [ "QWERTY-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "f486ba373c1b13c27667", "term": "Golden eagle", "description": "species of bird", "question": "Is the Golden eagle considered a scavenger bird?", "answer": false, "facts": [ "Scavengers are defined as animals that feed on dead carcasses of animals they have not killed themselves.", "Vultures are scavengers that hover in the air and swoop down to feed once they see an animal killed by another animal.", "The Golden eagle has sharp talons to hunt its own prey.", "Golden eagles kill and feed on hares, rabbits, and ground squirrels." ], "decomposition": [ "Who kills the prey that scavengers feed on?", "Who kills the prey that Golden eagles feed on?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Scavenger-1" ] ], [ [ "Golden eagle-22" ] ], [ "operation" ] ], [ [ [ "Scavenger-1" ] ], [ [ "Golden eagle-28" ] ], [ "operation" ] ], [ [ [ "Scavenger-1" ] ], [ [ "Golden eagle-22" ] ], [ "operation" ] ] ] }, { "qid": "6d15c4ab6720ee4f6411", "term": "Pyrenees", "description": "Range of mountains in southwest Europe", "question": "Can an elite runner circle the Pyrenees in one day?", "answer": false, "facts": [ "The Pyrenees mountains are 305 miles wide.", "An elite runner can cover 100 miles in around 12 hours." ], "decomposition": [ "How many miles can an elite runner cover in one day?", "How far around in miles are The Pyrenees mountains?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Usain Bolt-106" ] ], [ [ "Pyrenees-13" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ultramarathon-9" ], "no_evidence" ], [ [ "Pyrenees-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "How Many Miles to Babylon?-7" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "05a8b1683097fc70ac73", "term": "Hermes", "description": "ancient Greek god of roads, travelers, and thieves", "question": "Is Hermes equivalent to the Roman god Vulcan?", "answer": false, "facts": [ "Hermes is the Greek messenger god and god of roads and travelers.", "Mercury is the Roman messenger god of roads and travelers.", "The Roman Vulcan is actually equivalent to the Greek Hephaestus.", "Hermes is equivalent to Mercury." ], "decomposition": [ "What is Hermes god of?", "Who is the god of #1 in Roman mythology?", "Is #2 the same as Vulcan?" 
], "evidence": [ [ [ [ "Hermes-1", "Hermes-8" ] ], [ [ "Hermes-4" ] ], [ "operation" ] ], [ [ [ "Hermes-1" ] ], [ [ "Hermes-4" ] ], [ [ "Vulcan (mythology)-1" ], "operation" ] ], [ [ [ "Hermes-1" ] ], [ [ "Mercury (mythology)-2" ] ], [ "operation" ] ] ] }, { "qid": "75d0e535fa8064903afa", "term": "Kelly Clarkson", "description": "American singer-songwriter, actress, and television personality", "question": "Has Kelly Clarkson outsold season 4 American Idol winner?", "answer": false, "facts": [ "Carrie Underwood was the winner of the fourth season of American Idol.", "Carrie Underwood has sold a little over 65 million albums.", "Kelly Clarkson has sold a little over 25 million albums." ], "decomposition": [ "Who was the season 4 American Idol winner?", "How many albums has Kelly Clarkson sold?", "How many albums by #1 have been sold?", "Is #2 more than #3?" ], "evidence": [ [ [ [ "American Idol (season 4)-20" ] ], [ [ "Kelly Clarkson-3" ] ], [ [ "Carrie Underwood-79" ] ], [ "operation" ] ], [ [ [ "American Idol (season 4)-1" ] ], [ [ "Kelly Clarkson-3" ] ], [ [ "Carrie Underwood-3" ] ], [ "operation" ] ], [ [ [ "American Idol (season 4)-1" ] ], [ [ "Kelly Clarkson-3" ] ], [ [ "Carrie Underwood-3" ] ], [ "operation" ] ] ] }, { "qid": "dabf94934721c26b2422", "term": "European wildcat", "description": "Small wild cat", "question": "Do black-tailed jackrabbits fear the European wildcat?", "answer": false, "facts": [ "The European wildcat is native to continental Europe, Scotland, Turkey and the Caucasus.", "The black-tailed jackrabbit is native to Mexico and the western United States." ], "decomposition": [ "What is the range of the black-tailed jackrabbit?", "What is the range of the European wildcat?", "Does #1 and #2 overlap?" ], "evidence": [ [ [ [ "Black-tailed jackrabbit-1" ] ], [ [ "European wildcat-1" ] ], [ "operation" ] ], [ [ [ "Black-tailed jackrabbit-1" ] ], [ [ "European wildcat-1" ] ], [ "operation" ] ], [ [ [ "Black-tailed jackrabbit-1" ] ], [ [ "European wildcat-1" ] ], [ "operation" ] ] ] }, { "qid": "bcd77b200a2b90ca2ecc", "term": "Jackson Pollock", "description": "American painter", "question": "Is it understandable to compare a blood spatter pattern to a Jackson Pollock piece?", "answer": true, "facts": [ "Jackson Pollock is well known for a style of art formed through splashing liquids on canvas.", "Blood spatter patterns are caused by a splash of blood onto a surface or multiple surfaces." ], "decomposition": [ "What kinds of work pieces is Jackson Pollock well known for?", "How does he form #1", "How is a blood splatter formed?", "Is #2 comparable to #3?" ], "evidence": [ [ [ [ "Jackson Pollock-1" ] ], [ [ "Jackson Pollock-2" ] ], [ [ "Bloodstain pattern analysis-5" ] ], [ "operation" ] ], [ [ [ "Jackson Pollock-1" ] ], [ [ "Jackson Pollock-2" ] ], [ [ "Bloodstain pattern analysis-4" ] ], [ "operation" ] ], [ [ [ "Jackson Pollock-10" ] ], [ [ "Jackson Pollock-2" ] ], [ [ "Blood squirt-1" ] ], [ "operation" ] ] ] }, { "qid": "d8262444ec05bdbe1094", "term": "Johns Hopkins University", "description": "Private research university in Baltimore, Maryland", "question": "Has Johns Hopkins University always treated subjects ethically?", "answer": false, "facts": [ "Henrietta Lacks' DNA was used and replicated by Johns Hopkins University without her family's knowledge or approval.", "Henrietta Lacks' family medical history was released by Johns Hopkins University without their knowledge." 
], "decomposition": [ "Did researchers at John Hopkins obtain approval from Henrietta Lacks before using her cancer cells?", "Did John Hopkins obtain approval from Henrietta Lacks or her family before releasing their medical records to the public?", "Are #1 and #2 positive?" ], "evidence": [ [ [ [ "Henrietta Lacks-2" ] ], [ [ "Henrietta Lacks-19" ] ], [ "operation" ] ], [ [ [ "Henrietta Lacks-2" ] ], [ [ "Henrietta Lacks-4" ] ], [ "operation" ] ], [ [ [ "Henrietta Lacks-19" ] ], [ [ "Henrietta Lacks-19" ] ], [ [ "Henrietta Lacks-19" ], "operation" ] ] ] }, { "qid": "b92b31f0e7124066eb48", "term": "Porch", "description": "a room or gallery at the front entrance of a building forming a low front", "question": "In Hey Arnold, did any characters stay on a porch all the time?", "answer": true, "facts": [ "Hey Arnold was an animated children's series.", "Hey Arnold featured 'Stoop Kid', a character who never left the front stoop of his home.", "A stoop is the city equivalent of a porch." ], "decomposition": [ "Where is 'Stoop Kid' in Hey Arnold known to never leave?", "Is #1 in the series equivalent to a porch in real life?" ], "evidence": [ [ [ [ "Hey Arnold!-7" ], "no_evidence" ], [ [ "Porch-1", "Stoop (architecture)-1" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Stoop (architecture)-2" ] ] ], [ [ [ "Hey Arnold!-1" ], "no_evidence" ], [ [ "Stoop (architecture)-2" ] ] ] ] }, { "qid": "b5cdf7f8a6b440cdd451", "term": "Carl Friedrich Gauss", "description": "German mathematician and physicist", "question": "Could Carl Friedrich Gauss speak to someone 100 miles away?", "answer": false, "facts": [ "Carl Friedrich Gauss was born in 1777.", "Speaking to someone 100 miles away requires a telephone.", "The telephone was invented in 1876." ], "decomposition": [ "What device allows people to speak to each other even if they are 100 miles apart?", "When was #1 invented?", "When did Carl Friedrich Gauss die?", "Is #2 before #3?" ], "evidence": [ [ [ [ "Telephone-1" ] ], [ [ "Telephone-19" ] ], [ [ "Carl Friedrich Gauss-13" ] ], [ "operation" ] ], [ [ [ "Telephone-1" ] ], [ [ "Telephone-22" ] ], [ [ "Carl Friedrich Gauss-13" ] ], [ "operation" ] ], [ [ [ "Telephone-1" ] ], [ [ "Alexander Graham Bell-31" ] ], [ [ "Carl Friedrich Gauss-1" ] ], [ "operation" ] ] ] }, { "qid": "e43424acbaf3f64feefd", "term": "Brazilian Navy", "description": "Naval warfare branch of Brazil's military forces", "question": "Are some Brazilian Navy ships built in Britian?", "answer": true, "facts": [ "The Brazilian Navy stated in 2018 that they had purchased the helicopter carrier ship HMS Ocean.", "HMS stands for \"His/Her Majesty's Ship\", which is emblazoned on ships of the British Royal Navy. ", "Some of the ships in the Brazilian Navy are guided missile frigates built in Britian." ], "decomposition": [ "Which helicopter carrier ship did the Brazilian Navy announce that they had acquired in 2018?", "Was #1 built in Britain?" ], "evidence": [ [ [ [ "Brazilian Navy-62" ] ], [ [ "HMS Ocean (L12)-1" ] ] ], [ [ [ "HMS Ocean (L12)-2" ] ], [ [ "HMS Ocean-1" ] ] ], [ [ [ "Aircraft carrier-43" ] ], [ "operation" ] ] ] }, { "qid": "80aa769f55b14c1e4d8d", "term": "Sable", "description": "Species of marten", "question": "Are sables related to wolverines?", "answer": true, "facts": [ "The sable is a species of marten, which make up the genus Martes.", "Wolverines are from the genus Gulo.", "Both the Martes and the Gulo are from the family Mustelidae." 
], "decomposition": [ "What species is a sable?", "What genus is #1 from?", "What genus are wolverines from?", "Are #2 and #3 from the same family?" ], "evidence": [ [ [ [ "Sable-1" ] ], [ [ "Marten-1" ] ], [ [ "Gulo-1" ] ], [ [ "Gulo-1", "Marten-1" ] ] ], [ [ [ "Sable-1" ] ], [ [ "Marten-1" ] ], [ [ "Gulo-1", "Wolverine-1" ] ], [ "operation" ] ], [ [ [ "Sable-1" ] ], [ [ "Marten-1" ] ], [ [ "Gulo-1" ] ], [ "operation" ] ] ] }, { "qid": "89b3fdc55964eeb17d7d", "term": "Watchmaker", "description": "artisan who makes and repairs watches", "question": "Is a watchmaker likely to be able to fix an Apple Watch?", "answer": false, "facts": [ "A watchmaker makes and repairs watches using tiny instruments to fix coils, springs, gears, and other metal parts..", "The Apple Watch is a computer driven watch that can connect to devices using wireless technology.", "Apple Watches do not have the usual springs and gears of traditional watches." ], "decomposition": [ "What components of watches do watchmakers repair?", "What are the main components of an Apple Watch?", "Is there an overlap between #1 and #2?" ], "evidence": [ [ [ [ "Watchmaker-1", "Watchmaker-2" ], "no_evidence" ], [ [ "Apple Watch-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Watchmaker-2" ] ], [ [ "Apple Watch-29" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Watchmaker-1" ] ], [ [ "Apple Watch-15" ] ], [ "operation" ] ] ] }, { "qid": "d3bfac8218a421be63fe", "term": "Noah", "description": "Biblical figure", "question": "Was Noah concerned with buoyancy?", "answer": true, "facts": [ "Buoyancy is the property of an object related to its ability to float in liquid", "Noah was tasked with building a boat to house many animals and survive a catastrophic flood", "Boats must be properly buoyant or they will sink" ], "decomposition": [ "What was Noah famous for building?", "Did #1 have to be buoyant to work?" ], "evidence": [ [ [ [ "Noah-2" ] ], [ [ "Buoyancy-1", "Buoyancy-2" ], "operation" ] ], [ [ [ "Noah's Ark-1" ] ], [ [ "Ark (river boat)-4" ] ] ], [ [ [ "Noah's Ark-1" ] ], [ [ "Buoyancy-1" ], "operation" ] ] ] }, { "qid": "0467bce4b8304e2f5000", "term": "Fair trade", "description": "form of trade", "question": "Can you buy a fair trade laptop?", "answer": false, "facts": [ "Fair trade is a term used with sustainable development focusing on agricultural production", "Laptops are consumer electronics" ], "decomposition": [ "What type of product is the fair trade label used with? ", "What type of product is a laptop?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Fair trade-4" ] ], [ [ "Laptop-1" ] ], [ "operation" ] ], [ [ [ "Fair trade-1" ] ], [ [ "Laptop-1" ] ], [ "operation" ] ], [ [ [ "Fair trade-1" ] ], [ [ "Laptop-1" ] ], [ "operation" ] ] ] }, { "qid": "740b4c542a9c9512c3e3", "term": "Giraffe", "description": "Tall African ungulate", "question": "Is it foolish to stand on giraffe's head to see over Eiffel Tower?", "answer": true, "facts": [ "The neck of a giraffe can be up to 7 feet in length.", "Including their necks, giraffes can be as tall as 20 feet.", "The Eiffel Tower is 1,063 feet tall." ], "decomposition": [ "How tall is a giraffe?", "How tall is the Eiffel Tower?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Giraffe-16" ] ], [ [ "Eiffel Tower-3" ] ], [ "operation" ] ], [ [ [ "Giraffe-16" ] ], [ [ "Eiffel Tower-3" ] ], [ "operation" ] ], [ [ [ "Giraffe-16" ] ], [ [ "Eiffel Tower-3" ] ], [ "operation" ] ] ] }, { "qid": "80562274b771c2c50ebd", "term": "Napoleonic Wars", "description": "Series of early 19th century European wars", "question": "Was a nuclear bomb used in the Napoleonic Wars?", "answer": false, "facts": [ "The Napoleonic Wars took place between 1803 and 1815.", "Nuclear bombs have only been used in warfare twice, both times in 1945." ], "decomposition": [ "When was the Napoleonic Wars?", "What year were nuclear bombs used in war?", "Is #2 in the range of years of #1?" ], "evidence": [ [ [ [ "Napoleonic Wars-1" ] ], [ [ "Nuclear weapon-2" ] ], [ "operation" ] ], [ [ [ "Napoleonic Wars-1" ] ], [ [ "Nuclear weapons debate-1" ] ], [ "operation" ] ], [ [ [ "Napoleonic Wars-1" ] ], [ [ "Nuclear weapon-2" ] ], [ "operation" ] ] ] }, { "qid": "5058960a0fd4c1af7f0c", "term": "Lionel Richie", "description": "American singer-songwriter, musician, record producer and actor", "question": "Is Lionel Richie related to Sheila E?", "answer": false, "facts": [ "Lionel Richie is an American singer and raised Nicole Richie.", "Nicole Richie was born to Sheila E's brother, Peter Michael Escovedo.", "Lionel Richie adopted Nicole Richie from Peter Michael Escovedo.", "Adoptive parents are not considered related to birth parents." ], "decomposition": [ "What is the relationship between Lionel Richie and Nicole Richie?", "Are #1 considered related to birth parents?" ], "evidence": [ [ [ [ "Lionel Richie-27" ] ], [ [ "Adoption-1" ], "operation" ] ], [ [ [ "Nicole Richie-3" ] ], [ "operation" ] ], [ [ [ "Lionel Richie-27" ] ], [ [ "Adoption-1" ] ] ] ] }, { "qid": "704003c5c9786ae43746", "term": "Nicole Kidman", "description": "Australian-American actress and film producer", "question": "Does Nicole Kidman know any Scientologists?", "answer": true, "facts": [ "Nicole Kidman was married to Tom Cruise.", "Tom Cruise is a Scientologist. " ], "decomposition": [ "Who has Nicole Kidman been married to?", "Have any of #1 practiced Scientology?" ], "evidence": [ [ [ [ "Nicole Kidman-4" ] ], [ [ "Tom Cruise-36" ], "operation" ] ], [ [ [ "Nicole Kidman-32" ] ], [ [ "Tom Cruise-4" ] ] ], [ [ [ "Nicole Kidman-4" ] ], [ [ "Tom Cruise-4" ] ] ] ] }, { "qid": "2533aef219d77a6860ef", "term": "Cream", "description": "Dairy product", "question": "Would Kylee Jenner ask for no cream in her coffee?", "answer": true, "facts": [ "Kylee Jenner is lactose intolerant.", "Lactose intolerance makes it uncomfortable for people to digest foods containing lactose.", "Cream is a dairy product and is rich in lactose." ], "decomposition": [ "What dietary condition does Kylee (Kylie) Jenner suffer from?", "What do people who have #1 have to avoid?", "Does cream have #2 in it?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Lactose intolerance-1" ] ], [ [ "Cream-1" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Kylie Jenner-1" ] ], [ [ "Lactose intolerance-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0fe658b221ff8ad08a14", "term": "Roman numerals", "description": "Numbers in the Roman numeral system", "question": "Does the FDA require sell by dates using Roman Numerals?", "answer": false, "facts": [ "There are no requirements for food to have sell by dates. ", "Sell by dates on most food items are written using arabic numerals." 
], "decomposition": [ "Is there any regulation on the sell by dates of food products?" ], "evidence": [ [ [ "no_evidence" ] ], [ [ [ "Shelf life-1", "Shelf life-31" ], "operation" ] ], [ [ [ "Shelf life-31" ] ] ] ] }, { "qid": "3496d185679a8f55b799", "term": "JPEG", "description": "Lossy compression method for reducing the size of digital images", "question": "Could the leader of Heaven's Gate save images in JPEG format?", "answer": true, "facts": [ "Marshall Applewhite, Jr. was the leader of the Heaven's Gate cult", "Marshall Applewhite, Jr. died in 1997", "JPEG format was released in 1992" ], "decomposition": [ "Who led the Heaven's Gate cult?", "During what span of years was #1 alive?", "What year was JPEG released?", "Is #2 inclusive of #3?" ], "evidence": [ [ [ [ "Marshall Applewhite-1" ] ], [ [ "Marshall Applewhite-1" ] ], [ [ "JPEG-2" ] ], [ "operation" ] ], [ [ [ "Heaven's Gate (religious group)-20" ] ], [ [ "Marshall Applewhite-1" ] ], [ [ "JPEG-15" ] ], [ "operation" ] ], [ [ [ "Heaven's Gate (religious group)-1" ] ], [ [ "Heaven's Gate (religious group)-1" ] ], [ [ "JPEG-5" ] ], [ "operation" ] ] ] }, { "qid": "673be9f6d35f74ae8e91", "term": "Vitamin C", "description": "nutrient found in citrus fruits and other foods", "question": "Did pirates who had scurvy need more Vitamin C?", "answer": true, "facts": [ "Pirates were known for having poor teeth and deteriorated gums.", "Gum deterioration and tooth decay is a symptom of scurvy.", "Scurvy is caused by a lack of dietary vitamin C." ], "decomposition": [ "What causes scurvy?", "Is #1 the same as insufficient vitamin C intake?" ], "evidence": [ [ [ [ "Scurvy-1" ] ], [ "operation" ] ], [ [ [ "Scurvy-1" ] ], [ "operation" ] ], [ [ [ "Scurvy-1" ] ], [ "operation" ] ] ] }, { "qid": "0d528ce1fdd22c9d8aa0", "term": "Easy Rider", "description": "1969 film by Dennis Hopper", "question": "Did producer of Easy Rider ever star in a movie with Dean Cain's Princeton girlfriend?", "answer": true, "facts": [ "Easy Rider was produced by Peter Fonda.", "Dean Cain dated Brooke Shields while at Princeton.", "Brooke Shields and Peter Fonda star in the movie Wanda Nevada." ], "decomposition": [ "Who produced Easy Rider?", "Who did Dean Cain date while at Princeton?", "What movies did #1 star in?", "What movies did #2 star in?", "Is at least one element of #3 also found in #4?" ], "evidence": [ [ [ [ "Easy Rider-1" ] ], [ [ "Dean Cain-16" ] ], [ [ "Peter Fonda-48" ], "no_evidence" ], [ [ "Brooke Shields-1" ], "no_evidence" ], [ [ "Wanda Nevada-1" ], "no_evidence", "operation" ] ], [ [ [ "Easy Rider-1" ] ], [ [ "Dean Cain-3" ] ], [ [ "Wanda Nevada-1" ] ], [ [ "Wanda Nevada-1" ] ], [ "operation" ] ], [ [ [ "Easy Rider-40" ], "operation" ], [ [ "Dean Cain-16" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "f416f193c9cba1e27d4f", "term": "Laziness", "description": "disinclination to activity or exertion", "question": "Can depression be mistaken for laziness?", "answer": true, "facts": [ "Symptoms of depression include low energy, inability to get out of bed, and low motivation.", "Low energy and low motivation can cause someone to seem like they are slacking on responsibility." ], "decomposition": [ "What are some common symptoms of depression?", "Does any of #1 share characteristics with laziness?" 
], "evidence": [ [ [ [ "Depression (mood)-1" ] ], [ "operation" ] ], [ [ [ "Symptom-11" ] ], [ [ "Laziness-1" ] ] ], [ [ [ "Depression (mood)-4" ] ], [ [ "Laziness-1" ], "operation" ] ] ] }, { "qid": "e4a065a8ea3691b12e6f", "term": "Bulk carrier", "description": "merchant ship specially designed to transport unpackaged bulk cargo", "question": "Is the average bulk carrier ideal for transporting bromine at room temperature?", "answer": false, "facts": [ "Bulk carriers are defined as a ship that carries nonliquid cargoes such as grain or ore in bulk.", "Bromine is a liquid at room temperature.", "The average bulk carrier is used for unpackaged bulk cargo, such as grains, coal, ore, steel coils and cement." ], "decomposition": [ "What are the kinds of cargo that a typical bulk carrier can transport?", "What kind of substance is bromine at room temperature?", "Can any of #1 be classified as #2?" ], "evidence": [ [ [ [ "Bulk carrier-1" ] ], [ [ "Bromine-1" ] ], [ "operation" ] ], [ [ [ "Bulk carrier-4" ], "operation" ], [ [ "Bromine-26" ] ], [ "no_evidence" ] ], [ [ [ "Bulk carrier-1" ] ], [ [ "Bromine-1" ] ], [ "operation" ] ] ] }, { "qid": "acb7edac010554509519", "term": "Chlorophyll", "description": "group of chemical compounds", "question": "Would human race go extinct without chlorophyll?", "answer": true, "facts": [ "Chlorophyll is a pigment in plants responsible for photosynthesis.", "Photosynthesis is the process by which plants release oxygen into the atmosphere.", "Humans need oxygen to live." ], "decomposition": [ "What is Chlorophyll responsible for in plants?", "What does #1 release into the air?", "Do humans need #2 in order to survive?" ], "evidence": [ [ [ [ "Chlorophyll-1" ] ], [ [ "Photosynthesis-1" ] ], [ "operation" ] ], [ [ [ "Chlorophyll-6" ] ], [ [ "Photosynthesis-1" ] ], [ [ "Breathing-2" ] ] ], [ [ [ "Chlorophyll-1" ] ], [ [ "Photosynthesis-1" ] ], [ [ "Breathing-2" ] ] ] ] }, { "qid": "c06605d435462122d1de", "term": "Walt Disney", "description": "American entrepreneur, animator, voice actor and film producer", "question": "Walt Disney dominated his amusement park peers at Academy Awards?", "answer": true, "facts": [ "Walt Disney won a total of 26 Academy Awards.", "The founder of Six Flags, Angus G Wynne, had 0 academy awards.", "The founder of Knott's Berry Farm, Walter Knott, had 0 academy awards." ], "decomposition": [ "At the Academy Awards, how many awards did Walt Disney win?", "At the Academy Awards, how many awards did Angus G Wynne win?", "At the Academy Awards, how many awards did Walter Knott win?", "Is #1 more than #2 and #3?" ], "evidence": [ [ [ [ "Walt Disney-1" ] ], [ [ "Angus G. Wynne-1" ], "no_evidence" ], [ [ "Walter Knott-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Walt Disney-1" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Walt Disney-1" ] ], [ [ "Angus G. Wynne-1" ] ], [ [ "Walter Knott-1" ] ], [ "operation" ] ] ] }, { "qid": "d4d98ff0a195302735a5", "term": "Toyota Hilux", "description": "Series of light commercial vehicles produced by the Japanese car-manufacturer Toyota.", "question": "Can a human heart last from NYC to Raleigh NC by Toyota Hiux?", "answer": true, "facts": [ "Human hearts can last up to six hours outside the body.", "The distance from NYC to Raleigh, NC is 505 miles.", "The top speed of a Toyota Hilux is 105 MPH." 
], "decomposition": [ "How many hours can a human heart last outside of the human body?", "What is the distance between NYC to Raleigh, NC in miles?", "What is the top speed of a Toyota Hilux in MPH?", "Is #1 times #3 more than #2?" ], "evidence": [ [ [ [ "Organ transplantation-3" ] ], [ [ "New York City-1", "Raleigh, North Carolina-1" ], "operation" ], [ [ "Toyota Hilux-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Organ donation-8" ], "no_evidence" ], [ "no_evidence" ], [ [ "Toyota Hilux-9" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Toyota Hilux-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "43422a156aa28bf24d7c", "term": "History of art", "description": "history of human creation of works for aesthetic, communicative, or expressive purposes", "question": "Can the history of art be learned by an amoeba?", "answer": false, "facts": [ "The history of art is the academic study of the development of human artistic expression over time", "Academic study requires human-level intelligence", "An amoeba is a single-celled organism " ], "decomposition": [ "What intellectual ability is necessary to study the history of art?", "Does an amoeba possess #1?" ], "evidence": [ [ [ [ "Human brain-66", "Human brain-67" ] ], [ [ "Amoeba-1", "Cell (biology)-1", "Cell (biology)-16" ], "operation" ] ], [ [ [ "Learning-1" ], "no_evidence" ], [ [ "Amoeba-1" ], "no_evidence", "operation" ] ], [ [ [ "Art history-6" ] ], [ [ "Amoeba-25" ] ] ] ] }, { "qid": "67b18bca7559b657c66b", "term": "Snow White", "description": "fairy tale", "question": "Do Snow White dwarves best The Hobbit dwarves in battle?", "answer": false, "facts": [ "Snow White had seven dwarves.", "There are 13 dwarves in The Hobbit.", "Several of The Hobbit dwarves, including Thorin Oakenshield, were acclaimed warriors." ], "decomposition": [ "How many dwarves are there in the Snow White story?", "How many dwarves are in The Hobbit?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Snow White and the Seven Dwarfs (1937 film)-7" ] ], [ [ "The Hobbit-7" ] ], [ "operation" ] ], [ [ [ "Snow White-3" ] ], [ [ "Hobbit-13" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Snow White (Fables)-1" ] ], [ [ "The Hobbit-26" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8a3be2ae0a7b67f7bc2f", "term": "Sonnet", "description": "form of poetry with fourteen lines; by the thirteenth century it signified a poem of fourteen lines that follows a strict rhyme scheme and specific structure", "question": "Would Rime of the Ancient Mariner make a good sonnet?", "answer": false, "facts": [ "A sonnet is a form of poetry that sticks to a strict 14 line rule.", "The Rime of the Ancient Mariner is a story by Samuel Taylor Coleridge and contains over thirty lines." ], "decomposition": [ "How many lines are in a Sonnet?", "How many lines is the Rime of the Ancient Mariner?", "Is #2 equal to #1?" 
], "evidence": [ [ [ [ "Sonnet-2" ] ], [ [ "The Rime of the Ancient Mariner-1" ] ], [ [ "Sonnet-2", "The Rime of the Ancient Mariner-9" ] ] ], [ [ [ "Sonnet-2" ] ], [ [ "The Rime of the Ancient Mariner-1", "The Rime of the Ancient Mariner-13" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sonnet-2" ] ], [ [ "The Rime of the Ancient Mariner-13" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "f4ac300578dfab653d3f", "term": "Pompey", "description": "1st/2nd-century BC Roman general", "question": "Has type of political association Pompey had with Caesar influenced reality TV?", "answer": true, "facts": [ "Pompey, Julius Caesar, and Marcus Licinius Crassus formed a political association called a triumvirate.", "A triumvirate spits rule between three powerful people that get to make decisions.", "Reality show The Challenge: Total Madness appoints three weekly winners to make decisions for the group, known as the Tribunal.", "Reality show American Idol has had three judges making decisions about which contestants advance." ], "decomposition": [ "Which political association did Pompey form with Julius Caesar and Marcus Licinius Crassus?", "How many people does #1 typically involve?", "How many judges are on reality show American Idol?", "Is #2 equal to #3?" ], "evidence": [ [ [ [ "First Triumvirate-1" ] ], [ [ "Triumvirate-1" ] ], [ [ "American Idol-10", "American Idol-9" ] ], [ "operation" ] ], [ [ [ "Pompey-2" ] ], [ [ "First Triumvirate-1" ] ], [ [ "American Idol-3" ] ], [ "operation" ] ], [ [ [ "Triumvirate-5" ] ], [ [ "Triumvirate-3" ] ], [ [ "American Idol-3" ] ], [ "operation" ] ] ] }, { "qid": "3eb2a8cfa2616c47672e", "term": "Nissan", "description": "Japanese automobile manufacturer", "question": "Is CEO of Nissan an internationally wanted fugitive?", "answer": true, "facts": [ "Carlos Ghosn was CEO of Nissan. ", "With help from an American private-security contractor, Carlos Ghosn fled from Japan to Lebanon on 30 December, breaking his bail conditions", "On 2 January 2020, Interpol issued a red notice to Lebanon seeking Carlos Ghosn's arrest." ], "decomposition": [ "Which of Nissan's former CEOs have been the subject of corporate unrest?", "Is #1 presently a fugitive on the run?" ], "evidence": [ [ [ [ "Carlos Ghosn-13" ] ], [ [ "Carlos Ghosn-4" ], "operation" ] ], [ [ [ "Carlos Ghosn-29" ] ], [ [ "Carlos Ghosn-1" ], "operation" ] ], [ [ [ "Carlos Ghosn-4" ] ], [ [ "Carlos Ghosn-4" ] ] ] ] }, { "qid": "8669c9d0666951d25ef0", "term": "Lighthouse of Alexandria", "description": "Ancient lighthouse in Egypt", "question": "Is Statue of Unity hypothetically more level with Statue of Liberty than Lighthouse of Alexandria?", "answer": false, "facts": [ "The Statue of Liberty rises 305 feet into the air.", "The Statue of Unity is 597 feet high.", "The Lighthouse of Alexandria was between 338 and 387 feet tall." ], "decomposition": [ "What is the height of the Statue of Liberty?", "What is the height of the Statue of Unity?", "What is the height of the Lighthouse of Alexandria?", "Is #2 minus #1 less than #3 minus #1?" 
], "evidence": [ [ [ [ "Statue of Liberty-31" ] ], [ [ "Statue of Unity-9" ] ], [ [ "Lighthouse of Alexandria-1" ] ], [ "operation" ] ], [ [ [ "Statue of Liberty-18" ] ], [ [ "Statue of Unity-1" ] ], [ [ "Lighthouse of Alexandria-1" ] ], [ "operation" ] ], [ [ [ "Statue of Liberty-18" ] ], [ [ "Statue of Unity-1" ] ], [ [ "Lighthouse of Alexandria-1" ] ], [ "operation" ] ] ] }, { "qid": "7c32706f05b201f6ffcd", "term": "Surfing", "description": "sport that consists of riding a wave", "question": "Is surfing popular in Des Moines, Iowa?", "answer": false, "facts": [ "Des Moines is a city in the landlocked state Iowa.", "Surfing involves riding the waves at a beach or ocean.", "There are no beaches in Iowa." ], "decomposition": [ "What conditions are necessary to be able to surf?", "Does Iowa have (or is close to) the conditions listed in #1?" ], "evidence": [ [ [ [ "Surfing-1" ] ], [ [ "Des Moines, Iowa-106", "Des Moines, Iowa-108" ] ] ], [ [ [ "Surfing-1" ] ], [ [ "Iowa-16" ] ] ], [ [ [ "Surfing-1" ] ], [ [ "Iowa-1" ], "operation" ] ] ] }, { "qid": "2acddc0a893af847a21b", "term": "Palaeography", "description": "Study of ancient handwriting", "question": "Paleography hypothetically helps to understand Cthulhu?", "answer": true, "facts": [ "Palaeography involves the study of ancient writings.", "Paleography has helped decode Anatolian hieroglyphics from all the way back as the first millenia BC.", "Author H.P. Lovecraft's Cthulhu is an ancient mystical being from eons ago." ], "decomposition": [ "Palaeography is the study of what?", "Cthulhu is from which age?", "Will a language from #2 be a subject of focus of #1?" ], "evidence": [ [ [ [ "Palaeography-1" ] ], [ [ "Cthulhu-9" ] ], [ "operation" ] ], [ [ [ "Palaeography-1" ] ], [ [ "Cthulhu-9" ] ], [ "operation" ] ], [ [ [ "Palaeography-1" ] ], [ [ "Cthulhu-1", "Cthulhu-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c69a3eeea9743e782831", "term": "Supreme Court of the United States", "description": "Highest court in the United States", "question": "Do members of the Supreme Court of the United States have longer terms than most senators?", "answer": true, "facts": [ "Senators, on average, serve for 10 years.", "Supreme Court Justices serve for their entire life.", "The average term for a Supreme court justice is 16 years." ], "decomposition": [ "How many years is in a term for a U.S. Senator?", "What is the term for a Supreme Court justice?", "Is #1 a shorter term than #2?" ], "evidence": [ [ [ [ "United States Senate-2" ] ], [ [ "Supreme Court of the United States-31" ] ], [ "operation" ] ], [ [ [ "United States Senate-16" ] ], [ [ "Supreme Court of the United States-2" ] ], [ "operation" ] ], [ [ [ "Member of Congress-3" ] ], [ [ "Supreme Court of the United States-2" ] ], [ "operation" ] ] ] }, { "qid": "c32b7909c41d4af5933d", "term": "Sea shanty", "description": "work song sung to accompany labor on board large merchant sailing vessels", "question": "Did travelers sing sea shanties on the Oregon Trail?", "answer": false, "facts": [ "Sea shanties are sung on seaborne vessels", "The Oregon Trail was a land-based emigration trail" ], "decomposition": [ "In what mode of travel are sea shanties typically sang?", "What mode of travel was mostly used on the Oregon Trail?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Sea shanty-1" ] ], [ [ "Oregon Trail-1" ] ], [ "operation" ] ], [ [ [ "Sea Songs-3" ] ], [ [ "Oregon Trail-116" ] ], [ "operation" ] ], [ [ [ "Sea shanty-1" ] ], [ [ "Oregon Trail-1" ] ], [ "operation" ] ] ] }, { "qid": "e1aee6fefd776d661c33", "term": "1999", "description": "Year", "question": "Were some people afraid of New Years Day coming in 1999?", "answer": true, "facts": [ "It was believed that computers might not know how to change from 1999 to 2000 on New Years Day.", "People were concerned that human services and utilities that were computerized might crash due to the Y2K bug.", "People believed that the year 2000 would cause computers to crash due to the 'Y2K' bug." ], "decomposition": [ "Which New Year's Day followed 1999?", "What concerns were there about computers during the transition from 1999 to #1?", "Was #2 a cause of fear?" ], "evidence": [ [ [ [ "January 1-1" ], "no_evidence" ], [ [ "Year 2000 problem-1" ] ], [ "operation" ] ], [ [ [ "Year 2000 problem-3" ] ], [ [ "Year 2000 problem-1" ] ], [ "operation" ] ], [ [ [ "New Year's Day-1", "Year 2000 problem-15" ] ], [ [ "Year 2000 problem-1" ] ], [ [ "2000-3", "Year 2000 problem-15" ] ] ] ] }, { "qid": "018306d2af9359be2d8c", "term": "Chiropractic", "description": "form of alternative medicine", "question": "Are some chiropractic manipulations dangerous?", "answer": true, "facts": [ "Manipulations of the neck can lead to complications such as stroke or paralysis.", "Manipulation of the lower back can lead to herniated disks." ], "decomposition": [ "What body parts do chiropractors manipulate?", "Are any of #1 prone to damage if mishandled?" ], "evidence": [ [ [ [ "Chiropractic-18" ] ], [ [ "Chiropractic controversy and criticism-34" ] ] ], [ [ [ "Chiropractic-1" ] ], [ "operation" ] ], [ [ [ "Chiropractic-1" ] ], [ [ "Chiropractic-2" ], "operation" ] ] ] }, { "qid": "01430717f590ad3f4e74", "term": "Spinal cord", "description": "long, thin, tubular structure made up of nervous tissue", "question": "Can you buy spinal cord at Home Depot?", "answer": false, "facts": [ "Home Depot sells home improvement and building supplies", "The spinal cord is an anatomical feature located in the vertebrae" ], "decomposition": [ "Where are spinal cords found?", "What does Home Depot sell?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Spinal cord-1" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ], [ [ [ "Spinal cord-1" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ], [ [ [ "Spinal cord-1" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ] ] }, { "qid": "86791c3eb5f380b56439", "term": "Bulk carrier", "description": "merchant ship specially designed to transport unpackaged bulk cargo", "question": "Would eliminating competition in the Japanese bulk carrier market be profitable for a steel company?", "answer": true, "facts": [ "62% of bulk carriers are built in Japan", "Bulk carrier hulls are made of steel" ], "decomposition": [ "Where are most bulk carriers built?", "What materials would #1 use in making bulk carriers?", "Is steel a major component of #2?" 
], "evidence": [ [ [ [ "Bulk carrier-2" ] ], [ [ "Bulk carrier-48" ] ], [ [ "Bulk carrier-48" ] ] ], [ [ [ "Bulk carrier-20", "Bulk carrier-22" ], "no_evidence" ], [ [ "Shipbuilding-45" ] ], [ "operation" ] ], [ [ [ "Malaysian Bulk Carriers-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "038d2f23ebc149069a74", "term": "Isaac Newton", "description": "Influential British physicist and mathematician", "question": "Is Issac Newton often associated with a red fruit?", "answer": true, "facts": [ "Issac Newton claimed to have contemplated gravity for the first time after seeing an apple fall.", "In most illustrations of Issac Newton discovering gravity, the apple shown falling is red." ], "decomposition": [ "Which of Isaac Newton's famous discoveries featured a fruit?", "Is #1 colored red in popular depictions?" ], "evidence": [ [ [ [ "Isaac Newton-84" ] ], [ "no_evidence" ] ], [ [ [ "Isaac Newton-84" ] ], [ [ "Apple-8" ], "operation" ] ], [ [ [ "Isaac Newton-88" ] ], [ [ "Gala (apple)-1" ] ] ] ] }, { "qid": "0edaff8446b0f53033c3", "term": "Alcatraz Island", "description": "United States historic place", "question": "Would it be possible to fit a football field in Alcatraz Island?", "answer": true, "facts": [ "Alcatraz is 511 meters by 180 meters", "A football field is 91 meters by 48 meters" ], "decomposition": [ "What is the land area of a football field?", "What is the land area of the Alcatraz Island?", "s #1 less than or equal to #2?" ], "evidence": [ [ [ [ "Comparison of American football and rugby union-24" ] ], [ [ "Alcatraz Island-4" ] ], [ [ "Alcatraz Island-4", "Comparison of American football and rugby union-24" ] ] ], [ [ [ "System of measurement-25" ] ], [ [ "Alcatraz Island-4" ] ], [ "operation" ] ], [ [ [ "American football field-2" ] ], [ [ "Alcatraz Island-4" ] ], [ "operation" ] ] ] }, { "qid": "fabdc0199bd6444eb78c", "term": "Mongoose", "description": "family of mammals", "question": "Did mongoose come from later period than rhinos?", "answer": true, "facts": [ "The mongoose originated in the Neogene geological period.", "Rhinos are from the Paleogene geological period.", "The Paleogene period spans 43 million years from the end of the Cretaceous Period 66 million years ago to the beginning of the Neogene Period." ], "decomposition": [ "During which period did the mongoose originate?", "Which period did Rhinos originate from?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Mongoose-2" ] ], [ [ "Rhinoceros-5" ] ], [ [ "Mongoose-2", "Rhinoceros-5" ], "operation" ] ], [ [ [ "Mongoose-1", "Mongoose-2" ] ], [ [ "Rhinoceros-5" ] ], [ "operation" ] ], [ [ [ "Mongoose-2" ] ], [ [ "Rhinoceros-5" ] ], [ "operation" ] ] ] }, { "qid": "bd7193627e010af0ecba", "term": "Penny", "description": "unit of currency in various countries", "question": "Would 1943-S penny be good for making silverware?", "answer": true, "facts": [ "Modern pennies are made of zinc and copper.", "The 1943-S penny was made of 99% steel and 1% zinc.", "Modern silverware is made from stainless steel." ], "decomposition": [ "What are 1943-S pennies made out of?", "What is typically modern silverware made out of?", "Is there any overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Penny (United States coin)-5" ] ], [ [ "Cutlery-5", "Cutlery-6" ] ], [ "operation" ] ], [ [ [ "1943 steel cent-1" ] ], [ [ "Spoon-17", "Tableware-3" ] ], [ "operation" ] ], [ [ [ "1943 steel cent-1" ] ], [ [ "Cutlery-1" ] ], [ "operation" ] ] ] }, { "qid": "b4414a29cba573a24c6c", "term": "Flag of the United States", "description": "National flag", "question": "Would someone with leukophobia enjoy looking at the Flag of the United States?", "answer": false, "facts": [ "Leukophobia is a fear of the color white.", "The United States flag is colored red, white, and blue.", "People do not typically enjoy facing their fears." ], "decomposition": [ "What does someone suffering from leukophobia fear?", "What are the colors of the United States flag?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Chromophobia-8" ] ], [ [ "Flag of the United States-1" ] ], [ "operation" ] ], [ [ [ "Chromophobia-3" ] ], [ [ "Flag of the United States-40" ] ], [ "operation" ] ], [ [ [ "Chromophobia-3" ] ], [ [ "Flag of the United States-1" ] ], [ "operation" ] ] ] }, { "qid": "d808a0c72dbe605309ab", "term": "Dodgeball", "description": "sport", "question": "Does Felix Potvin have a position on a dodgeball team?", "answer": false, "facts": [ "Felix Potvin was an NHL goaltender", "There is no goalie position on a dodgeball team" ], "decomposition": [ "Which sport and position did Félix Potvin play as a professional sportsman?", "Is #1 the same as or required in dodgeball?" ], "evidence": [ [ [ [ "Félix Potvin-4" ] ], [ [ "Dodgeball-10" ], "operation" ] ], [ [ [ "Félix Potvin-1" ] ], [ [ "Dodgeball-1" ], "operation" ] ], [ [ [ "Félix Potvin-1" ] ], [ "operation" ] ] ] }, { "qid": "fe7f4a0c9a98439b41a8", "term": "Steven Spielberg", "description": "American film director and screenwriter", "question": "Could Steven Spielberg send emails as a child?", "answer": false, "facts": [ "Steven Spielberg was born in 1946.", "Email did not become available to the general public until 1995." ], "decomposition": [ "When was Stephen Spielberg born?", "When was email invented?", "What is #1 plus 10?", "Is #2 before #3?" ], "evidence": [ [ [ [ "Spielberg (disambiguation)-1" ] ], [ [ "History of email-12" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Steven Spielberg-1" ] ], [ [ "Email-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Steven Spielberg-5" ] ], [ [ "Email-14" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "09a493a0d04b4bed378b", "term": "Red hair", "description": "Hair color", "question": "Does a Disney princess on Broadway have red hair?", "answer": true, "facts": [ "Ariel, the princess from Disney's the Little Mermaid, has red hair.", "The Little Mermaid is one of several Disney animated classics that was adapted for the stage and performed on Broadway." ], "decomposition": [ "What is the name of the princess with red hair?", "What is the name of the animated classic in which #1 is the main star of?", "Has #2 been adapted for Broadway?" 
], "evidence": [ [ [ [ "Merida (Brave)-9" ] ], [ [ "Merida (Brave)-10" ] ], [ [ "Disney Princess-36" ] ] ], [ [ [ "Ariel (The Little Mermaid)-7", "Merida (Brave)-9" ] ], [ [ "Ariel (The Little Mermaid)-1", "Merida (Brave)-1" ] ], [ [ "Ariel (The Little Mermaid)-33" ] ] ], [ [ [ "Ariel (The Little Mermaid)-7" ] ], [ [ "Ariel (The Little Mermaid)-49" ] ], [ [ "The Little Mermaid (musical)-1" ], "operation" ] ] ] }, { "qid": "ab1cd501d3590b46c009", "term": "Bob Marley", "description": "Jamaican singer-songwriter", "question": "Can you find Bob Marley's face in most smoke shops?", "answer": true, "facts": [ "Bob Marley's face is on the packaging of a popular brand of rolling papers.", "Bob Marley is a popular graphic to print on t-shirts for sale to smokers." ], "decomposition": [ "On what items is Bob Marley's face commonly found?", "Are some of #1 sold in most smoke shops?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Tobacconist-1" ] ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bob Marley-1" ] ], [ [ "Head shop-1", "Tobacconist-1" ], "operation" ] ] ] }, { "qid": "cbc489c97e6797962787", "term": "Martyr", "description": "person who suffers persecution and death for advocating, refusing to renounce, and/or refusing to advocate a belief or cause, usually a religious one", "question": "Can a martyr saint have been excommunicated?", "answer": true, "facts": [ "Joan of Arc was excommunicated by the Catholic Church in 1431.", "Joan of Arc was declared a martyr in 1456 after an investigation ordered by King Charles VII.", "Joan of Arc was canonized a Saint by the Catholic Church on May 16, 1920." ], "decomposition": [ "Is Joan of Arc considered a matyr?", "Was she initially excommunicated by the Catholic Church?", "Is #1 or #2 negative?" ], "evidence": [ [ [ [ "Canonization of Joan of Arc-1" ] ], [ [ "Canonization of Joan of Arc-2" ] ], [ "operation" ] ], [ [ [ "Joan of Arc-3" ] ], [ [ "Heresy-3", "Joan of Arc-37" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Joan of Arc-3" ] ], [ [ "Canonization of Joan of Arc-2" ] ], [ "operation" ] ] ] }, { "qid": "c1b017ede9f23c442166", "term": "Homelessness", "description": "circumstance when people desire a permanent dwelling but do not have one", "question": "Could Toyota stadium house people suffering homelessness in Michigan?", "answer": true, "facts": [ "As of 2019 there were an estimated 8,575 people experiencing homelessness in Michigan.", "Toyota stadium has a capacity of 45,000 people." ], "decomposition": [ "How many people are experiencing homelessness in Michigan?", "What is the capacity of the Toyota stadium?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Homelessness in the United States by state-127" ] ], [ [ "Toyota Stadium (Texas)-1", "Toyota Stadium-1" ] ], [ "operation" ] ], [ [ [ "Michigan-1" ], "no_evidence" ], [ [ "Toyota Stadium-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Homelessness-58" ], "no_evidence" ], [ [ "Toyota Stadium (Texas)-1" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "bccf23eb36cf7607b6b0", "term": "Chief Justice of the United States", "description": "Presiding judge of the U.S. Supreme Court", "question": "Would it be impossible to seat every Chief Justice of the United States on a Boeing 737?", "answer": false, "facts": [ "There have been a total of 17 Chief Justices since the Supreme Court was established.", "The Boeing 737 has evolved through four generations, offering several variants for 85 to 215 passengers." 
], "decomposition": [ "How many Chief Justices has the Supreme Court had?", "What is the least amount of people that a Boeing 737 could hold?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "Chief Justice of the United States-5" ] ], [ [ "Boeing 737-4" ] ], [ "operation" ] ], [ [ [ "Chief Justice of the United States-5" ] ], [ [ "Boeing 737-5" ] ], [ "operation" ] ], [ [ [ "Supreme Court of Alabama-10" ], "no_evidence" ], [ [ "Boeing 737-61" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "03065f31c1a550a97107", "term": "Cookie Monster", "description": "character from the television series Sesame Street", "question": "Is Cookie Monster's diet Paleo friendly?", "answer": false, "facts": [ "Cookie Monster is a Sesame Street character that eats copious amounts of chocolate chip cookies.", "The Paleo diet includes foods made from ingredients found during the Paleolithic area.", "Chocolate chip cookies contain soy lecithin and artificial grains.", "Lecithin is used in complex modern industrial processes." ], "decomposition": [ "What is the major component of the Cookie Monster's diet?", "What does Paleo diet consist of?", "Is #1 one of #2" ], "evidence": [ [ [ [ "Cookie Monster-1" ] ], [ [ "Paleolithic diet-1", "Paleolithic diet-3" ] ], [ "operation" ] ], [ [ [ "Cookie Monster-1" ] ], [ [ "Paleolithic diet-3" ] ], [ "operation" ] ], [ [ [ "Cookie Monster-1" ] ], [ [ "Paleolithic diet-9" ] ], [ [ "Paleolithic diet-9" ] ] ] ] }, { "qid": "69030830a83aa7209cf7", "term": "Bronze Age", "description": "Prehistoric period and age studied in archaeology, part of the Holocene Epoch", "question": "Were all the materials to make a cannon known during the bronze age?", "answer": false, "facts": [ "The Bronze Age happened from about 3300 BC to 300 BC.", "Cannons require a fuse, gunpowder, and iron or other material to house the chemicals.", "Gunpowder was discovered around the 9th century AD." ], "decomposition": [ "What years did the Bronze age encompass?", "What materials are required for a cannon to fire?", "When were all the parts of #2 discovered?", "Are all the dates in #3 before or during #1?" ], "evidence": [ [ [ [ "Bronze Age Europe-21" ] ], [ [ "Cannon-68" ] ], [ [ "History of cannon-1" ] ], [ "operation" ] ], [ [ [ "Late Bronze Age collapse-4" ], "no_evidence" ], [ [ "Cannon-1" ] ], [ [ "Cannon-5" ] ], [ "operation" ] ], [ [ [ "Prehistory of Anatolia-28" ], "no_evidence" ], [ [ "Gunpowder-1" ], "no_evidence" ], [ [ "Military history of the Five Dynasties and Ten Kingdoms-22" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "4936ddf51863958ade85", "term": "Desperate Housewives", "description": "American comedy-drama TV series", "question": "Did Teri Hatcher last twice as many episodes on Desperate Housewives as her Superman show?", "answer": true, "facts": [ "Actress Teri Hatcher completed a total of 180 episodes of Desperate Housewives.", "Teri Hatcher starred in Lois & Clark: The New Adventures of Superman based on the Superman comic.", "Teri Hatcher was in 87 episodes of Lois & Clark: The New Adventures of Superman." ], "decomposition": [ "How many episodes of Desperate Housewives did Teri Hatcher appear in?", "What show did Teri Hatcher appear in that is based on a Superman comic?", "How many episodes of #2 did Teri Hatcher appear in?", "What is #3 multiplied by 2?", "Is #1 greater than or equal to #4?" 
], "evidence": [ [ [ [ "Desperate Housewives-1", "Susan Mayer-1" ] ], [ [ "Lois & Clark: The New Adventures of Superman-1" ] ], [ [ "The Booth at the End-21" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Teri Hatcher-1" ] ], [ [ "Lois & Clark: The New Adventures of Superman-1" ] ], [ [ "Teri Hatcher-1" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Desperate Housewives-1" ], "no_evidence" ], [ [ "Teri Hatcher-9" ] ], [ [ "Lois & Clark: The New Adventures of Superman-1" ], "no_evidence" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "d19a209a6cddeca38a94", "term": "National Hockey League", "description": "North American professional ice hockey league", "question": "Do American teams in National Hockey League outnumber Canadian teams?", "answer": true, "facts": [ "The National Hockey League is the premiere North American hockey league.", "The National Hockey League has 7 Canadian teams.", "The National Hockey League has 24 teams from the United States." ], "decomposition": [ "How many Canadian teams are in the The National Hockey League?", "How many American teams are in the The National Hockey League?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "National Hockey League-1" ] ], [ [ "National Hockey League-1" ] ], [ "operation" ] ], [ [ [ "Ice hockey in the United States-5" ] ], [ [ "Ice hockey in the United States-5" ] ], [ "operation" ] ], [ [ [ "National Hockey League-1" ] ], [ [ "National Hockey League-1" ] ], [ "operation" ] ] ] }, { "qid": "9eec85fab510de606494", "term": "Elon Musk", "description": "American industrialist and investor", "question": "Has Elon Musk's hairline changed?", "answer": true, "facts": [ "When Elon Musk was much younger, he was clearly balding.", "Elon Musk does not show any signs of balding as of 2020." ], "decomposition": [ "What feature of Elon Musk's hair was notable when he was younger?", "Is #1 no longer observable in present times?" ], "evidence": [ [ [ [ "Elon Musk-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Elon Musk-1" ], "no_evidence" ], [ [ "Hair loss-4" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "cda85328c8825e86d3f0", "term": "Astronomer", "description": "Scientist who studies celestial bodies", "question": "Does Nintendo's link ever see an astronomer?", "answer": true, "facts": [ "Link is the main character of the Nintendo franchise 'Zelda\".", "In \"Legend of Zelda: Majora's Mask\" Link meets an astronomer in an observatory." ], "decomposition": [ "Which game is Link from?", "In #1, did link meet an astronomer?" ], "evidence": [ [ [ [ "The Legend of Zelda-24" ] ], [ [ "Universe of The Legend of Zelda-60" ], "no_evidence", "operation" ] ], [ [ [ "Link (The Legend of Zelda)-1" ] ], [ "no_evidence" ] ], [ [ [ "Link (The Legend of Zelda)-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f6e1ccbadcebd21aadf9", "term": "Swastika", "description": "a geometrical figure and an ancient religious icon in the cultures of Eurasia and 20th-century symbol of Nazism", "question": "Does the swastika have positive uses?", "answer": true, "facts": [ "The swastika is used in the Hindu religion to represent the sun.", "People practicing Hindu believe the swastika represents prosperity and good luck." ], "decomposition": [ "What does the swastika represent in Hinduism?", "What beliefs do Hindu worshippers associate with #1?", "Are #2 positive?" 
], "evidence": [ [ [ [ "Swastika-34" ] ], [ [ "Swastika-34" ] ], [ [ "Swastika-34" ] ] ], [ [ [ "Swastika-1", "Swastika-3" ] ], [ [ "Swastika-3" ] ], [ "operation" ] ], [ [ [ "Swastika-3" ] ], [ [ "Swastika-3" ] ], [ [ "Luck-24", "Prosperity-1" ] ] ] ] }, { "qid": "95c7c7b36b2dd981b820", "term": "Doctorate", "description": "academic or professional degree", "question": "Is a doctorate required to teach at a SUNY School?", "answer": false, "facts": [ "At SUNY schools, there are some full time professors with doctorates.", "At SUNY schools, there are adjunct professors who teach with a Masters degree. " ], "decomposition": [ "Is it the case, that there are no people teaching at SUNY that do not have a doctorate degree?" ], "evidence": [ [ [ "no_evidence" ] ], [ [ [ "New York (state)-93", "Professor-18", "Professor-5" ] ] ], [ [ [ "SUNY Downstate Medical Center-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "a202af46315d9970d768", "term": "University of Pittsburgh", "description": "American state-related research university located in Pittsburgh, Pennsylvania", "question": "Did University of Pittsburgh founder have great deal in common with Judith Sheindlin?", "answer": true, "facts": [ "Hugh Henry Brackenridge founded University of Pittsburgh in 1787.", "Judith Sheindlin is a judge, lawyer, and author.", "Hugh Henry Brackenridge was a writer, lawyer, judge, and Justice of the Supreme Court of Pennsylvania." ], "decomposition": [ "Who was the founder of University of Pittsburgh?", "What are the major things #1 is known for?", "What are the major things Judith Sheindlin is known for?", "Is there an overlap between #2 and #3?" ], "evidence": [ [ [ [ "History of the University of Pittsburgh-2" ] ], [ [ "Hugh Henry Brackenridge-4" ] ], [ [ "Judy Sheindlin-1" ] ], [ [ "Judge-5" ], "operation" ] ], [ [ [ "University of Pittsburgh-1" ] ], [ [ "Hugh Henry Brackenridge-1" ] ], [ [ "Judy Sheindlin-1" ] ], [ "operation" ] ], [ [ [ "History of the University of Pittsburgh-2" ] ], [ [ "Hugh Henry Brackenridge-1" ] ], [ [ "Judy Sheindlin-1" ] ], [ "operation" ] ] ] }, { "qid": "1a4eb85c31188af515b0", "term": "Los Angeles County, California", "description": "County in California, United States", "question": "Is Disney associated with Los Angeles County?", "answer": true, "facts": [ "Disney Concert Hall and Disney Studio are located in Los Angeles.", "The city of Los Angeles is located in Los Angeles County." ], "decomposition": [ "Where are Disney Concert Hall and Disney Studio located?", "Is #1 located in Los Angeles County?" ], "evidence": [ [ [ [ "Walt Disney Animation Studios-1", "Walt Disney Concert Hall-1" ] ], [ [ "Burbank, California-1", "Central Los Angeles-1", "Downtown Los Angeles-1" ], "operation" ] ], [ [ [ "Walt Disney Concert Hall-1", "Walt Disney Studios (division)-1" ] ], [ [ "Burbank, California-1", "Los Angeles County, California-1" ] ] ], [ [ [ "Walt Disney Studios (Burbank)-4" ] ], [ [ "Burbank, California-1" ], "operation" ] ] ] }, { "qid": "b2c1c69dbfc82dc9da50", "term": "Pea", "description": "species of plant", "question": "Could a bee hummingbird balance a scale with a single pea on it?", "answer": false, "facts": [ "The average pea weighs between 0.1 and 0.36 grams.", "Female bee hummingbirds on average weigh 2.6 grams, while on average male bee hummingbirds weigh 1.95 grams." ], "decomposition": [ "What is the weight range of the average pea?", "What is the weight range of the average bee hummbingbird?", "Is there an overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Pea-2" ] ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ], [ [ [ "Pea-2" ] ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ], [ [ [ "Pea-2" ] ], [ [ "Bee hummingbird-2" ] ], [ "operation" ] ] ] }, { "qid": "2787ada17c12601bcd0c", "term": "Abstract art", "description": "Art with a degree of independence from visual references in the world", "question": "Can photography be considered abstract art?", "answer": false, "facts": [ "Abstract art is a form of modern art that does not reflect images of our every day world.", "Abstract art relies on exaggerated colors and shapes.", "Photography is an art that uses cameras to take pictures of events unfolding in the real world." ], "decomposition": [ "What kind of events/scenarios is depicted in abstract art?", "What kind of imagery does photography capture?", "Is #1 very similar to #2?" ], "evidence": [ [ [ [ "Abstract art-5" ] ], [ [ "Photography-1" ] ], [ "operation" ] ], [ [ [ "Abstract art-1", "Abstract art-3" ] ], [ [ "Photography-1", "Photography-2" ] ], [ "operation" ] ], [ [ [ "Abstract art-1" ] ], [ [ "Photography-68" ] ], [ "operation" ] ] ] }, { "qid": "02f5f11bf98632578d50", "term": "Great Depression", "description": "20th-century worldwide economic depression", "question": "Can the Great Depression be treated with Prozac?", "answer": false, "facts": [ "Prozac is a pharmaceutical antidepressant for treatment of psychological disorders", "The Great Depression was an economic phenomenon occurring in the early 20th century" ], "decomposition": [ "What conditions can be treated with Prozac?", "The conditions in #1 are inflicted upon what?", "Does the Great Depression have #2?" ], "evidence": [ [ [ [ "Fluoxetine-1" ] ], [ [ "Depression (mood)-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Fluoxetine-1" ] ], [ [ "Major depressive disorder-1" ] ], [ [ "Great Depression-1" ], "operation" ] ], [ [ [ "Fluoxetine-6" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "01844485471f396ee9ef", "term": "Edward Snowden", "description": "American whistleblower and former National Security Agency contractor", "question": "Is Edward Snowden in hiding from the United States?", "answer": false, "facts": [ "Edward Snowden has an active twitter account and has been on political commentary shows.", "Edward Snowden's country of residence is listed on his Wikipedia." ], "decomposition": [ "Where does Edward Snowden live?", "Is the location of #1 kept secret?" ], "evidence": [ [ [ [ "Edward Snowden-77" ] ], [ [ "Edward Snowden-77" ], "operation" ] ], [ [ [ "Edward Snowden-78" ] ], [ [ "Edward Snowden-78" ], "no_evidence" ] ], [ [ [ "Edward Snowden-78" ] ], [ "operation" ] ] ] }, { "qid": "99f1b07917c2df314619", "term": "Bumblebee", "description": "genus of insects", "question": "Are aggressive bumblebees suicidal?", "answer": false, "facts": [ "Bees with barbed stingers lose the barb after attacking a victim and die soon afterwards", "Bumblebees do not have barbed stingers and can sting multiple times without dying" ], "decomposition": [ "Can bees with non-barbed stingers sting multiple times?", "Do bumblebees have non-barbed stingers?", "By #1 and #2 do bumblebees die after stinging just once?" 
], "evidence": [ [ [ [ "Honey bee-61" ] ], [ [ "Bumblebee-45" ] ], [ "operation" ] ], [ [ [ "Bombus ternarius-23" ] ], [ [ "Bumblebee-45" ] ], [ "operation" ] ], [ [ [ "Bee sting-6" ] ], [ [ "Bumblebee-45" ] ], [ "operation" ] ] ] }, { "qid": "b56796dda04ab21e5f24", "term": "Alfred Nobel", "description": "Swedish chemist, innovator, and armaments manufacturer (1833–1896)", "question": "Has categories of Nobel prizes remained same since Alfred Nobel established them?", "answer": false, "facts": [ "Alfred Nobel established the Nobel prize in his will in 1895.", "Alfred Nobel established 5 Nobel prize categories: Chemistry, Literature, Peace, Physics, and Physiology or Medicine.", "The Nobel prize evolved to include a sixth category of Economic Sciences in 1968." ], "decomposition": [ "When did Alfred Nobel establish the Nobel prize?", "Has the Nobel Prize remained unchanged in all respects since #1?" ], "evidence": [ [ [ [ "Nobel Prize-1" ] ], [ [ "Nobel Prize-21" ], "operation" ] ], [ [ [ "Nobel Prize-1" ] ], [ [ "Nobel Prize-2" ] ] ], [ [ [ "Alfred Nobel-13" ] ], [ [ "Alfred Nobel-14", "Alfred Nobel-17" ], "operation" ] ] ] }, { "qid": "3bf4c56b6230859ec41c", "term": "British Airways", "description": "flag carrier airline of the United Kingdom", "question": "Is British Airways the air force of the United Kingdom?", "answer": false, "facts": [ "British Airways is a commercial transportation company.", "The military air force of the United Kingdom is named the Royal Air Force. ", "British Airways is headquartered in London. ", "Royal Air Force is part of the British Armed Forces." ], "decomposition": [ "What is the air force of the United Kingdom known as?", "Is #1 the same as British Airways?" ], "evidence": [ [ [ [ "Royal Air Force-4" ] ], [ [ "British Airways-1" ], "no_evidence" ] ], [ [ [ "Royal Air Force-1" ] ], [ [ "British Airways-1" ] ] ], [ [ [ "Royal Air Force-1" ] ], [ [ "British Airways-2" ], "operation" ] ] ] }, { "qid": "9738b3c0a4db11d5f8bc", "term": "Cerebral palsy", "description": "A group of disorders affecting the development of movement and posture, often accompanied by disturbances of sensation, perception, cognition, and behavior. It results from damage to the fetal or infant brain.", "question": "Is a slime mold safe from cerebral palsy?", "answer": true, "facts": [ "Cerebral palsy is a disorder caused by damage to fetal or infant brains.", "Slime molds are simple organisms that are similar to fungi.", "Slime molds do not possess a brain." ], "decomposition": [ "Damage to what structure can cause cerebral palsy?", "What structures do slime molds have?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Cerebral palsy-2" ] ], [ [ "Plasmodium (life cycle)-3" ] ], [ "operation" ] ], [ [ [ "Cerebral palsy-2" ] ], [ [ "Slime mold-18" ] ], [ "operation" ] ], [ [ [ "Cerebral palsy-2" ] ], [ [ "Slime mold-18" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "c3b07403729ae5296cf6", "term": "Harry Potter and the Philosopher's Stone", "description": "1997 fantasy novel by J. K. Rowling", "question": "Was Harry Potter and the Philosopher's Stone popular during the great depression?", "answer": false, "facts": [ "The Great Depression started in 1929 and ended in 1933.", "Harry Potter and the Philosopher's Stone was first published in 1997." ], "decomposition": [ "When did the Great Depression end?", "When was Harry Potter and the Philosopher's Stone first published?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Great Depression-1" ] ], [ [ "Harry Potter-2" ] ], [ [ "Harry Potter-2" ], "operation" ] ], [ [ [ "Great Depression-1" ] ], [ [ "Harry Potter and the Philosopher's Stone-2" ] ], [ "operation" ] ], [ [ [ "Great Depression-1" ] ], [ [ "Harry Potter and the Philosopher's Stone-2" ] ], [ "operation" ] ] ] }, { "qid": "8507afa6a9a1db314bac", "term": "United States Department of Defense", "description": "United States federal executive department", "question": "Is the CIA part of the Department of Defense?", "answer": false, "facts": [ "The Department of Defense covers national defense and the armed forces, led by the Secretary of Defense.", "The CIA is a federal agency within the United States Intelligence Community, which answers to the Director of National Intelligence." ], "decomposition": [ "Which agencies are under the United States Department of Defense?", "Is the CIA included in #1?" ], "evidence": [ [ [ [ "United States Department of Defense-1" ], "no_evidence" ], [ [ "Central Intelligence Agency-1" ], "no_evidence", "operation" ] ], [ [ [ "United States Department of Defense-2" ] ], [ "operation" ] ], [ [ [ "United States Department of Defense-2" ] ], [ "operation" ] ] ] }, { "qid": "2fca3349a211a68b41f2", "term": "Barley", "description": "Species of plant", "question": "Would the owners of the company Peter Griffin works for need barley?", "answer": true, "facts": [ "Peter Griffin works for Pawtucket Brewery.", "Pawtucket Brewery produces beer.", "Barley is the preferred grain for making beer." ], "decomposition": [ "What kind of company is Peter Griffin?", "What does #1 produce?", "Does producing #2 require barley?" ], "evidence": [ [ [ [ "Peter Griffin-2" ], "no_evidence" ], [ [ "Brewery-1" ] ], [ [ "Brewery-27" ], "operation" ] ], [ [ [ "Peter Griffin-2" ] ], [ [ "Brewery-1" ] ], [ [ "Beer-1" ], "operation" ] ], [ [ [ "Peter Griffin-2" ] ], [ [ "Brewery-1" ] ], [ [ "Barley-1" ] ] ] ] }, { "qid": "47e4f407f7186ba6b86f", "term": "Chipmunk", "description": "Tribe of mammals (rodent (marmot))", "question": "Is an Eastern chipmunk likely to die before seeing two leap years?", "answer": true, "facts": [ "A leap year happens every four years.", "The Eastern chipmunk has an average lifespan of three years." ], "decomposition": [ "What is the average lifespan of an Eastern chipmunk?", "How often does a leap year occur?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Eastern chipmunk-7" ] ], [ [ "Leap year-6" ] ], [ "operation" ] ], [ [ [ "Chipmunk-11" ] ], [ [ "Leap year-2" ] ], [ "operation" ] ], [ [ [ "Chipmunk-11" ] ], [ [ "Leap year-2" ] ], [ "operation" ] ] ] }, { "qid": "59fdc8eaf72fdb34e744", "term": "Ahura Mazda", "description": "highest deity of Zoroastrianism", "question": "Does Ahura Mazda have a rivalry with Zeus?", "answer": false, "facts": [ "Ahura Mazda is a deity of Zoroastrianism, a contemporary religion", "Zeus is a deity of Greek mythology" ], "decomposition": [ "What belief system is Ahura Mazda associated with?", "What belief system is Zeus associated with?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Ahura Mazda-1" ] ], [ [ "Zeus-1" ] ], [ "operation" ] ], [ [ [ "Ahura Mazda-1" ] ], [ [ "Zeus-1" ] ], [ "operation" ] ], [ [ [ "Ahura Mazda-1" ] ], [ [ "Zeus-1" ] ], [ "operation" ] ] ] }, { "qid": "12802f22d30bcaf903a9", "term": "Holy Land", "description": "Term used by Jews, Christians, and Muslims to describe the Land of Israel and Palestine", "question": "Did Holy Land belong to Adamu's tribe?", "answer": true, "facts": [ "The Holy Land is a place that Jews, Muslims, and Christians revere.", "Adamu was an early king of Assyria.", "The Assyrians were in regions of the Holy Land as far back as 2600 BC.", "The predecessors to the Assyrians were in regions of the Holy Land as far back as 3500 BC." ], "decomposition": [ "Which place is referred to as the Holy Land?", "Which tribe was Adamu a leader of?", "Did #2 occupy #1?" ], "evidence": [ [ [ [ "Holy Land-1" ] ], [ [ "Adamu (Assyrian king)-1" ] ], [ [ "Adamu (Assyrian king)-4", "Assyria-1", "Mesopotamia-1" ] ] ], [ [ [ "Holy Land-1" ] ], [ [ "Adamu (Assyrian king)-1", "Assyrian people-1", "Assyrian people-2", "Assyrian people-51" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Holy place-8" ], "operation" ], [ [ "Adamu Adamu-2" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "ab1703ed82cc14252501", "term": "Edward II of England", "description": "14th-century King of England and Duke of Aquitaine", "question": "Was Edward II crucial to England's victory at Battle of Falkirk?", "answer": false, "facts": [ "The Battle of Falkirk was a battle between England and the Scots.", "King Edward I led English forces to victory against William Wallace at the Battle of Falkirk.", "The Battle of Falkirk took place in 1298.", "Edward II was born in 1284 and his first campaign with his father against Scotland happened in 1300.", "Edward II was knighted in 1306." ], "decomposition": [ "When did the Battle of Falkirk occur?", "When did Edward II start appearing at battles with his father?", "Did #2 occur before #1?" ], "evidence": [ [ [ [ "Battle of Falkirk-1" ] ], [ [ "Edward II of England-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Battle of Falkirk-1" ] ], [ [ "Edward II of England-1" ] ], [ "operation" ] ], [ [ [ "Battle of Falkirk-1" ] ], [ [ "Edward II of England-14" ] ], [ "operation" ] ] ] }, { "qid": "1cd4ba0baee559bf7a0f", "term": "White blood cell", "description": "type of cells of the immunological system", "question": "Do white blood cells outnumber red blood cells in the human body?", "answer": false, "facts": [ "Red blood cells are about 40-50% of what makes up human blood.", "White blood cells make up about 1% of the blood in a human body." ], "decomposition": [ "What percent of blood by volume is made up of white blood cells?", "What percent of blood by volume is red blood cells?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "White blood cell-3" ] ], [ [ "White blood cell-3" ] ], [ [ "White blood cell-3" ] ] ], [ [ [ "Blood-8" ] ], [ [ "Blood-8" ] ], [ "operation" ] ], [ [ [ "White blood cell-3" ] ], [ [ "White blood cell-3" ] ], [ "operation" ] ] ] }, { "qid": "7f435c65e98ee7f07b85", "term": "Cornwall", "description": "County of England", "question": "Was John George Bice's birthplace near Cornwall?", "answer": true, "facts": [ "Politician John George Bice was born in Callington.", "Cornwall is a place located in South West England.", "Callington is a small town in South East Cornwall." ], "decomposition": [ "Where was John George Bice born?", "Is #1 located close to Cornwall?" 
], "evidence": [ [ [ [ "John George Bice-2" ] ], [ "operation" ] ], [ [ [ "John George Bice-2" ] ], [ [ "John George Bice-2" ] ] ], [ [ [ "John George Bice-2" ] ], [ [ "Callington-1" ] ] ], [ [ [ "John George Bice-2" ] ], [ [ "John George Bice-2" ] ] ] ] }, { "qid": "30b9ff6506a31671b4d3", "term": "History of Europe", "description": "History of Europe from the beginnings of recorded history", "question": "Does the history of Europe include the age of dinosaurs?", "answer": false, "facts": [ "Dinosaurs went extinct many millions of years ago.", "In contrast, ancient humans only started recording history several thousand years ago." ], "decomposition": [ "When did the dinosaurs exist?", "When did humans first colonize Europe?", "Is #2 contained within the range of #1?" ], "evidence": [ [ [ [ "Dinosaur-1" ] ], [ [ "Europe-29" ] ], [ "operation" ] ], [ [ [ "Dinosaur-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Dinosaur-1" ] ], [ [ "Hominid dispersals in Europe-10" ] ], [ "operation" ] ] ] }, { "qid": "e5f9336ceb74622c14c0", "term": "Model (person)", "description": "person employed to display, advertise and promote products, or to serve as a visual aid", "question": "Would a model be likely to frequently enjoy the menu at Cookout?", "answer": false, "facts": [ "Models are known for being very thin on average.", "Cookout serves high calorie American style barbecue food.", "Models often have pressure put on them to maintain a slim figure." ], "decomposition": [ "What is the typical body shape of a model?", "What kind of food does a cookout typically have?", "Are #2 foods high in calories?", "In order to maintain #1, what kinds of food must a person eat?", "Does #3 match with #4?" ], "evidence": [ [ [ [ "Model (person)-22" ] ], [ [ "Cook Out (restaurant)-1" ] ], [ [ "Food energy-4" ] ], [ [ "Model (person)-24" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Thin Ideal-21" ] ], [ [ "Cook Out (restaurant)-1" ] ], [ "operation" ], [ [ "Dieting-13" ] ], [ "operation" ] ], [ [ [ "Model (person)-24" ] ], [ [ "Cook Out (restaurant)-1" ] ], [ [ "Fast food-6" ] ], [ [ "Dieting-1" ] ], [ "operation" ] ] ] }, { "qid": "2b6cfeac9f6533f0409e", "term": "Markhor", "description": "species of mammal", "question": "Could a markhor give birth three times in a single year?", "answer": false, "facts": [ "The gestation period of a markhor lasts 135–170 days.", "There are 365 days in a year." ], "decomposition": [ "What is the gestation period of a Markhor?", "How many days are in a year?", "Can #1 be divided into #2 at least 3 times" ], "evidence": [ [ [ [ "Markhor-6" ] ], [ [ "Calendar year-2" ] ], [ [ "Calendar year-2", "Markhor-6" ], "operation" ] ], [ [ [ "Markhor-6" ] ], [ [ "Year-3" ] ], [ "operation" ] ], [ [ [ "Markhor-6" ] ], [ [ "Year-3" ] ], [ "operation" ] ] ] }, { "qid": "aa08a6021227d95f0f10", "term": "The Onion", "description": "American news satire organization", "question": "Could a delicious recipe be made with The Onion?", "answer": false, "facts": [ "Despite its name, The Onion is not a food, but rather an organization.", "It is not possible to eat a business organization." ], "decomposition": [ "What is The Onion?", "Is #1 an item you can eat?" 
], "evidence": [ [ [ [ "The Onion-1" ] ], [ "operation" ] ], [ [ [ "The Onion-1" ] ], [ "operation" ] ], [ [ [ "The Onion-1" ] ], [ [ "Company-1", "Food-1" ] ] ] ] }, { "qid": "ae0891799990442fbb0e", "term": "Urban planner", "description": "professional who works on city planning", "question": "Would Paul Bunyan hypothetically be a poor choice for an urban planner?", "answer": true, "facts": [ "Paul Bunyan was a legendary giant from tall tale stories.", "Urban planners need to design buildings and ceilings in cities.", "Paul Bunyan lived in the country side with a giant blue ox.", "New York city apartment ceilings average around 8 feet in height.", "Paul Bunyan was over seven feet tall." ], "decomposition": [ "What environments are urban planners experts of?", "Did Paul Bunyan live or work in #1?" ], "evidence": [ [ [ [ "Urban planner-1", "Urban planner-2" ] ], [ [ "Paul Bunyan-1" ], "operation" ] ], [ [ [ "Urban planner-2" ] ], [ [ "Paul Bunyan-1" ], "operation" ] ], [ [ [ "Urban planner-2" ], "no_evidence" ], [ [ "Lumberjack-5", "Paul Bunyan-1" ], "operation" ] ] ] }, { "qid": "70168bf2898466b57253", "term": "Fax", "description": "method of transmitting images, often of documents", "question": "Do most college students own a fax machine?", "answer": false, "facts": [ "College students typically must submit assignments via email, web portal, or on paper.", "Most colleges have on-campus fax machines available for student use." ], "decomposition": [ "How do college students typically submit their assignments nowadays?", "Does #1 require a fax machine?" ], "evidence": [ [ [ [ "Student-54" ], "no_evidence" ], [ [ "Fax-1" ], "no_evidence" ] ], [ [ [ "Educational technology-1", "Educational technology-14" ], "no_evidence" ], [ [ "Fax-1" ], "operation" ] ], [ [ [ "Email-1", "Email-45" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "55b8d3c7ae5fbb58ddce", "term": "Leafhopper", "description": "family of insects", "question": "Do Leafhoppers compete with Log Cabin syrup producers for resources?", "answer": true, "facts": [ "Leafhoppers are insects that feed on sap from trees.", "Log Cabin is a company that originated in Minnesota and makes several varieties of maple syrup.", "Sap is an ingredient in maple syrup." ], "decomposition": [ "What does the leafhopper diet consist of?", "What kind of syrup is produced by Log Cabin?", "What are the ingredients in #2?", "Is any substance listed in #1 also found in #3?" ], "evidence": [ [ [ [ "Leafhopper-6" ], "no_evidence" ], [ [ "Log Cabin syrup-1", "Syrup-2" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Leafhopper-1" ] ], [ [ "Log Cabin syrup-1" ], "no_evidence" ], [ [ "Maple syrup-1" ] ], [ "operation" ] ], [ [ [ "Leafhopper-6" ] ], [ [ "Log Cabin syrup-4" ] ], [ [ "Maple syrup-6" ] ], [ "operation" ] ] ] }, { "qid": "2127e3455127e099982b", "term": "Scottish people", "description": "ethnic inhabitants of Scotland", "question": "Does the Pixar film Brave feature Scottish people?", "answer": true, "facts": [ "The movie Brave is set in the Scottish highlands.", "Merida, the main character of Brave, is a Princess of Medieval Scotland " ], "decomposition": [ "Who are the main characters of the Pixar film Brave?", "Are any of #1 from Scotland?" 
], "evidence": [ [ [ [ "Brave (2012 film)-4" ] ], [ "operation" ] ], [ [ [ "Brave (2012 film)-4" ] ], [ [ "Brave (2012 film)-4" ], "operation" ] ], [ [ [ "Brave (2012 film)-1", "Brave (2012 film)-4" ] ], [ "operation" ] ] ] }, { "qid": "665a7a698ff08a8aa399", "term": "Chick-fil-A", "description": "American fast food chain", "question": "Would a vegetarian be able to eat something at Chick-fil-A?", "answer": true, "facts": [ "Most people who follow a vegetarian diet don't eat meat, fish or poultry. ", "While Chick-fil-A sells chicken, they also offer other items. ", "Items that are meat free include: hash browns, waffle fries, and superfood sides." ], "decomposition": [ "What foods must a vegetarian avoid?", "What foods are on the menu of Chick-fil-A?", "Are any items in #2 free of #1?" ], "evidence": [ [ [ [ "Vegetarianism-1" ] ], [ [ "Chick-fil-A-39" ] ], [ [ "Crinkle-cutting-4", "Potato-1" ], "operation" ] ], [ [ [ "Vegetarianism-1" ] ], [ [ "Chick-fil-A-39" ] ], [ "operation" ] ], [ [ [ "Vegetarianism-1" ] ], [ [ "Chick-fil-A-39" ] ], [ "operation" ] ] ] }, { "qid": "56434ca634c4ef6e0741", "term": "Eid al-Fitr", "description": "Islamic holiday that marks the end of Ramadan", "question": "Could jockey win Triple Crown between Eid al-Fitr endpoints?", "answer": false, "facts": [ "The Triple Crown is an accomplishment in which a jockey wins three specific races.", "The three Triple Crown races are: Preakness, Kentucky Derby, and Belmont Stakes.", "The three Triple Crown races take place weeks apart.", "Eid al-Fitr is a Muslim holiday that lasts for three consecutive days." ], "decomposition": [ "How long does Eid al-Fitr last?", "How long is it between the first and last races of the Triple Crown?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Eid al-Fitr-4" ] ], [ [ "Belmont Stakes-1" ] ], [ [ "Week-1" ], "operation" ] ], [ [ [ "Eid al-Fitr-1" ] ], [ [ "Triple Crown of Thoroughbred Racing (United States)-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Eid al-Fitr-1" ] ], [ [ "Belmont Stakes-1", "Kentucky Derby-1", "Triple Crown of Thoroughbred Racing (United States)-1" ] ], [ "operation" ] ] ] }, { "qid": "cea2610394fec9fbde44", "term": "Harry Houdini", "description": "American magician, escapologist, and stunt performer", "question": "Did Harry Houdini appear on Chris Angel Mindfreak?", "answer": false, "facts": [ "Chris Angel Mindfreak was released in 2005.", "Harry Houdini died in 1926." ], "decomposition": [ "When did Harry Houdini's career as an entertainer come to an end?", "When was the Criss Angel Mindfreak show first aired?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Harry Houdini-68" ] ], [ [ "Criss Angel Mindfreak-1" ] ], [ "operation" ] ], [ [ [ "Harry Houdini-39" ] ], [ [ "Criss Angel Mindfreak-1" ] ], [ "operation" ] ], [ [ [ "Harry Houdini-71" ] ], [ [ "Criss Angel Mindfreak-1" ] ], [ "operation" ] ] ] }, { "qid": "2be83f11f8b6602afe87", "term": "Psychotherapy", "description": "clinically applied psychology for desired behavior modification", "question": "Do some psychotherapy patients have no mental illness?", "answer": true, "facts": [ "Psychotherapy is useful for couples navigating relationship issues.", "Grief is a common reason that people seek psychotherapy. " ], "decomposition": [ "What are some common issues that make people seek psychotherapy?", "Does #1 not always involve mental illness?" 
], "evidence": [ [ [ [ "Psychotherapy-1" ] ], [ "operation" ] ], [ [ [ "Psychotherapy-1" ] ], [ [ "Psychotherapy-4" ], "operation" ] ], [ [ [ "Psychotherapy-1" ] ], [ [ "Psychotherapy-32" ], "operation" ] ] ] }, { "qid": "33f7f8c55b4acedb061a", "term": "Lolcat", "description": "image combining a photograph of a cat with text intended to contribute humour", "question": "Is purchasing food for a Lolcat unnecessary?", "answer": true, "facts": [ "An image macro is a piece of digital media featuring a picture, or artwork, superimposed with some form of text.", "Food is any substance consumed to provide nutritional support for an organism.", "An organism is any individual entity that embodies the properties of life.", "Digital media does not embody the properties of life." ], "decomposition": [ "Which kind of entities require food?", "Is a lolcat excluded from #1?" ], "evidence": [ [ [ [ "Eating-1" ] ], [ [ "Lolcat-2" ] ] ], [ [ [ "Food-1", "Organism-1", "Organism-2" ] ], [ [ "Image macro-1", "Lolcat-1", "Media (communication)-1" ] ] ], [ [ [ "Food-1" ] ], [ [ "Lolcat-2" ], "operation" ] ] ] }, { "qid": "9ca37a720a283c3d6045", "term": "Common warthog", "description": "Wild member of the pig family", "question": "Would a Common warthog starve in a greenhouse?", "answer": false, "facts": [ "A greenhouse is an enclosed building in which plants are grown.", "The Common warthog is an animal that feeds on grasses, roots, berries, and small insects.", "Aphids, fungus gnats, and caterpillars, are common insects found in greenhouses." ], "decomposition": [ "What kind of things are found in a greenhouse?", "What does the warthog diet consist of?", "Is there significant overlap between #1 and #2?" ], "evidence": [ [ [ [ "Greenhouse-31" ] ], [ [ "Phacochoerus-2" ] ], [ [ "Greenhouse-31", "Herbivore-1" ] ] ], [ [ [ "Greenhouse-1" ] ], [ [ "Common warthog-5" ] ], [ "operation" ] ], [ [ [ "Greenhouse-27" ] ], [ [ "Common warthog-5" ] ], [ "operation" ] ] ] }, { "qid": "c928aaf3842cb87e3642", "term": "Saltwater crocodile", "description": "species of reptile", "question": "Would alligator best saltwater crocodile in hypothetical Lake Urmia battle?", "answer": false, "facts": [ "Lake Urmia is a salt lake in Iran.", "Saltwater crocodiles have special glands that allow them to survive in salt water.", "Alligators lack glands to stay in salt water for extended periods of time." ], "decomposition": [ "What kind of water is in Lake Urmia?", "Can alligators survive for long in #1?" ], "evidence": [ [ [ [ "Lake Urmia-1" ] ], [ "no_evidence" ] ], [ [ [ "Lake Urmia-1" ] ], [ [ "Alligator-8" ] ] ], [ [ [ "Lake Urmia-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "7f5bde421ffbd43d7cdd", "term": "Sirius", "description": "Brightest star in the night sky", "question": "Is Sirius part of a constellation of an animal?", "answer": true, "facts": [ "Sirius is the brightest star in the constellation Canis Major.", "Canis Major represents a large dog." ], "decomposition": [ "What constellation is Sirius a part of?", "What does #1 represent?", "Is #2 an animal?" 
], "evidence": [ [ [ [ "Canis Major-2" ] ], [ [ "Canis Major-1" ] ], [ "operation" ] ], [ [ [ "Sirius-4" ] ], [ [ "Canis Major-1" ] ], [ "operation" ] ], [ [ [ "Sirius-4" ] ], [ [ "Canis Major-4" ] ], [ [ "Animal-4" ] ] ] ] }, { "qid": "3cec996f26ec99d0a519", "term": "The Godfather", "description": "1972 film directed by Francis Ford Coppola", "question": "Is Y2K relevant to the plot of The Godfather?", "answer": false, "facts": [ "The story in the Godfather spans from 1945 to 1955.", "Y2K refers to events related to the formatting and storage of calendar data for dates beginning in the year 2000." ], "decomposition": [ "What era is the story of The Godfather set in?", "What year does Y2K refer to?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "The Godfather-1" ] ], [ [ "Year 2000 problem-1" ] ], [ "operation" ] ], [ [ [ "The Godfather-1" ] ], [ [ "Year 2000 problem-10" ] ], [ "operation" ] ], [ [ [ "The Godfather-1" ] ], [ [ "Year 2000 problem-1" ] ], [ "operation" ] ] ] }, { "qid": "b0f15f72bd627ddff80b", "term": "Scrabble", "description": "board game with words", "question": "Could a two-year old win a Scrabble tournament?", "answer": false, "facts": [ "Scrabble is a word game that requires a large vocabulary in order to play well.", "A two-year old has a very limited vocabulary and lacks the reasoning capability needed to perform well in Scrabble." ], "decomposition": [ "What size vocabulary do Scrabble champions have?", "What size vocabulary do two-year olds have?", "is #2 greater than #1?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Language development-13" ] ], [ "no_evidence", "operation" ] ], [ [ [ "World Scrabble Championship-4" ], "no_evidence" ], [ [ "Vocabulary-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Official Scrabble Players Dictionary-9" ], "no_evidence" ], [ [ "Toddler-5" ] ], [ "operation" ] ] ] }, { "qid": "10d718862227bef4a6ed", "term": "Chives", "description": "edible species of plant", "question": "Could chives be mistaken for grass?", "answer": true, "facts": [ "Chives grow upwards in thin green cylindrical shoots. ", "Grass grows upwards in thin green flat shoots." ], "decomposition": [ "What is the shape and color of Chives?", "What is the shape and color of grass?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Chives-4" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Chives-4" ], "no_evidence" ], [ [ "Poaceae-42" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Chives-6" ] ], [ [ "Poaceae-15" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "5694613473f2c2c557f7", "term": "New York Harbor", "description": "harbor in the New York City, U.S.A. metropolitan area", "question": "Does a giant green lady stand in New York Harbor?", "answer": true, "facts": [ "New York Harbor is a body of water between south Manhattan and the Atlantic Ocean", "The Statue of Liberty stands in New York Harbor", "The Statue of Liberty is a very large, green statue of a woman" ], "decomposition": [ "Where is The Statue of Liberty located?", "Is #1 in New York Harbor?" ], "evidence": [ [ [ [ "Statue of Liberty-1" ] ], [ [ "Statue of Liberty-1" ] ] ], [ [ [ "Statue of Liberty-1" ] ], [ [ "Statue of Liberty-1" ] ] ], [ [ [ "Statue of Liberty-1" ] ], [ "operation" ] ] ] }, { "qid": "48da75d87c66754ccc2e", "term": "Attack on Pearl Harbor", "description": "Surprise attack by the Imperial Japanese Navy on the U.S. Pacific Fleet in Pearl Harbor in Hawaii", "question": "Was only woman to serve as U.S. 
Speaker of the House alive during the attack on Pearl Harbor?", "answer": true, "facts": [ "Nancy Pelosi is the only woman to ever serve as Speaker of the United States House of Representatives.", "Nancy Pelosi was born on Mar 26, 1940", "The attack on Pearl Harbor occurred on December 7, 1941" ], "decomposition": [ "Who is the only woman to ever serve as Speaker of the United States House of Representatives?", "When was #1 born?", "When did the attack on Pearl Harbor occur?", "Is #2 before #3?" ], "evidence": [ [ [ [ "Speaker of the United States House of Representatives-3" ] ], [ [ "Nancy Pelosi-1" ] ], [ [ "Attack on Pearl Harbor-1" ] ], [ "operation" ] ], [ [ [ "Nancy Pelosi-1" ] ], [ [ "Nancy Pelosi-1" ] ], [ [ "Attack on Pearl Harbor-1" ] ], [ "operation" ] ], [ [ [ "Speaker of the United States House of Representatives-3" ] ], [ [ "Nancy Pelosi-1" ] ], [ [ "Attack on Pearl Harbor-1" ] ], [ "operation" ] ] ] }, { "qid": "a61bbe3edc4de5272c0c", "term": "Honey", "description": "Sweet food made by bees mostly using nectar from flowers", "question": "Is honey associated with queens?", "answer": true, "facts": [ "Honey is made by bees.", "Each bee hive is led by a queen bee." ], "decomposition": [ "What produces honey?", "Do #1 have queens?" ], "evidence": [ [ [ [ "Honey-1" ] ], [ [ "Honey bee-53" ], "operation" ] ], [ [ [ "Honey-1" ] ], [ [ "Honey bee-53" ], "operation" ] ], [ [ [ "Honey-1" ] ], [ [ "Queen bee-1" ] ] ] ] }, { "qid": "fc712fb2286006222e50", "term": "Basel", "description": "Place in Basel-Stadt, Switzerland", "question": "Is it dark in Basel during the day in Los Angeles?", "answer": true, "facts": [ "Basel is located in the CEST time zone.", "Los Angeles is located in the PDT time zone." ], "decomposition": [ "What time zone is Basel in?", "What time zone is Los Angeles in?", "What is the time difference in hours between #1 and #2?", "Is #3 at least equal to 8?" ], "evidence": [ [ [ [ "Basel-1", "Central European Summer Time-1", "Central European Summer Time-6" ] ], [ [ "Pacific Time Zone-1", "Pacific Time Zone-3" ] ], [ [ "Central European Summer Time-1", "Pacific Time Zone-1" ], "operation" ], [ "operation" ] ], [ [ [ "Basel-1", "UTC+00:30-2" ] ], [ [ "Pacific Time Zone-9" ] ], [ "no_evidence", "operation" ], [ "operation" ] ], [ [ [ "Basel-1", "UTC+01:00-1" ] ], [ [ "Los Angeles-1", "UTC−08:00-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "56d3c8ea7338af395e4f", "term": "Learning disability", "description": "Range of neurodevelopmental conditions", "question": "Does penicillin cure a learning disability?", "answer": false, "facts": [ "Learning disabilities are neurological impairments", "Neurological impairments can result from genetic issues, developmental problems, and accidents like head trauma, malnutrition or exposure to toxins", "Penicillin is an antibiotic that treats bacterial infection" ], "decomposition": [ "What kind of impairment is a learning disability?", "What are the causes of #1?", "What is Penicillin used to treat?", "Is #3 also listed in #2?"
], "evidence": [ [ [ [ "Learning difficulties-2" ] ], [ [ "Learning disability-18" ] ], [ [ "Penicillin-1" ] ], [ [ "Learning disability-18", "Penicillin-1" ] ] ], [ [ [ "Learning disability-1" ] ], [ [ "Learning disability-17", "Learning disability-18" ] ], [ [ "Penicillin-1" ] ], [ "operation" ] ], [ [ [ "Learning disability-1" ] ], [ [ "Learning disability-3" ] ], [ [ "Side effects of penicillin-1" ] ], [ "operation" ] ] ] }, { "qid": "2def6bb4885a6cc0a6a5", "term": "Mickey Mouse", "description": "Disney cartoon character", "question": "Would Mickey Mouse blend in with the American flag?", "answer": false, "facts": [ "The American Flag is colored red, white, and blue.", "Mickey Mouse typically wears red shorts, large yellow shoes, and white gloves.", "The color yellow stands out distinctly from red, white, and blue.", "Things that are colored similarly or identically will blend in with each other." ], "decomposition": [ "What colors are Mickey Mouse?", "What colors are the American flag?", "Are most of the colors in #1 also found in #2?" ], "evidence": [ [ [ [ "Mickey Mouse-48" ] ], [ [ "Franco-American Flag-1" ] ], [ "operation" ] ], [ [ [ "Mickey Mouse-1" ] ], [ [ "Flag of the United States-1" ] ], [ "operation" ] ], [ [ [ "Mickey Mouse-1" ] ], [ [ "Flag of the United States-1" ] ], [ "operation" ] ] ] }, { "qid": "ac75c6a745713c08508a", "term": "Hippie", "description": "diminutive pejorative of hipster: 1960s counterculture participant", "question": "Was hippie culture encouraged by the government in the Soviet Union?", "answer": false, "facts": [ "Long hair was associated with the subcultures and youth movements that arose in the Western world during the mid-1960s, such as Hippies.", "Máničky is a Czech term used for young people with long hair, typically men, in Czechoslovakia through the 1960s and 1970s.", "From the mid-1960s, \"máničky\" became a target of continuous interest of the state security apparatus." ], "decomposition": [ "What hairstyle was associated with hippies in the mid 1960's?", "What Czech term was used to describe people with #1?", "Was the government accepting of people who identified as #2?" ], "evidence": [ [ [ [ "History of the hippie movement-8" ] ], [ [ "Mánička-1" ] ], [ [ "Counterculture of the 1960s-28" ] ] ], [ [ [ "History of the hippie movement-39" ] ], [ [ "Mánička-4" ] ], [ [ "Counterculture of the 1960s-29" ] ] ], [ [ [ "Red Dog Saloon (Virginia City, Nevada)-4" ] ], [ [ "Mánička-1" ] ], [ [ "Counterculture of the 1960s-28" ], "operation" ] ] ] }, { "qid": "f43533225534420816d6", "term": "Giant squid", "description": "Deep-ocean dwelling squid in the family Architeuthidae", "question": "Can you house a giant squid at Soldier Field?", "answer": true, "facts": [ "Soldier Field is a football stadium", "Football fields are 120 yards long, or 360 feet", "The maximum length of a giant squid is 43 feet" ], "decomposition": [ "How long are giant squid?", "What type of field is Soldier Field?", "How long are #2?", "Is #3 equal to or greater than #1?"
], "evidence": [ [ [ [ "Giant squid-1" ] ], [ [ "Soldier Field-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Giant squid-1" ] ], [ [ "Soldier Field-1" ] ], [ [ "American football-36" ] ], [ "operation" ] ], [ [ [ "Giant squid-1" ] ], [ [ "Soldier Field-1" ] ], [ [ "Gridiron football-7" ] ], [ "operation" ] ] ] }, { "qid": "009f188cc24ee8241cb2", "term": "English Channel", "description": "Arm of the Atlantic Ocean that separates southern England from northern France", "question": "Can Iowa be hidden in the English Channel?", "answer": false, "facts": [ "The maximum width of the English Channel is 150 miles", "The minimum width of Iowa is 200 miles" ], "decomposition": [ "What is the maximum width of the English Channel?", "What is the minimum width of Iowa?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "English Channel-2" ] ], [ [ "Iowa-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "English Channel-2" ] ], [ [ "Geography of Iowa-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "cb08864902f07be76e77", "term": "Earth's magnetic field", "description": "Magnetic field that extends from the Earth’s inner core to where it meets the solar wind", "question": "Do Flat Earthers doubt the existence of Earth's magnetic field?", "answer": true, "facts": [ "Theories about the Earth's magnetic field depend on the globe model of the Earth.", "Flat Earthers are skeptical of most science related to the Earth and space, believing it to be part of a conspiracy coverup." ], "decomposition": [ "Which theory about the earth do the Flat-Earthers believe?", "Which earth theory supports the existence of the earth's magnetic field?", "Does #1 contradict #2?" ], "evidence": [ [ [ [ "Flat Earth-1" ] ], [ [ "Earth's magnetic field-1" ] ], [ "operation" ] ], [ [ [ "Modern flat Earth societies-1" ] ], [ [ "Modern flat Earth societies-8" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Flat Earth-58" ] ], [ [ "History of geomagnetism-12" ] ], [ "operation" ] ] ] }, { "qid": "8dc182aaa2d2117c4091", "term": "CNES", "description": "French space agency", "question": "Has CNES planted a French flag on the lunar surface?", "answer": false, "facts": [ "The lunar surface is on the moon.", "CNES has not sent a person to the moon." ], "decomposition": [ "Where is the lunar surface?", "What country is the CNES part of?", "Which countries have sent people or probes to #1?", "Is #2 included in #3?" ], "evidence": [ [ [ [ "Geology of the Moon-1" ] ], [ [ "CNES-1" ] ], [ [ "Space Race-2" ] ], [ "operation" ] ], [ [ [ "Moon-3" ] ], [ [ "CNES-1" ] ], [ [ "Chinese Lunar Exploration Program-3", "Exploration of the Moon-11" ] ], [ "operation" ] ], [ [ [ "Moon-3" ] ], [ [ "CNES-1" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "60aa0b975babf5475cf1", "term": "Myth", "description": "Type of traditional narrative", "question": "Was story of Jesus inspired by Egyptian myth?", "answer": true, "facts": [ "Jesus was a biblical character that walked on water, was born of a virgin, and was killed beside two thieves.", "Horus was a character in ancient Egyptian myth that walked on water, had a virgin mother, and was executed beside two thieves." ], "decomposition": [ "What are the main characteristics of the Horus story?", "What are the main characteristics of the Jesus story?", "Is there evidence people believed #1 before #2?", "Is there significant overlap between #1 and #2?", "Are #4 and #3 both \"Yes\"?" 
], "evidence": [ [ [ [ "Osiris myth-1" ] ], [ [ "Jesus-3" ] ], [ [ "Osiris myth-3" ] ], [ [ "Jesus-1" ] ], [ "operation" ] ], [ [ [ "Horus-8" ], "no_evidence" ], [ [ "Jesus-1" ], "no_evidence" ], [ [ "Ancient Egypt-1" ] ], [ "no_evidence", "operation" ], [ "operation" ] ], [ [ [ "Horus-11" ] ], [ [ "Jesus-11" ] ], [ [ "Jesus-7" ], "no_evidence" ], [ [ "Horus-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0f70628653f7b064a50d", "term": "Harbor seal", "description": "Species of mammal", "question": "Can you see live harbor seals in Washington DC?", "answer": true, "facts": [ "The Smithsonian National Zoo is in Washington DC.", "There is a harbor seal exhibit at the Smithsonian National Zoo. " ], "decomposition": [ "Is there a zoo in Washington DC?", "Is there a harbor seal exhibit at #1?" ], "evidence": [ [ [ [ "National Zoological Park (United States)-2" ] ], [ [ "National Zoological Park (United States)-26" ] ] ], [ [ [ "National Zoological Park (United States)-2" ] ], [ [ "National Zoological Park (United States)-26" ] ] ], [ [ [ "National Zoological Park (United States)-2" ] ], [ [ "National Zoological Park (United States)-26" ], "operation" ] ] ] }, { "qid": "97ffbaa790ff9df718ec", "term": "Atmosphere of Mars", "description": "atmosphere", "question": "Are all the elements plants need for photosynthesis present in atmosphere of Mars?", "answer": true, "facts": [ "Plants need three elements for photosynthesis: Hydrogen, Oxygen, and Carbon.", "The atmosphere of Mars is composed of carbon dioxide, nitrogen, argon, and trace levels of water vapor, oxygen, carbon monoxide, hydrogen and other noble gases." ], "decomposition": [ "What are the elements needed by plants for photosynthesis?", "Which elements are found in the atmosphere?", "Are #1 included in #2?" ], "evidence": [ [ [ [ "Photosynthesis-11" ] ], [ [ "Atmosphere of Mars-11" ] ], [ "operation" ] ], [ [ [ "Photosynthesis-72" ], "no_evidence", "operation" ], [ [ "Atmosphere-14" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Photosynthesis-10" ] ], [ [ "Atmosphere-14" ] ], [ "operation" ] ] ] }, { "qid": "38ef97eb7cdd4200fd00", "term": "Latino", "description": "A group of people in the United States with ties to Latin America", "question": "Is blonde hair green eyed Sara Paxton considered a Latino?", "answer": true, "facts": [ "Sara Paxton is an American actress.", "Latinos are people with ancestral ties to Latin America.", "Sara Paxton was born to an Irish/English father and a Mexican/Spanish/Chilean mother.", "Mexico is a country that is part of Latin America." ], "decomposition": [ "Latinos are people with which nationality?", "Which countries are Sara Paxton's parents from?", "Is any of #2 included in #1?" ], "evidence": [ [ [ [ "Latino (demonym)-1", "Latino (demonym)-2" ] ], [ [ "Sara Paxton-3" ] ], [ "operation" ] ], [ [ [ "Latin America-12", "Latino (demonym)-1", "Mexico-1" ] ], [ [ "Sara Paxton-3" ] ], [ "operation" ] ], [ [ [ "Latino (demonym)-18" ] ], [ [ "Sara Paxton-3" ] ], [ "operation" ] ] ] }, { "qid": "3cc870b5665bc1bc2a8e", "term": "Bee", "description": "Clade of insects", "question": "Does the human stomach destroy a bee if ingested?", "answer": true, "facts": [ "Bees have an outer protective exoskeleton made of chitin, a polymer of glucose.", "The human stomach releases harsh acids that break down materials.", "The human stomach breaks down glucose in about 33 hours." ], "decomposition": [ "What material protects a bee?", "What is #1 made of?", "Can the human stomach digest #2?"
], "evidence": [ [ [ [ "Exoskeleton-3" ], "no_evidence" ], [ [ "Chitin-1" ] ], [ [ "Glucose-1" ], "operation" ] ], [ [ [ "Bee-40" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bee-28", "Bee-65" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "fba89c686fe1c025a0ed", "term": "Voyages of Christopher Columbus", "description": "1492-1502 voyages to the Americas; beginning of the Columbian exchange", "question": "Could largest ship from Voyages of Christopher Columbus haul Statue of Liberty?", "answer": false, "facts": [ "The largest ship Christopher Columbus used was the Santa Maria.", "The Santa Maria has a cargo capacity of 108 tons.", "The Statue of Liberty weighs 225 tons." ], "decomposition": [ "What was the largest of Columbus' ships?", "What was the cargo capacity of #1?", "How much does the Statue of Liberty weigh?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Santa María (ship)-1" ] ], [ [ "Builder's Old Measurement-1", "Santa María (ship)-62" ] ], [ [ "Statue of Liberty-20" ] ], [ [ "Ton-2" ], "operation" ] ], [ [ [ "Carrack-11" ], "no_evidence" ], [ [ "Santa María (ship)-62" ] ], [ [ "Statue of Liberty-18" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Santa María (ship)-1" ] ], [ [ "Santa María (ship)-63" ] ], [ [ "Statue of Liberty-20" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "84a83f07a543ae3024c9", "term": "Yellow pages", "description": "Telephone directory of businesses by category", "question": "Would it be uncommon for a high schooler to use the yellow pages?", "answer": true, "facts": [ "High school students are between 14-19 years old. ", "Teenagers now are considered 'digital natives'. ", "'Digital natives' are individuals who have grown up during the computer age, being exposed to technology from early childhood." ], "decomposition": [ "Which age range do most high school students fall within?", "What is the age range of children who are considered to have grown during the computer age?", "Is #1 similar to #2?" ], "evidence": [ [ [ [ "High school (North America)-1" ] ], [ [ "Information Age-1" ] ], [ "operation" ] ], [ [ [ "Adolescence-1" ] ], [ [ "Digital native-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Yellow pages-2" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d5cd4b71ec956f0e1272", "term": "5", "description": "Natural number", "question": "Is pi in excess of square root of 5?", "answer": true, "facts": [ "Pi is a mathematical number approximately equal to 3.14", "The square root of a number is the value that, when multiplied by itself, equals that number.", "The square root of 5 is around 2.23." ], "decomposition": [ "What is the square root of 5?", "What is the value of pi?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Square root-1" ], "no_evidence", "operation" ], [ [ "Pi-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Square root of 5-1" ] ], [ [ "Pi-1" ] ], [ "operation" ] ], [ [ [ "Square root of 5-3" ] ], [ [ "Pi-1" ] ], [ "operation" ] ] ] }, { "qid": "2a4e1d4524997b6263bf", "term": "Iris (mythology)", "description": "Greek goddess of the rainbow", "question": "Would Iris (mythology) and Hermes hypothetically struggle at a UPS job?", "answer": false, "facts": [ "UPS is the number one delivery/courier service according to 2019 sales.", "Iris is the goddess of the rainbow and serves as a messenger of the gods in Greek mythology.", "Hermes in Greek mythology was a god that functioned as the emissary and messenger of the gods."
], "decomposition": [ "What role does the Greek goddess Iris play for gods?", "What role does the Greek deity Hermes play for gods?", "What kind of service does UPS provide?", "Are #1 and #2 much different from #3?" ], "evidence": [ [ [ [ "Iris (mythology)-1" ] ], [ [ "Hermes-1" ] ], [ [ "United Parcel Service-1" ] ], [ "operation" ] ], [ [ [ "Iris (mythology)-1" ] ], [ [ "Hermes-2" ] ], [ [ "United Parcel Service-1" ] ], [ "operation" ] ], [ [ [ "Iris (mythology)-1" ] ], [ [ "Hermes-1" ] ], [ [ "United Parcel Service-1" ] ], [ "operation" ] ] ] }, { "qid": "ac8bab79a8f573de8ec5", "term": "Doctor Strange", "description": "Superhero appearing in Marvel Comics publications and related media", "question": "Would Doctor Strange like the Pittsburgh Steelers logo?", "answer": true, "facts": [ "The Pittsburgh Steelers logo features patches of red, gold and blue", "Doctor Strange's costume uses the colors red, gold and blue" ], "decomposition": [ "What colors are in Doctor Strange's costume?", "What are the colors in the logo of the Pittsburgh Steelers?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Doctor Strange-39" ] ], [ [ "Pittsburgh Steelers-32" ] ], [ "operation" ] ], [ [ [ "Doctor Strange-39" ] ], [ [ "Logos and uniforms of the Pittsburgh Steelers-5" ] ], [ "operation" ] ], [ [ [ "Doctor Strange-39" ], "no_evidence" ], [ [ "Pittsburgh Steelers-32" ] ], [ "operation" ] ] ] }, { "qid": "b6cebe14a6b32222ad1b", "term": "Torah", "description": "First five books of the Hebrew Bible", "question": "Can you give at least one word from the Torah to all residents of Bunkie Louisiana?", "answer": true, "facts": [ "Torah scrolls must be duplicated precisely by a trained scribe.", "The Torah has a total of 8,674 words.", "The population of Bunkie Louisiana is 3,939 people according to a 2018 census." ], "decomposition": [ "How many words are in the Torah?", "How many residents does Bunkie, Louisiana have?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Torah-45" ], "no_evidence" ], [ [ "Bunkie, Louisiana-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Torah-1" ], "no_evidence" ], [ [ "Bunkie, Louisiana-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Sefer Torah-7" ], "no_evidence" ], [ [ "Bunkie, Louisiana-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "3de88c80f0914eb08caf", "term": "Leaf", "description": "organ of a vascular plant, composing its foliage", "question": "Does the texture of leaves remain the same independent of their coloring changing?", "answer": false, "facts": [ "When leaves turn colors like red, yellow, or brown, they start being cut off from their main supply of nutrients and moisture.", "As leaves change colors, they become dry and brittle.", "Leaves tend to be supple and soft when they are green." ], "decomposition": [ "When leaves change to red and orange, what does their texture become?", "When leaves are green, what is their texture?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Autumn leaf color-1" ], "no_evidence" ], [ [ "Leaf-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Anthocyanin-17" ], "no_evidence" ], [ [ "Autumn leaf color-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Leaf-55" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0e3b14f806bfea19faf2", "term": "Snow White", "description": "fairy tale", "question": "Can all of Snow White's dwarfs play a game of 7 Wonders simultaneously?", "answer": true, "facts": [ "The fairy tale character Snow White was friends with seven dwarfs.", "The board game 7 Wonders is for 2 to 7 players." ], "decomposition": [ "How many players can participate in a game of 7 Wonders?", "How many dwarfs are in the story of Snow White?", "Is #2 less than or equal to #1?" ], "evidence": [ [ [ [ "7 Wonders (board game)-1" ], "no_evidence" ], [ [ "Snow White and the Seven Dwarfs (1937 film)-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "7 Wonders (board game)-14" ], "no_evidence" ], [ [ "Snow White and the Seven Dwarfs (1937 film)-7" ] ], [ "operation" ] ], [ [ [ "7 Wonders (board game)-21" ] ], [ [ "Snow White-3" ] ], [ "operation" ] ] ] }, { "qid": "687b694d02f9ebe82b96", "term": "Dominican Order", "description": "Roman Catholic religious order", "question": "Could the Dominican Order hypothetically defeat Blessed Gerard's order?", "answer": false, "facts": [ "The Dominican Order is a Catholic group of friars that takes several priestly vows.", "Blessed Gerard was the founder of the Order of St John of Jerusalem (Knights Hospitaller).", "The Order of St John of Jerusalem (Knights Hospitaller) were a well trained Catholic military order that fought in the Crusades." ], "decomposition": [ "To what order did Blessed Gerard belong?", "What is the purpose of members of #1?", "Do members of the Dominican Order have training similar to #2?" ], "evidence": [ [ [ [ "Blessed Gerard-1" ] ], [ [ "Benedictines-30" ], "no_evidence" ], [ [ "Dominican Order-1" ], "no_evidence", "operation" ] ], [ [ [ "Blessed Gerard-1" ] ], [ [ "Knights Hospitaller-1" ] ], [ [ "Dominican Order-2" ], "operation" ] ], [ [ [ "Blessed Gerard-1" ] ], [ [ "Knights Hospitaller-2", "Knights Hospitaller-3" ] ], [ [ "Dominican Order-2" ], "operation" ] ] ] }, { "qid": "725335996a1551cc953d", "term": "Leadership", "description": "ability of an individual or organization to guide other individuals, teams, or entire organizations", "question": "Is Steve Carell's character on The Office portrayed as one with tremendous leadership skills?", "answer": false, "facts": [ "Steve Carell plays Michael Scott on The Office.", "Michael Scott is a clueless and naive character that is not meant to be seen as effective in his job as General Manager." ], "decomposition": [ "Who is Steve Carell's character on The Office?", "What are leadership skills?", "Does #1 possess #2?"
], "evidence": [ [ [ [ "Michael Scott (The Office)-1" ] ], [ [ "Leadership-1" ], "no_evidence" ], [ [ "Michael Scott (The Office)-20" ], "operation" ] ], [ [ [ "Steve Carell-9" ] ], [ [ "Skills management-7" ] ], [ [ "Skills management-7" ], "operation" ] ], [ [ [ "Steve Carell-9" ] ], [ [ "Michael Scott (The Office)-12" ] ], [ "operation" ] ] ] }, { "qid": "22642aa493b059c3b185", "term": "Christopher Nolan", "description": "British–American film director, screenwriter, and producer", "question": "Could Christopher Nolan's movies finance Cyprus's entire GDP?", "answer": false, "facts": [ "The films of Christopher Nolan have grossed around 4.7 billion at the box office.", "The GDP of Cyprus was 24.96 billion in 2018." ], "decomposition": [ "How much have Christopher Nolan films grossed?", "What is the GDP of Cyprus?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Christopher Nolan-1" ] ], [ [ "Cyprus-81", "Cyprus-96" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Christopher Nolan-1" ] ], [ [ "Northern Cyprus-47", "Northern Cyprus-50" ] ], [ "operation" ] ], [ [ [ "Christopher Nolan-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "eb6aa79b52637aabc477", "term": "Subway (restaurant)", "description": "American fast food chain", "question": "Did Subway have a sex offender as a spokesperson?", "answer": true, "facts": [ "Jared Fogle was a national spokesman for the company in the US starting in January 2000.", "Jared Scott Fogle is a convicted sex offender. " ], "decomposition": [ "Who was the spokesman for Subway in January 2000?", "Has #1 ever been convicted of a sex crime?" ], "evidence": [ [ [ [ "Jared Fogle-2" ] ], [ [ "Jared Fogle-19" ] ] ], [ [ [ "Jared Fogle-1", "Jared Fogle-8" ] ], [ [ "Jared Fogle-3" ], "operation" ] ], [ [ [ "Jared Fogle-1" ] ], [ [ "Jared Fogle-1" ] ] ] ] }, { "qid": "7b7fa804c698d013d9a7", "term": "Zorro", "description": "Fictional character", "question": "Did Zorro carve his name into items regularly?", "answer": false, "facts": [ "Zorro was known for using his weapon to leave a mark wherever he went.", "The mark Zorro left was the first initial of his name and nothing more." ], "decomposition": [ "What mark did Zorro leave using his weapon?", "Is his name the answer in #1?" ], "evidence": [ [ [ [ "Zorro-2" ] ], [ "operation" ] ], [ [ [ "Zorro-2" ] ], [ "operation" ] ], [ [ [ "Zorro-18" ] ], [ "operation" ] ] ] }, { "qid": "1c0ccde98d448ffeedac", "term": "Yeti", "description": "Folkloric ape-like creature from Asia", "question": "Is there a Yeti associated with Disney theme parks?", "answer": true, "facts": [ "In the 1960s, an attraction called Matterhorn featuring a cartoon version of the Yeti opened in Disneyland.", "Later in 2005, Expedition Everest opened at Animal Kingdom in Disney World, featuring a much scarier version of the Yeti." ], "decomposition": [ "What 1960s attraction featured a cartoon version of the Yeti?", "Is #1 part of a Disney park?"
], "evidence": [ [ [ [ "Matterhorn Bobsleds-1", "Matterhorn Bobsleds-8" ] ], [ "operation" ] ], [ [ [ "Expedition Everest-12" ] ], [ [ "Disney's Animal Kingdom-1" ] ] ], [ [ [ "Matterhorn Bobsleds-23" ] ], [ [ "Disney's Animal Kingdom-1" ] ] ] ] }, { "qid": "df951faeafccf7c29be9", "term": "Football War", "description": "1969 War between Honduras and El Salvador", "question": "Did either side score a touchdown during the Football War?", "answer": false, "facts": [ "The Football War was a war in 1969 between Honduras and El Salvador", "The Football War was caused in part by rioting during a FIFA Cup qualifying match", "The FIFA Cup is a soccer tournament", "Touchdowns are scored in American football" ], "decomposition": [ "Which sport was involved as one of the causes of the Football War?", "Which sport are touchdowns scored in?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Association football-1", "FIFA-1", "Football War-1" ] ], [ [ "American football-1" ] ], [ "operation" ] ], [ [ [ "Football War-1" ] ], [ [ "Touchdown-1" ] ], [ "operation" ] ], [ [ [ "Football War-1" ] ], [ [ "Touchdown-1" ] ], [ "operation" ] ] ] }, { "qid": "6b83137493422db701bb", "term": "Grief", "description": "reaction to loss of someone or something close or important", "question": "Is grief always obvious when it is being experienced?", "answer": false, "facts": [ "Grief has no set external representation. ", "People who are grieving may laugh, cry, or even seem angry." ], "decomposition": [ "What are the ways a person may express their grief?", "Based on #1, can one always tell when someone is grieving?" ], "evidence": [ [ [ [ "Grief-10" ] ], [ [ "Grief-10" ] ] ], [ [ [ "Grief-1" ], "no_evidence" ], [ [ "Grief-59" ], "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "dce3695a2aaa8d7dba5a", "term": "Bill Nye", "description": "American science educator, comedian, television host, actor, writer, scientist and former mechanical engineer", "question": "Did Bill Nye vote for Franklin Delano Roosevelt?", "answer": false, "facts": [ "Bill Nye was born in 1955", "Franklin Delano Roosevelt's last election was in 1944" ], "decomposition": [ "When was the last time Franklin Delano Roosevelt contested in an election?", "When was Bill Nye born?", "Is #1 at least 18 years after #2?" ], "evidence": [ [ [ [ "Franklin D. Roosevelt-89" ] ], [ [ "Bill Nye-1" ] ], [ "operation" ] ], [ [ [ "1944 United States presidential election-1" ] ], [ [ "Bill Nye-1" ] ], [ "operation" ] ], [ [ [ "Franklin Delano Roosevelt Jr.-15" ] ], [ [ "Bill Nye-1" ] ], [ "operation" ] ] ] }, { "qid": "ba3cfe396d13f7a78786", "term": "Jack Dempsey", "description": "American boxer", "question": "Did Jack Dempsey have most title fight wins in either of his weight classes?", "answer": false, "facts": [ "Jack Dempsey competed as a heavyweight and a lightheavyweight.", "Jack Dempsey only had a handful of title defenses as heavyweight champion.", "Wladimir Klitschko had 25 heavyweight title fight wins.", "Jack Dempsey did not hold the lightheavyweight title.", "Dariusz Michalczewski had 23 lightheavyweight title fight wins." ], "decomposition": [ "What weight class did Jack Dempsey have title fight wins in?", "How many title fight wins did Jack Dempsey have in #1?", "How many title fight wins did Wladimir Klitschko have in #1?", "Is #2 greater than #3?" 
], "evidence": [ [ [ [ "Jack Dempsey-1" ] ], [ [ "Jack Dempsey-12", "Jack Dempsey-22", "Jack Dempsey-23", "Jack Dempsey-26", "Jack Dempsey-27", "Jack Dempsey-28" ] ], [ [ "Wladimir Klitschko-4" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-1" ] ], [ [ "Jack Dempsey-11" ], "no_evidence" ], [ [ "Heavyweight-6" ] ], [ "operation" ] ], [ [ [ "Jack Dempsey-1" ] ], [ [ "Jack Dempsey-12", "Jack Dempsey-23", "Jack Dempsey-24", "Jack Dempsey-28" ], "no_evidence" ], [ [ "Wladimir Klitschko-4" ] ], [ "operation" ] ] ] }, { "qid": "918f19d38c16251343fc", "term": "Subway (restaurant)", "description": "American fast food chain", "question": "Was Subway involved in a pedophilia scandal?", "answer": true, "facts": [ "In 2000, Jared Fogle became a national spokesman for Subway after he lost a lot of weight eating only Subway sandwiches.", "In 2015, Fogle was found guilty of child molestation and possession of child pornography, and Subway terminated its relationship with him." ], "decomposition": [ "Who was the famous spokesman for Subway?", "What crimes did #1 commit?", "Are #2 directly related to pedophilia?" ], "evidence": [ [ [ [ "Subway (restaurant)-30" ] ], [ [ "Jared Fogle-3" ] ], [ [ "Pedophilia-12" ], "operation" ] ], [ [ [ "Jared Fogle-1" ] ], [ [ "Jared Fogle-3" ] ], [ [ "Child sex tourism-1", "Pedophilia-1" ] ] ], [ [ [ "Jared Fogle-6" ] ], [ [ "Jared Fogle-25" ] ], [ [ "Jared Fogle-25" ] ] ] ] }, { "qid": "423f9d3f282fe8cb087b", "term": "Earth's magnetic field", "description": "Magnetic field that extends from the Earth’s inner core to where it meets the solar wind", "question": "Are implants from an ORIF surgery affected by the magnetic field of the Earth?", "answer": false, "facts": [ "An ORIF surgery is an Open Reduction Internal Fixation, done to fix broken bones.", "Most hardware from ORIF surgeries is made of titanium.", "Titanium is only slightly magnetic and does not affect metal detectors." ], "decomposition": [ "What kind of materials can be significantly affected by the earth's magnetic field?", "What kind of materials are ORIF surgery implants made of?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Magnetic mineralogy-4" ], "no_evidence" ], [ [ "Internal fixation-1" ] ], [ "operation" ] ], [ [ [ "Earth's magnetic field-9" ], "no_evidence" ], [ [ "Internal fixation-1" ] ], [ [ "Titanium-7" ], "operation" ] ], [ [ [ "Ferromagnetism-1", "Magnetic field-90" ], "no_evidence" ], [ [ "Internal fixation-1", "Internal fixation-3" ] ], [ [ "Stainless steel-1" ] ] ] ] }, { "qid": "e2affa461f20218463a4", "term": "Romani people", "description": "Ethnic group living mostly in Europe and the Americas", "question": "Is the use of the word Gypsy by non-Romani people considered okay?", "answer": false, "facts": [ "'Gypsy' is considered a slur in the Americas by Romani people.", "Lady Gaga has faced online criticism regarding her use of the word 'Gypsy' as the title and lyrics of a song." ], "decomposition": [ "What kind of word is Gypsy considered in the Americas by Romani people?", "Would using #1 types of words be considered okay?" 
], "evidence": [ [ [ [ "Names of the Romani people-11" ], "no_evidence" ], [ [ "Antiziganism-1" ] ] ], [ [ [ "Names of the Romani people-11" ] ], [ "operation" ] ], [ [ [ "Names of the Romani people-11" ] ], [ [ "Pejorative-1" ], "operation" ] ] ] }, { "qid": "703738f478e1ed466b59", "term": "Banana", "description": "edible fruit", "question": "Can a banana get a virus?", "answer": true, "facts": [ "A virus is a disease that is caused by infectious agents.", "A banana comes from a banana plant.", "Blight is a disease that is caused by infections on plants.", "The Banana bunchy top virus (BBTV) is a plant virus of the genus Babuvirus,that causes diseased streaks." ], "decomposition": [ "What are the various diseases that affect banana plant?", "Are any of #1 caused by virus?" ], "evidence": [ [ [ [ "Banana bunchy top virus-1" ] ], [ [ "Nanoviridae-1" ], "operation" ] ], [ [ [ "Banana-54", "Banana-55" ] ], [ [ "Banana bunchy top virus-6" ] ] ], [ [ [ "Banana-50" ] ], [ "operation" ] ] ] }, { "qid": "ccf67586446bcc4a86b3", "term": "E.T. the Extra-Terrestrial", "description": "1982 American science fiction film directed by Steven Spielberg", "question": "Would E.T. the Extra-Terrestrial alien hypothetically love Friendly's?", "answer": true, "facts": [ "E.T., the main alien from E.T. the Extra-Terrestrial, loved Reese's Pieces candy.", "Friendly's is a restaurant that serves dinner entrees and ice cream dishes.", "Friendly's has several desserts with Reese's candy including the Reese's Peanut Butter Cup Sundae, and Reese's Pieces Sundae." ], "decomposition": [ "What is E.T. the Extra-Terrestrial's favorite food?", "Does Friendly's serve dishes made with #1?" ], "evidence": [ [ [ [ "E.T. the Extra-Terrestrial-6" ] ], [ [ "Friendly's-4", "Reese's Pieces-1" ] ] ], [ [ [ "E.T. the Extra-Terrestrial-6" ] ], [ [ "Friendly's-1" ], "no_evidence", "operation" ] ], [ [ [ "E.T. the Extra-Terrestrial-6" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2aba85e0e138ad86035e", "term": "Sugar Ray Robinson", "description": "American boxer", "question": "Could Sugar Ray Robinson box if he stole in Iran?", "answer": false, "facts": [ "Sugar Ray Robinson was an American boxer who relied on his fists to achieve 109 KO victories.", "The penalty for stealing in Iran is having your hand cut off.", "In August 2015 a prisoner in Iran, who was caught stealing, had his right hand and part of his left leg cut off." ], "decomposition": [ "What body part does Iran cut off if someone steals?", "What body part is necessary for boxing?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "Hudud-26" ] ], [ [ "Boxing-1" ] ], [ "operation" ] ], [ [ [ "Guardian Council-2", "Sharia-80" ], "no_evidence" ], [ [ "Boxing-1", "Boxing-4" ] ], [ "operation" ] ], [ [ [ "Hudud-9" ], "no_evidence" ], [ [ "Boxing-4" ], "operation" ], [ "operation" ] ] ] }, { "qid": "5e2d9519c7c521411817", "term": "Pizza", "description": "Usually savory dish of flattened bread and toppings", "question": "Would a TMNT coloring book have pizza in it?", "answer": true, "facts": [ "TMNT is an abbreviation for 'Teenage Mutant Ninja Turtles'.", "The Teenage Mutant Ninja Turtles canonically only ever ate pizza in the animated series. " ], "decomposition": [ "What cartoon does TMNT stand for?", "In the animated series, did #1 canonically eat only pizza?" 
], "evidence": [ [ [ [ "Teenage Mutant Ninja Turtles-1" ] ], [ [ "Teenage Mutant Ninja Turtles-9" ], "operation" ] ], [ [ [ "Teenage Mutant Ninja Turtles-1" ] ], [ [ "Teenage Mutant Ninja Turtles-19" ], "operation" ] ], [ [ [ "Teenage Mutant Ninja Turtles-1" ] ], [ [ "Teenage Mutant Ninja Turtles-19" ] ] ] ] }, { "qid": "a6f593e93ca87c53a0a1", "term": "Crucifixion", "description": "Method of capital punishment in which the victim is tied or nailed to a large wooden beam and left to hang until eventual death", "question": "Does crucifixion violate US eighth amendment?", "answer": true, "facts": [ "The eighth amendment prohibits cruel and unusual punishment.", "Crucifixion was particularly barbaric as people do not die instantly and live for several days." ], "decomposition": [ "What does the Eighth Amendment say about punishment measures?", "What are the features of crucifixion as a method of punishment?", "Is #1 contradicted by #2?" ], "evidence": [ [ [ [ "Eighth Amendment to the United States Constitution-1" ] ], [ [ "Crucifixion-1" ] ], [ "operation" ] ], [ [ [ "Eighth Amendment to the United States Constitution-1" ] ], [ [ "Crucifixion-1" ] ], [ [ "Crucifixion-1", "Eighth Amendment to the United States Constitution-1" ], "operation" ] ], [ [ [ "United States constitutional sentencing law-4" ] ], [ [ "Cruel and unusual punishment-6" ] ], [ "operation" ] ] ] }, { "qid": "565c68b8f343711ff7dd", "term": "Stork", "description": "family of birds", "question": "Should you wrap a gift for a mother of a stillborn in stork wrapping paper?", "answer": false, "facts": [ "Storks are used as a symbol of a new baby on the way.", "Stillborn babies are those who are born lifeless." ], "decomposition": [ "What is the significance of stork to a potential mother?", "Can stillborn babies be regarded as #1?" ], "evidence": [ [ [ [ "White stork-46" ] ], [ [ "Stillbirth-1" ], "operation" ] ], [ [ [ "White stork-46" ] ], [ "operation" ] ], [ [ [ "White stork-47" ] ], [ [ "Stillbirth-18" ], "operation" ] ] ] }, { "qid": "aefa59c255bf15e90f58", "term": "Naruto", "description": "Japanese manga and anime series", "question": "Would the historic Hattori Hanzō admire Naruto?", "answer": false, "facts": [ "Naruto is a ninja", "Ninja tactics were considered dishonorable by samurai", "Hattori Hanzō is a famous historical samurai " ], "decomposition": [ "What was Naruto's profession?", "What was Hattori Hanzō's profession? ", "Did #2 respect the actions of #1?" ], "evidence": [ [ [ [ "Naruto-1" ] ], [ [ "Hattori Hanzō-1" ] ], [ [ "Hattori Hanzō-6" ], "no_evidence", "operation" ] ], [ [ [ "Naruto-1" ] ], [ [ "Hattori Hanzō-1" ] ], [ [ "Ninja-1" ], "operation" ] ], [ [ [ "Naruto-7" ], "no_evidence" ], [ [ "Hattori Hanzō-10" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "6ad52c1f34bebab2cbba", "term": "Uniting Church in Australia", "description": "christian denomination", "question": "Was Muhammed a member of the Uniting Church in Australia?", "answer": false, "facts": [ "The Uniting Church in Australia is a combination of Methodist and Presbyterian congregations.", "Methodists and Presbyterians are Christians.", "Muhammed was the Muslim prophet and was not a Christian." ], "decomposition": [ "Which religion was Muhammed a prophet in?", "What is the religion of the members of the Uniting Church in Australia?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "Last prophet-2" ] ], [ [ "Uniting Church in Australia-23" ] ], [ "operation" ] ], [ [ [ "Muhammad-1" ] ], [ [ "Uniting Church in Australia-1" ] ], [ "operation" ] ], [ [ [ "Muhammad-1" ] ], [ [ "Uniting Church in Australia-1" ] ], [ "operation" ] ] ] }, { "qid": "6de352d79466276a1d06", "term": "Sand cat", "description": "Small wild cat", "question": "Do sand cats avoid eating all of the prey of eels?", "answer": false, "facts": [ "Sand cats eat a number of animals including insects, birds, hares, and reptiles.", "Eels prey on fish, worms, frogs, and lizards.", "Lizards are a type of reptile." ], "decomposition": [ "What does the sand cat's diet consist of?", "What does the eel's diet consist of?", "Are any of the foods in #2 a subtype of a food in #1?", "Is it not the case that #3 is \"yes\"?" ], "evidence": [ [ [ [ "Sand cat-48", "Sand cat-49", "Sand cat-51" ] ], [ [ "American eel-4" ] ], [ [ "Invertebrate-1" ], "operation" ], [ "operation" ] ], [ [ [ "Sand cat-48", "Sand cat-49", "Sand cat-50" ] ], [ [ "Electric eel-18", "Moray eel-8" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Sand cat-48" ] ], [ [ "Eel-1", "European conger-4" ], "no_evidence" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "97cf5c63fb530d9fc492", "term": "Paul the Apostle", "description": "Early Christian apostle and missionary", "question": "Did Paul the Apostle's cause of death violate the tenets of Ahimsa?", "answer": true, "facts": [ "Ahimsa is an ancient Indian principle of nonviolence which applies to all living beings. ", "Ahimsa is a key virtue in Hinduism, Buddhism and Jainism.", "Paul the Apostle was violently beheaded." ], "decomposition": [ "What is Ahimsa?", "Does #1 believe in non-violence?", "Did Paul the Apostle die due to violence?", "Are #2 and #3 the same answer?" ], "evidence": [ [ [ [ "Ahiṃsā-1" ] ], [ [ "Ahiṃsā-1" ] ], [ [ "Decapitation-1", "Paul the Apostle-53", "Violence-1" ] ], [ "operation" ] ], [ [ [ "Ahiṃsā-1" ] ], [ "operation" ], [ [ "Paul the Apostle-53" ] ], [ "operation" ] ], [ [ [ "Ahiṃsā-1" ] ], [ "operation" ], [ [ "Paul the Apostle-53", "Paul the Apostle-54" ] ], [ "operation" ] ] ] }, { "qid": "db6ede282f42b088fef7", "term": "Sophist", "description": "Specific kind of teacher in both Ancient Greece and in the Roman Empire", "question": "Would a sophist use an épée?", "answer": false, "facts": [ "A sophist is a specific kind of teacher in ancient Greece, in the fifth and fourth centuries BC.", "Sophists specialized in using the tools of philosophy and rhetoric, though other sophists taught subjects such as music, athletics and mathematics.", "An épée is a sword used in fencing.", "The épée was not developed until the 19th century." ], "decomposition": [ "How long ago were the sophists around?", "How long ago was the epee developed?", "Is #2 greater than or equal to #1?" ], "evidence": [ [ [ [ "Sophist-1" ] ], [ [ "Épée-15" ] ], [ "operation" ] ], [ [ [ "Sophist-1" ] ], [ [ "Épée-1" ] ], [ "operation" ] ], [ [ [ "Sophist-1" ] ], [ [ "Épée-16" ] ], [ "operation" ] ] ] }, { "qid": "e93c7827b7789c989e99", "term": "The Powerpuff Girls", "description": "American animated television series", "question": "In most Mennonite homes, would children know of The Powerpuff Girls?", "answer": false, "facts": [ "Mennonites are a religious group with similar beliefs to Amish groups.", "Mennonites do not prohibit or view the use of technology as a sin.", "Most Mennonites avoid using television sets at home."
], "decomposition": [ "On what devices can one watch The Powerpuff Girls?", "What modern items do Mennonites prohibit themselves from using?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Mennonites-59" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Powerpuff Girls-1" ] ], [ [ "Mennonites-59" ] ], [ "operation" ] ], [ [ [ "Mennonites-59" ] ], [ [ "Mennonites-59" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d4b9a903cd99f477a219", "term": "Athena", "description": "ancient Greek goddess of wisdom and war", "question": "Is Freya a combination of Athena and Aphrodite?", "answer": true, "facts": [ "Athena was the Greek goddess of war.", "Aphrodite was the Greek goddess of love.", "Freya was the Norse goddess of war, love, and fertility." ], "decomposition": [ "What was Athena the Greek goddess of?", "What was Aphrodite's specialty?", "What was Freya the Norse goddess of?", "Does #3 have part of the same answer as #1 and #2?" ], "evidence": [ [ [ [ "Athena-1" ] ], [ [ "Aphrodite-1" ] ], [ [ "Freyja-1" ] ], [ "no_evidence" ] ], [ [ [ "Athena-1" ] ], [ [ "Aphrodite-1" ] ], [ [ "Freyja-1" ] ], [ "operation" ] ], [ [ [ "Athena-1" ] ], [ [ "Aphrodite-1" ] ], [ [ "Freyja-1" ] ], [ "operation" ] ] ] }, { "qid": "7aa52a80b601b46bd48f", "term": "September", "description": "ninth month in the Julian and Gregorian calendars", "question": "Could you brew beer from start to finish in the month of September?", "answer": false, "facts": [ "Brewing a batch of beer takes at least 5 weeks.", "There are 30 days, or 4 1/2 weeks, in the month of September." ], "decomposition": [ "How long does it take to brew a batch of beer?", "How many weeks does September have?", "Is #2 longer than #1?" ], "evidence": [ [ [ [ "Brewing-36" ] ], [ [ "September-1" ] ], [ "operation" ] ], [ [ [ "Beer-45" ], "no_evidence" ], [ [ "September-1" ] ], [ "operation" ] ], [ [ [ "Beer-19" ] ], [ [ "September-3" ] ], [ "operation" ] ] ] }, { "qid": "14a891550bef9ab64ef8", "term": "Foot (unit)", "description": "customary unit of length", "question": "When en route from China to France, must pilots know their altitude in the imperial foot?", "answer": true, "facts": [ "Most international airports and aviators use the foot to measure altitude ", "China and North Korea require pilots to use meters for altitude", "Pilots must communicate their altitude with local air traffic control " ], "decomposition": [ "Which unit of altitude does France require pilots to use?", "Which unit of altitude does China require pilots to use?", "Is #1 or #2 the imperial foot?" ], "evidence": [ [ [ [ "Foot (unit)-2" ] ], [ [ "Foot (unit)-2" ] ], [ "operation" ] ], [ [ [ "Foot (unit)-26" ], "no_evidence" ], [ [ "Foot (unit)-3" ] ], [ "operation" ] ], [ [ [ "Foot (unit)-3" ], "no_evidence" ], [ [ "Foot (unit)-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "5b0a4c75f55575ae7e83", "term": "Lieutenant", "description": "junior commissioned officer in many nations' armed forces", "question": "Would Gomer Pyle salute a lieutenant?", "answer": true, "facts": [ "Gomer Pyle was a character on a television sitcom", "Pyle was in the US Marine Corp", "Lieutenants are junior commissioned officers in the USMC", "Marine custom dictates that officers are to be saluted by other Marines" ], "decomposition": [ "Which arm of the US Armed Forces did Gomer Pyle join?", "What was his rank in the #1?", "According to #1 tradition. would a #2 salute a lieutenant?" 
], "evidence": [ [ [ [ "Gomer Pyle-4" ] ], [ [ "Private first class-8" ] ], [ [ "Lieutenant-1" ], "operation" ] ], [ [ [ "Gomer Pyle, U.S.M.C.-2" ] ], [ [ "Gomer Pyle, U.S.M.C.-12" ] ], [ [ "Private (rank)-1" ] ] ], [ [ [ "Gomer Pyle-7" ] ], [ [ "Gomer Pyle-8" ] ], [ [ "Lieutenant-12", "Salute-57" ], "no_evidence" ] ] ] }, { "qid": "f4a731f739533c52becd", "term": "Prime Minister of the United Kingdom", "description": "Head of UK Government", "question": "Does the Prime Minister of the United Kingdom have poor job security?", "answer": true, "facts": [ "The Prime Minister of the United Kingdom is an elected official.", "Elected officials can be recalled with a vote of no confidence in UK parliament. " ], "decomposition": [ "How does the prime minister of UK get his position?", "Can people who are #1 be easily removed?" ], "evidence": [ [ [ [ "Prime Minister of the United Kingdom-60" ] ], [ [ "Recall election-1" ] ] ], [ [ "no_evidence" ], [ [ "House of Commons of the United Kingdom-8" ] ] ], [ [ [ "Prime minister-25" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "a076d2cbd7e4eafad35b", "term": "The Jungle Book", "description": "1894 children's book by Rudyard Kipling", "question": "Does The Jungle Book contain racist subtext?", "answer": true, "facts": [ "Baloo, the father character in The Jungle Book, refers to the money characters as \"flat-nosed flaky creeps\" mocking a common black feature feature.", "The antagonist snake character was made to sound like an Indian mag, and was said to hate men.", "King Louie is viewed socially as a racist archetype of a black man." ], "decomposition": [ "What term did Baloo use to refer to the monkey characters in \"The Jungle Book\"?", "What sentiment did the antagonistic snake express towards men?", "Do #1 and #2 have racist connotation?" ], "evidence": [ [ [ [ "Bandar-log-1" ] ], [ [ "Kaa-3" ] ], [ [ "Bandar-log-1" ], "no_evidence" ] ], [ [ [ "The Jungle Book-1" ], "no_evidence" ], [ [ "The Jungle Book (2016 film)-6" ], "no_evidence" ], [ [ "The Jungle Book (1967 film)-26" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "The Jungle Book-18" ], "no_evidence", "operation" ] ] ] }, { "qid": "dc2da0d915bc9d327b9a", "term": "Ginger", "description": "Species of plant", "question": "If you're pregnant, might you be recommended ginger?", "answer": true, "facts": [ "Pregnancy often causes nausea and stomach upset.", "Small doses of ginger have been shown to help with vomiting and nausea.", "Doctors say that, in small doses, ginger is safe for pregnant women." ], "decomposition": [ "What symptoms can ginger help alleviate?", "Do pregnant women suffer from any symptoms in #1?" ], "evidence": [ [ [ [ "Ginger-36" ] ], [ [ "Morning sickness-1" ] ] ], [ [ [ "Ginger-36" ] ], [ [ "Pregnancy-1" ], "operation" ] ], [ [ [ "Jamaica ginger-2" ] ], [ [ "Signs and symptoms of pregnancy-15" ], "operation" ] ] ] }, { "qid": "a1db94948b252250102b", "term": "History of the world", "description": "Recorded history of humanity", "question": "Are the events of Star Trek: The Next Generation in the history of the world?", "answer": false, "facts": [ "The history of the world includes factual events.", "Star Trek: TNG is a fictional television show. " ], "decomposition": [ "Which universe is Star Trek: The Next Generation set in?", "Is #1 the same as the real world?" 
], "evidence": [ [ [ [ "Star Trek: The Next Generation-1" ] ], [ "no_evidence" ] ], [ [ [ "Star Trek: The Next Generation-1" ] ], [ [ "Science fiction-1" ], "operation" ] ], [ [ [ "Star Trek: The Next Generation-70" ] ], [ "operation" ] ] ] }, { "qid": "9885d9bb4506cdf4f2cd", "term": "Cuisine of Hawaii", "description": "Cuisine of Hawaii", "question": "Is pig meat considered inedible within the cuisine of Hawaii?", "answer": false, "facts": [ "SPAM is a pork and ham product that is very popular around the world.", "In the cuisine of Hawaii, SPAM is a cherished and widely used ingredient." ], "decomposition": [ "What are some popular dishes in Hawaiian cuisine?", "Is pork (pig meat) excluded from #1?" ], "evidence": [ [ [ [ "Cuisine of Hawaii-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Cuisine of Hawaii-2" ] ], [ [ "Spam (food)-1", "Spam musubi-1" ] ] ], [ [ [ "Kālua-1" ] ], [ [ "Kālua-3" ], "operation" ] ] ] }, { "qid": "02435ed89673ef2eb808", "term": "Hanging", "description": "execution or suicide method involving suspension of a person by a ligature", "question": "Is hanging a viable execution method on a ship at sea?", "answer": true, "facts": [ "Hanging is typically set up using rope.", "Ships have plenty of rope on board because their operation relies heavily on rope." ], "decomposition": [ "What materials are necessary for hanging?", "Would #1 be available on a ship?" ], "evidence": [ [ [ [ "Gallows-1" ] ], [ "operation" ] ], [ [ [ "Hanging-4" ] ], [ [ "Rigging-1" ] ] ], [ [ [ "Hanging-15" ], "operation" ], [ "operation" ] ] ] }, { "qid": "3e1f787a59396deeb88c", "term": "Southern United States", "description": "Cultural region of the United States", "question": "Can you hunt Iberian wolves in the Southern United States?", "answer": false, "facts": [ "The Iberian wolf inhabits northern Portugal and northwestern Spain.", "Portugal and Spain are not located in the Southern United States." ], "decomposition": [ "What is the range of the Iberian wolf?", "Is #1 located in the Southern United States?" ], "evidence": [ [ [ [ "Iberian wolf-1" ] ], [ "operation" ] ], [ [ [ "Iberian wolf-1" ] ], [ [ "United States-1", "Western Europe-1" ] ] ], [ [ [ "Iberian wolf-1" ] ], [ [ "Iberian Peninsula-64", "United States-1" ] ] ] ] }, { "qid": "7fa631340ce8c42aba53", "term": "1980 United States presidential election", "description": "49th quadrennial presidential election in the United States", "question": "Were there greater landslides than 1980 United States presidential election?", "answer": true, "facts": [ "A landslide refers to a competitor beating their opponent by a wide margin.", "Ronald Reagan defeated Jimmy carter in the 1980 United States presidential election by around 8 million votes.", "Franklin D. Roosevelt won the 1936 United States presidential election over Alf Landon by more than 11 million votes.", "In 1804 Thomas Jefferson received 162 (92%) of the electoral votes while Charles Cotesworth Pinckney received only 14 (8%)." ], "decomposition": [ "By what votes margin did Ronald Reagan defeat Jimmy Carter in the 1980 US Presidential election?", "By how many votes was Franklin D. Roosevelt leading Alf Landon in the 1936 US Presidential election?", "How many more votes did Thomas Jefferson receive than Charles Cotesworth Pinckney in the 1804 United States presidential election?", "Are #2 and #3 greater individually than #1?" ], "evidence": [ [ [ [ "Ronald Reagan-50" ] ], [ [ "Franklin D. 
Roosevelt-52" ] ], [ [ "Thomas Jefferson-73" ], "no_evidence" ], [ "operation" ] ], [ [ [ "1980 United States presidential election-50" ] ], [ [ "1936 United States presidential election-4" ] ], [ [ "1804 United States presidential election-3", "Thomas Jefferson-73" ] ], [ "operation" ] ], [ [ [ "1980 United States presidential election-4" ] ], [ [ "1936 United States presidential election-4" ] ], [ [ "1804 United States presidential election-3" ] ], [ "operation" ] ] ] }, { "qid": "3b9f0e80d5929f447166", "term": "Swastika", "description": "a geometrical figure and an ancient religious icon in the cultures of Eurasia and 20th-century symbol of Nazism", "question": "Does the word swastika have meaning in sanskrit?", "answer": true, "facts": [ "In Sanskrit, the word swastika is a combination of ‘su’ (meaning ‘good’) and ‘asti’ (meaning ‘to exist’)", "This meaning of swastika from Sanskrit this gets translated as ‘all is well.’ " ], "decomposition": [ "Was the word swastika derived from a Sanskrit word?" ], "evidence": [ [ [ [ "Swastika-9" ] ] ], [ [ [ "Swastika-3" ], "operation" ] ], [ [ [ "Swastika-3" ], "operation" ] ] ] }, { "qid": "e2608700f4055492cdc3", "term": "Gothenburg", "description": "City in Västergötland and Bohuslän, Sweden", "question": "Could the Toyota Stadium sit a tenth of the population of Gotheburg?", "answer": false, "facts": [ "The Toyota Stadium seats 45,000 people", "Gothenburg has a population of over five hundred thousand" ], "decomposition": [ "How many people can the Toyota Stadium sit?", "What is the population of Gothenburg?", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Toyota Stadium (Texas)-1", "Toyota Stadium-1" ] ], [ [ "Gothenburg-1" ] ], [ "operation" ] ], [ [ [ "Toyota Stadium-1" ] ], [ [ "Gothenburg-17" ] ], [ "operation" ] ], [ [ [ "Toyota Stadium-1" ] ], [ [ "Gothenburg-1" ] ], [ "operation" ] ] ] }, { "qid": "e374088f76c6618c3459", "term": "Portuguese Colonial War", "description": "1961–1974 armed conflicts in Africa between Portugal and independence movements", "question": "Do all of the African regions that participated in the Portugese Colonial War share an official language?", "answer": true, "facts": [ " The current African nations of Angola, Guinea-Bissau and Mozambique participated in the the Portugese Colonial War.", "The Portugese Colonial War was a decisive struggle in Lusophone Africa.", "Lusaphone countries are those that include Portugese as an official language." ], "decomposition": [ "Which African nations participated in the Portuguese Colonial War?", "Which African region did all of #1 belong to?", "Do all nations in #2 share official language?" ], "evidence": [ [ [ [ "Portuguese Colonial War-1" ], "no_evidence" ], [ [ "Portuguese Colonial War-1" ] ], [ [ "Lusophone-1" ] ] ], [ [ [ "Portuguese Colonial War-24" ] ], [ [ "Southern Africa-2" ] ], [ [ "Swahili language-1" ] ] ], [ [ [ "Portuguese Colonial War-2" ] ], [ [ "Sub-Saharan Africa-1" ], "no_evidence" ], [ [ "Portuguese Angola-36", "Portuguese Guinea-4", "Portuguese Mozambique-55" ], "no_evidence", "operation" ] ] ] }, { "qid": "7c74e1234d292f83fbba", "term": "Land of Israel", "description": "Traditional Jewish name for an area of indefinite geographical extension in the Southern Levant.", "question": "Was Land of Israel in possession of an Islamic empire in 16th century?", "answer": true, "facts": [ "Land of Israel was controlled by the Ottoman Empire in 16th century. ", "The religion of Ottoman Empire was Sunni Islam. 
" ], "decomposition": [ "Who ruled the geographic region of Israel in the 16th century?", "Was Islam the state religion of #1?" ], "evidence": [ [ [ [ "Israel-23", "Palestine (region)-20" ] ], [ [ "Ottoman Empire-93" ], "operation" ] ], [ [ [ "Israel-22" ] ], [ [ "Ottoman Empire-96" ], "no_evidence" ] ], [ [ [ "Israel-22" ] ], [ [ "Ottoman Empire-96" ], "operation" ] ] ] }, { "qid": "a11734fceb222d0629c3", "term": "Mount Emei", "description": "mountain", "question": "Can a Liebherr LTM 11200-9.1 hypothetically lift Mount Emei?", "answer": true, "facts": [ "Mount Emei is a 70 ton mountain located in China.", "The Liebherr LTM 11200-9.1 is the world's strongest crane that can lift 1200 tons." ], "decomposition": [ "How much does Mount Emei weigh?", "How much can a Liebherr LTM 11200-9.1 lift?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Mount Emei-2" ], "no_evidence" ], [ [ "Crane (machine)-43" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mount Emei-1" ], "no_evidence" ], [ [ "Liebherr Group-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "7a90cbd4a6c6f4c78f31", "term": "Solo (music)", "description": "musical piece or part of musical piece performed by a single musician", "question": "Can the Department of Defense perform a solo?", "answer": false, "facts": [ "A solo is the part of a musical piece performed by a single musician", "The Department of Defense is a US government agency composed of many individuals and unrelated to music" ], "decomposition": [ "How many are part of a solo performance? ", "What is the Department of Defense?", "How many people are part of #2?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "First solo flight-1" ] ], [ [ "United States Department of Defense-1" ] ], [ [ "United States Department of Defense-1" ] ], [ "operation" ] ], [ [ [ "Solo (music)-1" ] ], [ [ "United States Department of Defense-1" ] ], [ [ "United States Department of Defense-1" ] ], [ "operation" ] ], [ [ [ "Solo (music)-1" ] ], [ [ "United States Department of Defense-2" ] ], [ [ "United States Department of Defense-1" ] ], [ "operation" ] ] ] }, { "qid": "b5fbec7f7dd187a2746e", "term": "Bumblebee", "description": "genus of insects", "question": "Does a bumblebee have to worry about spider veins?", "answer": false, "facts": [ "Spider veins is a condition in which the veins become inflamed.", "Bumblebees have a free flowing blood system and do not have veins or arteries." ], "decomposition": [ "What anatomical features are necessary for a being to have spider veins?", "Do bumblebees have #1?" ], "evidence": [ [ [ [ "Telangiectasia-1" ] ], [ [ "Drone (bee)-7" ], "no_evidence", "operation" ] ], [ [ [ "Telangiectasia-1" ] ], [ [ "Blood vessel-1", "Insect physiology-12" ], "operation" ] ], [ [ [ "Skin-1", "Telangiectasia-1" ] ], [ [ "Arthropods in culture-1", "Invertebrate-1" ], "operation" ] ] ] }, { "qid": "7dc74fd9b62e514750c2", "term": "Bee", "description": "Clade of insects", "question": "Are queen bees unnecessary for growing apples?", "answer": true, "facts": [ "Mason bees are solitary (they live alone)", "Mason bees are efficient pollinators for orchards", "Apple trees are grown in orchards" ], "decomposition": [ "What is the social structure of Mason bees?", "Where are Mason bees recognized as efficient pollinators?", "Is #1 needless of a queen, and are apples grown in #2?" 
], "evidence": [ [ [ [ "Bee-27" ] ], [ [ "Mason bee-2", "Orchard-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mason bee-5" ] ], [ [ "Osmia lignaria-8" ] ], [ [ "Apple-38", "Mason bee-5" ] ] ], [ [ [ "Mason bee-1" ] ], [ [ "Mason bee-11" ], "no_evidence" ], [ [ "Apple-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "fca6bb785830b76c0c7c", "term": "Law & Order", "description": "original television series (1990-2010)", "question": "Can you taste Law & Order?", "answer": false, "facts": [ "Law & Order is a television show.", "Television shows cannot be tasted, only viewed. " ], "decomposition": [ "What is Law & Order?", "Is the answer to #1 something that can be tasted?" ], "evidence": [ [ [ [ "Law & Order-2" ] ], [ [ "Taste-1" ] ] ], [ [ [ "Law & Order (franchise)-1" ] ], [ "operation" ] ], [ [ [ "Law & Order-1" ] ], [ [ "Taste-1", "Television show-30" ] ] ] ] }, { "qid": "43302ebc930b722c112b", "term": "Osama bin Laden", "description": "Co-founder of al-Qaeda", "question": "Does Osama bin Laden put a wafer on his tongue every Sunday?", "answer": false, "facts": [ "Osama bin Laden was an Islamic fundamentalist", "The practice of putting a wafer on your tongue is called Communion", "Communion is a Christian religious practice", "Christians commonly attend religious services on Sunday" ], "decomposition": [ "What is the practice of putting a wafer on your tongue called?", "What religion practices #1 on Sundays?", "Does Osama bin Laden practice #2?" ], "evidence": [ [ [ [ "Eucharist-1" ] ], [ [ "Christianity-37" ] ], [ [ "Osama bin Laden-1" ] ] ], [ [ [ "Eucharist-65" ] ], [ [ "Eucharist-1" ] ], [ [ "Osama bin Laden-1" ], "operation" ] ], [ [ [ "Eucharist-95" ] ], [ [ "Eucharist-1" ] ], [ [ "Osama bin Laden-10" ], "operation" ] ] ] }, { "qid": "75cb50e207fe81886cfc", "term": "Easy Rider", "description": "1969 film by Dennis Hopper", "question": "Will the producer of Easy Rider become an octogenarian in 2021?", "answer": false, "facts": [ "The producer of Easy Rider was Peter Fonda.", "Peter Fonda died in 2019 at the age of 79.", "An octogenarian is someone who is between 80 and 89 years old and is still alive." ], "decomposition": [ "Who produced Easy Rider?", "What characteristics does someone need to be considered an octogenarian?", "What characteristics does #1 have?", "Are all the characteristics in #2 also in #3?" ], "evidence": [ [ [ [ "Easy Rider-1" ] ], [ [ "Illustrations of the rule against perpetuities-2" ] ], [ [ "Peter Fonda-1" ] ], [ "operation" ] ], [ [ [ "Easy Rider-1" ] ], [ "no_evidence" ], [ [ "Peter Fonda-1" ] ], [ "operation" ] ], [ [ [ "Easy Rider-1" ] ], [ [ "Ageing-46" ], "no_evidence" ], [ [ "Peter Fonda-58" ] ], [ "operation" ] ] ] }, { "qid": "f23d09094b35ceab6d54", "term": "Blueberry", "description": "section of plants", "question": "Was the Treaty of Versailles settled over blueberry scones?", "answer": false, "facts": [ "Blueberries are native to North America.", "Blueberries did not come to Europe until the 1930's.", "The treaty of Versailles was made effective in 1920. " ], "decomposition": [ "Where was the The Treaty of Versailles settled?", "When did blueberries first go over to #1?", "When was The Treaty of Versailles settled?", "Did #2 occur before #3?" 
], "evidence": [ [ [ [ "Paris Peace Conference (1919–1920)-1" ] ], [ [ "Blueberry-33" ], "no_evidence" ], [ [ "Treaty of Versailles-1" ] ], [ "operation" ] ], [ [ [ "Paris-1", "Treaty of Versailles-1", "Versailles, Yvelines-1" ] ], [ [ "Blueberry-33" ] ], [ [ "Treaty of Versailles-1" ] ], [ "operation" ] ], [ [ [ "Treaty of Versailles-7" ] ], [ "no_evidence" ], [ [ "Treaty of Versailles-7" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "341375fa1c7cb93278f5", "term": "Zucchini", "description": "Edible summer squash, typically green in color", "question": "Would 7 zucchini's satisfy potassium USDA daily recommendation?", "answer": true, "facts": [ "The USDA recommends at least 3500 mg of potassium a day.", "One zucchini has 512 mg of potassium." ], "decomposition": [ "How much potassium is in a zucchini?", "How much potassium does the USDA recommend daily?", "Would seven times #1 be more than #2?" ], "evidence": [ [ [ [ "Zucchini-27" ], "no_evidence" ], [ [ "Dietary Reference Intake-10" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Zucchini-27" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Potassium-46" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "57cace65de55cbb6b8d4", "term": "Colitis", "description": "inflammation of the colon or the large intestine", "question": "Is it best to avoid kola nuts with colitis?", "answer": true, "facts": [ "Colitis is a disease in which the colon becomes inflamed.", "Many things can trigger colitis, including dairy, alcohol, and caffeine.", "The kola nut is the fruit of the tropical cola tree that contains caffeine inside." ], "decomposition": [ "What triggers colitis? ", "Are any of the triggers in #1 present in the kola nut?" ], "evidence": [ [ [ [ "Colitis-17" ], "no_evidence" ], [ [ "Kola nut-2" ], "operation" ] ], [ [ [ "Colitis-10" ], "no_evidence" ], [ [ "Kola nut-1" ], "no_evidence" ] ], [ [ [ "Colitis-3" ], "no_evidence" ], [ [ "Kola nut-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "3b762e848e0e43f2e97a", "term": "Donald Duck", "description": "Disney cartoon character", "question": "Would Donald Duck be allowed into most grocery stores?", "answer": false, "facts": [ "Donald Duck is known for not wearing any pants or shoes.", "Most grocery stores have a \"No shoes, No Shirt, No Service\" policy." ], "decomposition": [ "What is Donald Duck known for not wearing?", "Would most grocery stores serve people without #1?" ], "evidence": [ [ [ [ "Donald Duck-1" ] ], [ "no_evidence" ] ], [ [ [ "Donald Duck-1" ] ], [ [ "Indecent exposure-1" ], "no_evidence", "operation" ] ], [ [ [ "Donald Duck-1" ], "no_evidence" ], [ [ "Indecent exposure-1" ] ] ] ] }, { "qid": "9d981b77a8466dfe621d", "term": "Indian Ocean", "description": "The ocean between Africa, Asia, Australia and Antarctica (or the Southern Ocean)", "question": "Does the United States of America touch the Indian Ocean?", "answer": false, "facts": [ "The United States of America is bordered by the Atlantic and Pacific Oceans.", "Even the westernmost point of the USA, the Hawaiian Islands, is too far east in the Pacific to be anywhere near that ocean's border with the Indian Ocean." ], "decomposition": [ "What oceans does the US of America border?", "Is the Indian Ocean part of #1?" 
], "evidence": [ [ [ [ "East Coast of the United States-1", "West Coast of the United States-1" ] ], [ "operation" ] ], [ [ [ "Arctic Ocean-2", "Arctic Ocean-3", "Atlantic Ocean-2", "Pacific Ocean-1" ] ], [ "operation" ] ], [ [ [ "Borders of the United States-2" ] ], [ "operation" ] ] ] }, { "qid": "8a26ff980663f11b0c6e", "term": "QWERTY", "description": "keyboard layout where the first line is \"QWERTYUIOP\"", "question": "Can second row of QWERTY keyboard spell Abdastartus's kingdom?", "answer": true, "facts": [ "QWERTY keyboards have one row of numbers followed by three rows of letters.", "The second row of the QWERTY keyboard has the letters: QWERTYUIOP.", "Abdastartus was king of Tyre from 929 – 921 BC." ], "decomposition": [ "What letters are on the second row of a QWERTY keyboard?", "What was Abdastartus' kingdoms name?", "Are all the letters in #2 also found in #1?" ], "evidence": [ [ [ [ "QWERTY-1" ], "no_evidence" ], [ [ "Abdastartus-1" ] ], [ "operation" ] ], [ [ [ "QWERTY-1" ], "no_evidence" ], [ [ "Abdastartus-3" ] ], [ "operation" ] ], [ [ [ "QWERTY-9" ] ], [ [ "Abdastartus-1" ] ], [ "operation" ] ] ] }, { "qid": "7c60f63b6cd0d7aa13f4", "term": "Pottery", "description": "Craft of making objects from clay", "question": "Are all types of pottery safe to cook in?", "answer": false, "facts": [ "Some types of pottery glaze are unsafe for contact with food meant for human consumption. ", "Antique pottery pieces may have hazardous levels of lead in them." ], "decomposition": [ "Are all antique or glazed pottery safe to cook in?" ], "evidence": [ [ [ [ "Pottery-35" ], "no_evidence" ] ], [ [ [ "Pottery-4" ], "no_evidence", "operation" ] ], [ [ [ "Pottery-62" ], "no_evidence" ] ] ] }, { "qid": "34146f5ebddcb370c29a", "term": "New Year's Eve", "description": "holiday celebrated on 31 December", "question": "Should you ask a neighbor for candy on New Year's Eve?", "answer": false, "facts": [ "Halloween is a holiday where children knock on doors of houses in their neighborhood asking for treats", "Halloween falls on October 31st", "New Year's Eve is a celebration of the end of the year held on December 31st" ], "decomposition": [ "On which holiday do children go trick-or-treating?", "What is the date of #1?", "When is New Year's Eve celebration?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Halloween-3" ] ], [ [ "Halloween-1" ] ], [ [ "New Year's Eve-1" ] ], [ "operation" ] ], [ [ [ "Trick-or-treating-1" ] ], [ [ "Halloween-11" ] ], [ [ "New Year's Eve-36" ] ], [ "operation" ] ], [ [ [ "Trick-or-treating-1" ] ], [ [ "Halloween-1" ] ], [ [ "New Year's Eve-1" ] ], [ "operation" ] ] ] }, { "qid": "1bed1742cceb9b51f41d", "term": "Eye surgery", "description": "medical specialty", "question": "Would Eye surgery on a fly be in vain?", "answer": true, "facts": [ "Researchers at BYU have been developing smaller surgical instruments to improve medical procedures.", "BYU researchers created robotically-controlled forceps that can pass through a hole about 3 millimeters in size.", "The eye of a fly is considerably small and estimates range from .5mm to 2mm." ], "decomposition": [ "What levels of precision can be reached by robot-assisted surgery?", "What is the size range of the eye of a fly?", "Is #1 considerably larger than the range of #2?" 
], "evidence": [ [ [ [ "Robot-assisted surgery-21" ], "no_evidence" ], [ [ "Fly-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Robot-assisted surgery-28" ], "no_evidence" ], [ [ "Fly-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Robot-assisted surgery-2" ], "no_evidence" ], [ [ "Fly-14" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "7a8b380af91f1ff5bb21", "term": "Sonnet", "description": "form of poetry with fourteen lines; by the thirteenth century it signified a poem of fourteen lines that follows a strict rhyme scheme and specific structure", "question": "Can Jabberwocky be considered a sonnet?", "answer": false, "facts": [ "A sonnet is a fourteen line poem that follows certain rhyme schemes.", "Jabberwocky is an 1871 poem by Lewis Carroll.", "Jabberwocky is a 28 line poem that uses nonsense words." ], "decomposition": [ "How many lines does a sonnet have?", "How many lines did the poem Jabberwocky have?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Sonnet-2" ] ], [ [ "Jabberwocky-21" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Sonnet-2" ] ], [ [ "Jabberwocky-1", "Jabberwocky-21" ] ], [ "operation" ] ], [ [ [ "Sonnet-2" ] ], [ [ "Jabberwocky-21" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ad2f6f41742075ce5c65", "term": "White", "description": "color", "question": "Is white light the absence of color?", "answer": false, "facts": [ "White light is formed by the combination of red, green, and blue light.", "Instead, black is considered to be the absence of color." ], "decomposition": [ "How many colors of light are mixed to create white light?", "Is #1 equal to zero?" ], "evidence": [ [ [ [ "Color mixing-3" ] ], [ "operation" ] ], [ [ [ "White-41" ] ], [ "operation" ] ], [ [ [ "White-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2bc94c6dfc79d7a86fa2", "term": "Christopher Walken", "description": "American actor", "question": "Could Christopher Walken enlist in the United States Marine Corps?", "answer": false, "facts": [ "Christopher Walken is 77 years old.", "The maximum age for first-time enlistment in the United States Marine Corps is 28." ], "decomposition": [ "How old is Christopher Walken?", "What is the age limit for enlistment in the United States Marine Corps?", "Is #1 less than or equal to #2?" ], "evidence": [ [ [ [ "Christopher Walken-1" ] ], [ [ "United States Armed Forces-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Christopher Walken-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Christopher Walken-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ec3eecce8a13f2742ee4", "term": "Milky Way", "description": "Spiral galaxy containing our Solar System", "question": "Is number of stars in Milky Way at least ten times earth's population?", "answer": true, "facts": [ "The number of stars in the Milky Way galaxy is between 100 and 400 billion.", "Earth's population in 2018 was 7.5 billion people." ], "decomposition": [ "How many stars are in the Milky Way galaxy?", "What is the number of the human population on earth?", "Is #1 greater than or equal to ten times #2?" ], "evidence": [ [ [ [ "Milky Way-2" ] ], [ [ "World population-1" ] ], [ "operation" ] ], [ [ [ "Milky Way-2" ] ], [ [ "World population-1" ] ], [ "operation" ] ], [ [ [ "Milky Way-2" ] ], [ [ "World population-1" ] ], [ "operation" ] ] ] }, { "qid": "4c88f31810b240fd43a6", "term": "The Hobbit", "description": "Fantasy novel by J. R. R. 
Tolkien", "question": "Can The Hobbit be read in its entirety in four minutes?", "answer": true, "facts": [ "The Hobbit is a 95,356 word book by J.R.R. Tolkien.", "Speed reader Howard Stephen Berg could read at the speed of 25,000 words per minute.", "Speed reader Maria Teresa Calderon from the Philippines claimed to be able to read 80,000 words per minute with 100% comprehension." ], "decomposition": [ "How many words are in the Hobbit?", "How many words per minute could Maria Teresa Calderon read?", "What is #2 multiplied by 4?", "Is #3 greater than or equal to #1?" ], "evidence": [ [ [ [ "The Hobbit-3" ], "no_evidence" ], [ [ "Speed reading-19" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Speed reading-19" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "The Hobbit-1" ], "no_evidence" ], [ [ "Speed reading-19" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "85a99480c6fb0177f4c7", "term": "Peach", "description": "species of fruit tree (for the fruit use Q13411121)", "question": "Will you see peach blossoms and Andromeda at the same time?", "answer": false, "facts": [ "Peach trees bloom in the spring.", "Andromeda is visible in the fall." ], "decomposition": [ "When do peach trees bloom?", "When can you see Andromeda?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Peach-18" ] ], [ [ "Andromeda Galaxy-56" ] ], [ [ "Andromeda Galaxy-56", "Peach-18" ] ] ], [ [ [ "Peach-5" ] ], [ [ "Andromeda (constellation)-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Peach-18" ] ], [ [ "Andromeda (constellation)-1", "Andromeda (constellation)-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "bbc91b6a34ebcebd5e5b", "term": "Mini", "description": "British car model made by the British Motor Corporation (BMC) and its successors from 1959 until 2000", "question": "Was the British car, the Mini, the first car manufactured?", "answer": false, "facts": [ "The first car widely manufactured was the Model T.", "The Model T was manufactured in 1908.", "The Mini was made beginning in 1959." ], "decomposition": [ "When was the first car manufactured?", "When was the Mini first manufactured?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Nicolas-Joseph Cugnot-3" ] ], [ [ "Mini-1" ] ], [ "operation" ] ], [ [ [ "Car-24" ] ], [ [ "Mini-1" ] ], [ "operation" ] ], [ [ [ "Car-2" ] ], [ [ "Mini-1" ] ], [ "operation" ] ] ] }, { "qid": "cb9530f6acdd8a2b6676", "term": "Arnold Schwarzenegger", "description": "Austrian-American actor, businessman, bodybuilder and politician", "question": "Could Arnold Schwarzenegger hypothetically defeat Hafþór Björnsson in a powerlifting competition if both are at their peak strength?", "answer": false, "facts": [ "A powerlifting competition is won by the person who lifts the most combined weight across the squat, deadlift, and bench press.", "Arnold Schwarzenegger's powerlifting personal records are a 545 lb squat, 520 lb bench press, and a 710 lb deadlift.", "Hafþór Björnsson's powerlifting personal records in competition are a 970 lb squat, 551 lb bench press, and a 904 lb deadlift." ], "decomposition": [ "What lifts are contested in powerlifting?", "What are Arnold Schwarzenegger's personal records in #1?", "What are Hafþór Björnsson's personal records in #1?", "Is the sum of #2 greater than the sum of #3?" 
], "evidence": [ [ [ [ "Powerlifting-1" ] ], [ [ "Arnold Schwarzenegger-24" ] ], [ [ "Hafþór Júlíus Björnsson-4" ] ], [ "operation" ] ], [ [ [ "Powerlifting-1" ] ], [ [ "Arnold Schwarzenegger-24" ] ], [ [ "Hafþór Júlíus Björnsson-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Powerlifting-1" ] ], [ [ "Arnold Schwarzenegger-24" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c1ecd6ec8ca3c02ed008", "term": "Mesopotamia", "description": "Historical region within the Tigris–Euphrates river system", "question": "Was Mesopotamia part of what is now China?", "answer": false, "facts": [ "Mesopotamia occupies an area of land called the Fertile Crescent which is part of what is currently known as the Middle East.", "China is much further East, beyond the Stans and India." ], "decomposition": [ "What land did Mesopotamia occupy?", "What is #1 currently known as?", "Is #2 located in China?" ], "evidence": [ [ [ [ "Mesopotamia-1" ] ], [ [ "Fertile Crescent-1" ] ], [ [ "China-1" ], "operation" ] ], [ [ [ "Mesopotamia-1" ] ], [ [ "Mesopotamia-1" ] ], [ [ "Central Asia-1", "Western Asia-1" ] ] ], [ [ [ "Mesopotamia-6" ] ], [ [ "Mesopotamia-6" ] ], [ [ "Mesopotamia-6" ] ] ] ] }, { "qid": "570dad603aee73b5c88e", "term": "Doctor Who", "description": "British science fiction TV series", "question": "In Doctor Who, did the war doctor get more screen time than his successor?", "answer": false, "facts": [ "The War Doctor was succeeded by the \"9th Doctor\". ", "The War Doctor was featured in two episodes of Doctor Who.", "The 9th Doctor was featured in 13 episodes of Doctor Who." ], "decomposition": [ "Who was the successor of the War Doctor?", "How many episodes was #1 in?", "How many episodes was the War Doctor in?", "Is #3 greater than #2?" ], "evidence": [ [ [ [ "War Doctor-1" ] ], [ [ "Ninth Doctor-4", "Ninth Doctor-5", "Ninth Doctor-6" ] ], [ [ "War Doctor-10", "War Doctor-8" ] ], [ "operation" ] ], [ [ [ "War Doctor-1" ] ], [ [ "Ninth Doctor-4", "Ninth Doctor-5" ] ], [ [ "Doctor Who-47" ] ], [ "operation" ] ], [ [ [ "War Doctor-1" ] ], [ [ "Ninth Doctor-1" ], "no_evidence" ], [ [ "War Doctor-10", "War Doctor-7", "War Doctor-8", "War Doctor-9" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a505361d75535d674ebd", "term": "Paprika", "description": "spice made from dried fruits of Capsicum annuum", "question": "Can Paprika be made without a dehydrator?", "answer": true, "facts": [ "Peppers can be dehydrated in the oven in lieu of a dehydrator.", "Sunlight and heat have been used for centuries to dry peppers and other foods." ], "decomposition": [ "What is paprika made from?", "Can #1 be dehydrated without using a dehydrator?" ], "evidence": [ [ [ [ "Paprika-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Paprika-1" ] ], [ [ "Food dehydrator-2" ], "no_evidence", "operation" ] ], [ [ [ "Paprika-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "16ced51d31cc386aa343", "term": "Pancake", "description": "Thin, round cake made of eggs, milk and flour", "question": "Are some types of pancakes named after coins?", "answer": true, "facts": [ "Silver dollar pancakes are a variety that is smaller than traditional pancakes.", "Silver dollars are a type of American coin." ], "decomposition": [ "What are some common types of pancakes?", "Is any of #1 named after a coin?" 
], "evidence": [ [ [ [ "Pancake-12" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Pancake-12" ] ], [ [ "Silver dollar-1" ] ] ], [ [ [ "Pancake-67", "Pancake-69", "Pancake-70" ] ], [ [ "Pancake-69" ], "operation" ] ] ] }, { "qid": "378926a5a2874aa941a9", "term": "Fantasy", "description": "Genre of literature, film, television and other artforms", "question": "Would J.K Rowling's top sellers be on a fantasy shelf?", "answer": true, "facts": [ "J.K Rowling's top sellers are her Harry Potter series.", "Harry Potter is a series about a boy who goes to a magical school to learn wizardry." ], "decomposition": [ "What is J. K. Rowling's top selling book?", "Is #1 fantasy?" ], "evidence": [ [ [ [ "J. K. Rowling-1" ] ], [ [ "Harry Potter-1" ], "operation" ] ], [ [ [ "J. K. Rowling-1" ] ], [ [ "J. K. Rowling-1" ] ] ], [ [ [ "J. K. Rowling-22" ] ], [ "operation" ] ] ] }, { "qid": "108c4339892195d73e60", "term": "P. G. Wodehouse", "description": "English author", "question": "Was P. G. Wodehouse's favorite book The Hunger Games?", "answer": false, "facts": [ "P. G. Wodehouse died in 1975.", "The Hunger Games was published in 2008." ], "decomposition": [ "When did P. G. Wodehouse die?", "When was the Hunger Games first published?", "Did #2 happen before #1?" ], "evidence": [ [ [ [ "P. G. Wodehouse-1" ] ], [ [ "The Hunger Games (novel)-1" ] ], [ "operation" ] ], [ [ [ "P. G. Wodehouse-51" ] ], [ [ "The Hunger Games (novel)-1" ] ], [ "operation" ] ], [ [ [ "P. G. Wodehouse-51" ] ], [ [ "The Hunger Games (novel)-1" ] ], [ "operation" ] ] ] }, { "qid": "4464ad7e6a0b908632ac", "term": "Limbic system", "description": "structures of the brain", "question": "Will The Exorcist stimulate limbic system?", "answer": true, "facts": [ "The limbic system of the brain contains regions that detect fear, control bodily functions and perceive sensory information.", "The Exorcist has been called one of the scariest movies of all time.", "The Exorcist ranked number 3 on the American Film Institute's 100 Years/100 Thrills list." ], "decomposition": [ "Based on its functions, what kinds of stimuli is the limbic system likely to respond to?", "What is the general opinion of the movie The Exorcist?", "Does #2 suggest that the movie will generate #1?" ], "evidence": [ [ [ [ "Limbic system-17" ] ], [ [ "The Exorcist-6" ] ], [ [ "Limbic system-17", "The Exorcist (film)-1" ] ] ], [ [ [ "Limbic system-17" ] ], [ [ "The Exorcist (film series)-16", "The Exorcist (film)-78" ] ], [ "operation" ] ], [ [ [ "Limbic system-9" ] ], [ [ "The Exorcist (film)-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3570d57f16a40488ff42", "term": "Judo", "description": "modern martial art, combat and Olympic sport", "question": "Do silicone suits make judo difficult?", "answer": true, "facts": [ "Judo is a martial art that requires combatants to grip their opponents and throw them in various ways.", "Judo practitioners traditionally wear an outfit called a gi, which opponents use to grip and throw.", "Silicone is one of the slipperiest substances on the planet." ], "decomposition": [ "What maneuvers are required to do Judo?", "What characteristics does an article of clothing need to have in order to do #1 effectively?", "What characteristics does a silicone suit have? ", "Is #3 excluded from #2?" 
], "evidence": [ [ [ [ "Judo-1" ] ], [ [ "Keikogi-1" ], "no_evidence" ], [ [ "Silicone rubber-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Leopold's maneuvers-6" ], "no_evidence" ], [ "no_evidence" ], [ [ "Silicone-47" ] ], [ "operation" ] ], [ [ [ "Judo-1" ] ], [ [ "Judo-48" ] ], [ [ "Silicone-1" ] ], [ "operation" ] ] ] }, { "qid": "5c3bf978d0f77907af6c", "term": "JAG (TV series)", "description": "American legal drama television series (1996-2005)", "question": "Could you watch all of JAG in six months?", "answer": true, "facts": [ "JAG has 227 episodes in the entire series.", "Each episode of JAG is between 42-47 minutes long.", "There are over 200,000 minutes in a month. ", "The entire series of JAG is under 12,000 minutes. " ], "decomposition": [ "How many episodes of JAG are there?", "How long is an episode of JAG?", "What is #1 multiplied by #2?", "How many minutes are there in six months?", "Is #3 less than or equal to #4?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ [ "Year-57" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Television pilot-19" ], "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ], [ "operation" ] ] ] }, { "qid": "c921bdade889def26cf4", "term": "Solubility", "description": "Capacity of a designated solvent to hold a designated solute in homogeneous solution under specified conditions", "question": "In isopropyl alcohol, is the solubility of salt low?", "answer": true, "facts": [ "Isopropyl alcohol is unique in the sense that salt remains visible.", "When salt has high solubility, it becomes invisible." ], "decomposition": [ "How does high solubility affect the visibility of salt?", "Does salt in isopropyl alcohol fail to exhibit #1?" ], "evidence": [ [ [ [ "Salt-15" ] ], [ [ "Isopropyl alcohol-4" ] ] ], [ [ [ "Isopropyl alcohol-4" ] ], [ "operation" ] ], [ [ [ "Solubility-1" ] ], [ [ "Isopropyl alcohol-4" ], "no_evidence", "operation" ] ] ] }, { "qid": "6b9093fff19fdcb320e3", "term": "Noble gas", "description": "group of chemical elements tend to be chemically inert and thus form odorless, colorless, monatomic gases with low reactivity; consists of helium, neon, argon, krypton, xenon, radon, and possibly oganesson", "question": "Was the Japanese street aesthetic once illuminated by noble gasses?", "answer": true, "facts": [ "Japan was known for their use of brightly lit businesses and signage.", "The signs in Japan were lit with neon for a long time." ], "decomposition": [ "What were the signs in Japan known for in the past?", "Were noble gasses the reason behind #1?" ], "evidence": [ [ [ [ "Neon lighting-3" ], "no_evidence" ], [ [ "Neon lighting-1" ] ] ], [ [ "no_evidence" ], [ [ "Noble gas-1" ], "operation" ] ], [ [ [ "Neon sign-1" ], "no_evidence" ], [ [ "Neon-1", "Neon-3" ], "operation" ] ] ] }, { "qid": "fd5f22b8ed969a08eea5", "term": "Rick and Morty", "description": "Animated sitcom", "question": "Could Rich and Morty be triggered for children of alcoholics?", "answer": true, "facts": [ "Rick, one of the titular characters of Rick and Morty, is often seen drunk and speaking abusively to Morty.", "Morty's mother Beth is depicted multiple times neglecting her children while getting drunk on wine. ", "Trauma triggers can occur when someone is exposed to something that reminds them of a traumatic situation. 
" ], "decomposition": [ "What depictions are common triggers for children of alcoholics?", "Do any of the characters from Rick and Morty exhibit the characteristics in #1?" ], "evidence": [ [ [ [ "Alcoholism-13" ], "no_evidence" ], [ [ "Rick and Morty-5" ], "operation" ] ], [ [ [ "Alcoholism-13" ], "no_evidence" ], [ [ "Rick and Morty-5" ], "no_evidence", "operation" ] ], [ [ [ "Adult Children of Alcoholics-4" ] ], [ [ "Adult Children of Alcoholics-4", "Rick and Morty-4", "Rick and Morty-5" ] ] ] ] }, { "qid": "f61fa68d73eb81ede181", "term": "Gorillaz", "description": "British virtual band", "question": "Does it seem like the Gorillaz is composed of more members than they have?", "answer": true, "facts": [ "In music videos for Gorillaz songs, there are four animated bandmates playing.", "Gorillaz is a collaboration of 3 band members." ], "decomposition": [ "How many band members are in Gorillaz?", "How many animated band members are in Gorillaz videos?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Gorillaz-1" ] ], [ [ "Gorillaz-1" ] ], [ "operation" ] ], [ [ [ "Gorillaz-1" ] ], [ [ "Gorillaz-1" ] ], [ "operation" ] ], [ [ [ "Gorillaz-1" ] ], [ [ "Gorillaz-1" ] ], [ "operation" ] ] ] }, { "qid": "f7d66ea2a1fcc7473f48", "term": "Bengal cat", "description": "Breed of cat", "question": "Can a Bengal cat survive eating only pancakes?", "answer": false, "facts": [ "Bengal cats are carnivores.", "Pancakes contain no meat.", "Carnivores eat only meat to survive. " ], "decomposition": [ "What type of diet does a Bengal cats follow?", "What do #1 mainly eat?", "Do pancakes contain #2?" ], "evidence": [ [ [ [ "Bengal cat-1" ] ], [ [ "Cat food-9" ] ], [ "operation" ] ], [ [ [ "Bengal cat-1", "Cat-1" ] ], [ [ "Carnivore-1" ] ], [ [ "Pancake-1" ], "operation" ] ], [ [ [ "Bengal cat-1", "Carnivore-7" ] ], [ [ "Carnivore-7" ] ], [ [ "Pancake-1" ] ] ] ] }, { "qid": "78754ccf05c5f541050c", "term": "Oceanography", "description": "The study of the physical and biological aspects of the ocean", "question": "Does a person suffering from Thalassophobia enjoy oceanography?", "answer": false, "facts": [ "Thalassophobia is a deep and persistent fear of the sea.", "Oceanography is the study of bodies of water.", "Oceanographers frequently observe and interact with bodies of water such as lakes, seas, and oceans." ], "decomposition": [ "What do people that have thalassophobia fear?", "Oceanography is the study of what?", "Is #1 excluded from #2?" ], "evidence": [ [ [ [ "Thalassophobia-1" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ], [ [ [ "Thalassophobia-1" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ], [ [ [ "Thalassophobia-1" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ] ] }, { "qid": "57e6b3d22162e254f7f0", "term": "United Airlines", "description": "Airline in the United States", "question": "Is Glycol something United Airlines would buy?", "answer": true, "facts": [ "Glycol is a commonly used de-icing fluid for commercial planes.", "American Airlines flies all year round, including throughout the winter." ], "decomposition": [ "What is Glycol commonly used for?", "What cold season does American Airlines fly its planes?", "Would #1 be helpful in #2?" 
], "evidence": [ [ [ [ "Diol-2", "Diol-4" ] ], [ [ "American Airlines-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ethylene glycol-1" ] ], [ [ "Winter-1" ] ], [ [ "Ethylene glycol-16" ] ] ], [ [ [ "Diol-2", "Ethylene glycol-1" ] ], [ [ "Winter-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "84dd002838898869cfad", "term": "Poseidon", "description": "Ancient Greek god of the sea, earthquakes and horses", "question": "Is Poseidon similar to the god Vulcan?", "answer": false, "facts": [ "Poseidon is the Greek god of the sea and water, and is the brother of Zeus.", "Neptune is the Roman god of the sea.", "Hephaestus is the Greek god of fire.", "Hephaestus's ROman equivalent is Vulcan, the Roman god of fire." ], "decomposition": [ "What are the major characteristics of Poseidon?", "What are the major characteristics of Vulcan?", "Is there a significant overlap between #1 and #2?" ], "evidence": [ [ [ [ "Poseidon-43" ] ], [ [ "Vulcan (mythology)-44" ] ], [ "operation" ] ], [ [ [ "Poseidon-1" ] ], [ [ "Vulcan (mythology)-1" ] ], [ "operation" ] ], [ [ [ "Greek sea gods-6" ] ], [ [ "Vulcan (mythology)-1" ] ], [ "operation" ] ] ] }, { "qid": "2c402ed761b9bd7aa205", "term": "Green Party of England and Wales", "description": "Political party in England and Wales", "question": "Can members of the Green Party of England and Wales vote in the USA?", "answer": false, "facts": [ "Green Party of England Wales isn't registered in the USA.", "People who live in England can't vote in the USA." ], "decomposition": [ "Members of the Green Party of England and Wales are from which country?", "Can people living in #1 vote in the US?" ], "evidence": [ [ [ [ "Green Party of England and Wales-1" ] ], [ "operation" ] ], [ [ [ "Green Party of England and Wales-1" ] ], [ [ "Voting rights in the United States-101" ] ] ], [ [ [ "Green Party of England and Wales-1" ] ], [ [ "Voting rights in the United States-101" ] ] ] ] }, { "qid": "49cd2c594e9715899734", "term": "Golden Gate Bridge", "description": "suspension bridge on the San Francisco Bay", "question": "Do depressed people travel to the Golden Gate Bridge often?", "answer": true, "facts": [ "The Golden Gate Bridge is one of the most popular suicide spots in the USA.", "Suicide is often caused by severe depression." ], "decomposition": [ "What is the ultimate end that severe depression can lead to?", "Is the Golden Gate Bridge a place where #1 is known to often happen?" ], "evidence": [ [ [ [ "Major depressive disorder-22" ] ], [ [ "Suicides at the Golden Gate Bridge-4" ], "operation" ] ], [ [ [ "Suicide-7" ] ], [ [ "Golden Gate Bridge-50" ] ] ], [ [ [ "Suicide-1" ] ], [ [ "Suicides at the Golden Gate Bridge-4" ] ] ] ] }, { "qid": "12f49bcc3fdaebe88bd9", "term": "Olive oil", "description": "liquid fat extracted by pressing olives", "question": "Do some people soak in olive oil and water?", "answer": true, "facts": [ "Adding olive oil to bath water is a common practice for dry skin.", "In baths, people tend to soak for a period of time. " ], "decomposition": [ "During which activity do people soak in water for some time?", "Is it common to add olive oil water for dry skin during #1?" 
], "evidence": [ [ [ [ "Bathing-1" ] ], [ [ "Bathing-44" ], "no_evidence" ] ], [ [ [ "Bathing-1" ] ], [ [ "Bathing-44" ], "no_evidence" ] ], [ [ [ "Bathing-1" ] ], [ [ "Olive oil-28" ], "no_evidence", "operation" ] ], [ [ [ "Bathing-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "c3ee8ac44eff4db19ffb", "term": "Bucharest", "description": "Capital of Romania", "question": "Could a wandering albatross fly from Bucharest to New York City without a rest?", "answer": true, "facts": [ "Wandering albatross can travel at least 15,000 km (just under 10,000 miles) over the sea before returning to land. ", "It's 4766 miles or 7670 km from Bucharest to New York City." ], "decomposition": [ "How far can a Wandering albatross travel over the sea before returning to land?", "How far is it from Bucharest to New York City", "Is #2 less than #1?" ], "evidence": [ [ [ [ "Wandering albatross-4" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Wandering albatross-8" ], "no_evidence" ], [ [ "Bucharest-1", "New York City-1" ], "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Wandering albatross-5" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "b77170e15c0b1bd72340", "term": "Durian", "description": "genus of plants", "question": "Are Durian fruits an olfactory delight?", "answer": false, "facts": [ "Durian is a plant type that produces several kinds of fruit.", "Olfactory refers to the human sense of smell.", "Pleasant smells according to polls include flowers and sweet foods.", "Durian fruits have been banned in Singapore due to its overwhelming smell." ], "decomposition": [ "What kind of smell is the durian known for?", "Is #1 pleasant?" ], "evidence": [ [ [ [ "Durian-3" ] ], [ "operation" ] ], [ [ [ "Durian-3" ] ], [ "operation" ] ], [ [ [ "Durian-3" ] ], [ [ "Durian-3" ] ] ] ] }, { "qid": "3540bb4c468f7b513aab", "term": "Aerosmith", "description": "American rock band", "question": "Can Aerosmith legally drive in the carpool lane?", "answer": true, "facts": [ "Aerosmith is a rock band with five members", "Carpool lanes require at least two occupants in each vehicle" ], "decomposition": [ "How many people does the band Aerosmith have?", "What is the minimum number of occupants required to use the carpool lane?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Aerosmith-1" ] ], [ [ "High-occupancy vehicle lane-1" ] ], [ "operation" ] ], [ [ [ "Aerosmith-1" ] ], [ [ "High-occupancy vehicle lane-1" ] ], [ "operation" ] ], [ [ [ "Aerosmith-1" ] ], [ [ "High-occupancy vehicle lane-1" ] ], [ "operation" ] ] ] }, { "qid": "f6c3094dbf7cb1f21b2a", "term": "Petroleum", "description": "Naturally occurring hydrocarbon liquid found underground", "question": "Can petroleum jelly be used as fuel in a car?", "answer": false, "facts": [ "Petroleum is a highly reactive liquid used to power cars.", "Petroleum jelly is a solid substance used as an ointment on cuts and scrapes to promote healing.", "Petroleum jelly does not oxidize on exposure to the air and is not readily acted on by chemical reagents." ], "decomposition": [ "What is petroleum jelly used for?", "Does #1 include fueling cars?" 
], "evidence": [ [ [ [ "Petroleum jelly-8" ] ], [ [ "Petroleum jelly-8" ] ] ], [ [ [ "Petroleum jelly-2" ] ], [ [ "Gasoline-1" ], "operation" ] ], [ [ [ "Petroleum jelly-15", "Petroleum jelly-23", "Petroleum jelly-24", "Petroleum jelly-26", "Petroleum jelly-8" ] ], [ "operation" ] ] ] }, { "qid": "2ce4e885ee19dbbdb46c", "term": "New York Public Library", "description": "Public library system in New York City", "question": "Does the New York Public Library sell Alpo products?", "answer": false, "facts": [ "The New York Public Library is a public lending library system in New York City", "Alpo produces pet food and related products" ], "decomposition": [ "What does The New York Public Library offer for customers?", "What kinds of products does Alpo make?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "New York Public Library-30", "New York Public Library-31" ] ], [ [ "Alpo (pet food)-1" ] ], [ "operation" ] ], [ [ [ "New York Public Library-3" ] ], [ [ "Alpo (pet food)-1" ] ], [ "operation" ] ], [ [ [ "New York Public Library-19", "New York Public Library-28" ] ], [ [ "Alpo (pet food)-1" ] ], [ "operation" ] ] ] }, { "qid": "8b982a46f1a78d5f295f", "term": "Spider wasp", "description": "family of insects", "question": "Do spider wasps have eight legs?", "answer": false, "facts": [ "A spider wasp is a kind of wasp, which is an insect.", "Insects all have six legs." ], "decomposition": [ "What kind of animal is a spider wasp?", "Do #1's have eight legs?" ], "evidence": [ [ [ [ "Spider wasp-1" ] ], [ [ "Spider wasp-5" ] ] ], [ [ [ "Spider wasp-1", "Wasp-1" ] ], [ [ "Insect-1" ], "operation" ] ], [ [ [ "Spider wasp-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "789b8d1061266869fc86", "term": "Newt Gingrich", "description": "50th Speaker of the United States House of Representatives", "question": "Is Newt Gingrich's nickname a type of Reptile?", "answer": false, "facts": [ "Newt Gingrich was born Newton Leroy Gingrich and goes by the nickname Newt.", "A newt is a type of Amphibian with smooth, sticky skin..", "Reptiles have dry and hard skin." ], "decomposition": [ "What was Newt Gingrich's nickname?", "Is #1 a reptile?" ], "evidence": [ [ [ [ "Newt Gingrich-1" ] ], [ [ "Newt-32" ], "operation" ] ], [ [ [ "Newt Gingrich-1" ], "no_evidence" ], [ [ "Newt-1" ], "operation" ] ], [ [ [ "Newt Gingrich-1" ] ], [ [ "Newt-32" ] ] ] ] }, { "qid": "88675643c5f299344f36", "term": "Tomato", "description": "Edible berry of the tomato plant, originating in South America", "question": "Do you need both hot and cold water to peel a tomato?", "answer": true, "facts": [ "The first step in removing the skin from at tomato is to quickly submerge it in boiling water.", "The second step in removing the skin from a tomato is to take the tomatoes out of the boiling water and put them into ice water." ], "decomposition": [ "What are the various steps involved in peeling tomatoes?", "Does any of #1 use hot water?", "Does any of #1 use cold water?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "Blanching (cooking)-1", "Peel (fruit)-1" ] ], [ [ "Blanching (cooking)-7" ], "operation" ], [ [ "Blanching (cooking)-9" ], "operation" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Tomato-89" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5c27625de0e7c35be856", "term": "Tony Bennett", "description": "American singer", "question": "Is Tony Bennett's middle name shared by a former UFC champion?", "answer": true, "facts": [ "Tony Bennett's full name is Anthony Dominick Benedetto.", "Dominick Cruz is a two-time UFC Bantamweight Champion." ], "decomposition": [ "What is Tony Bennett's middle name?", "What are the names of the former UFC champions?", "Is #1 found in #2?" ], "evidence": [ [ [ [ "Tony Bennett-1" ] ], [ [ "Dominick Cruz-1" ] ], [ "operation" ] ], [ [ [ "Tony Bennett-1" ] ], [ [ "Dominick Cruz-1" ] ], [ "operation" ] ], [ [ [ "Tony Bennett-1" ] ], [ [ "Dominick Reyes-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a895b73bdfb2397b2e27", "term": "Friday", "description": "day of the week", "question": "Does 2015 have more unlucky Friday's than usual?", "answer": true, "facts": [ "Friday the 13th is known as an unlucky Friday because of the number 13.", "A year can have as many as three Friday the 13ths.", "One Friday the 13th is the average per year.", "There were 3 Friday the 13ths in 2015." ], "decomposition": [ "How many Friday the 13ths were in 2015?", "What is the usual number of Friday the 13ths per year?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Friday the 13th-1" ] ], [ [ "Friday the 13th-1" ] ], [ "operation" ] ], [ [ [ "Friday the 13th-1" ] ], [ [ "Friday the 13th-1" ] ], [ "operation" ] ], [ [ [ "Friday the 13th-25" ] ], [ [ "Friday the 13th-1" ] ], [ "operation" ] ] ] }, { "qid": "bda79ddfa59d6d51e081", "term": "Estonian language", "description": "Finno-Ugric language spoken in Estonia", "question": "Did Jesus know anyone who spoke Estonian?", "answer": false, "facts": [ "Estonian is the language of Estonia, which is located in Northern Europe near Finland.", "Jesus is recorded to have lived and traveled almost exclusively within the borders of Galilee.", "Galilee is a territory within the borders of northern Israel.", "Israel is located on the Mediterranean sea in the Middle East.", "Israel is almost 2,000 miles from Estonia. " ], "decomposition": [ "Where is Estonian spoken?", "Where did Jesus live and travel?", "Where is #2 located?", "Is #1 close to #3?" ], "evidence": [ [ [ [ "Estonian language-1" ] ], [ [ "Jesus-114", "Jesus-22" ], "no_evidence" ], [ [ "Bethlehem-1", "Jerusalem-1", "Lower Galilee-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Estonia-1" ] ], [ [ "Nazareth-14" ] ], [ [ "Nazareth-1" ] ], [ [ "Nazareth-54" ], "operation" ] ], [ [ [ "Estonian language-1" ] ], [ [ "Jesus-5" ] ], [ [ "Nazareth-1" ] ], [ "operation" ] ] ] }, { "qid": "8f2f696e717fefb03d9e", "term": "The Dark Knight (film)", "description": "2008 film directed by Christopher Nolan", "question": "Would The Dark Knight be appropriate for a preschool class?", "answer": false, "facts": [ "Preschoolers are between 3 and 5 years old.", "The Dark Knight is rated PG-13.", "PG-13 is a rating that means parents are strongly cautioned that the content of a film may not be appropriate for children under 13." 
], "decomposition": [ "What is the average age of preschoolers?", "What is the Dark Knight rated?", "What is the minimum age to watch something rated #2?", "Is age #1 above #3?" ], "evidence": [ [ [ [ "Preschool-4" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Preschool-4" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Preschool-4" ] ], [ "no_evidence" ], [ [ "PG-13 (disambiguation)-1" ] ], [ "operation" ] ] ] }, { "qid": "07bcc6301dae2a7b038f", "term": "Jujutsu", "description": "Japanese martial art", "question": "Are all limbs required for jujutsu?", "answer": false, "facts": [ "Jujutsu is a Japanese martial art that uses unarmed combat to subdue opponents.", "Nick Newell, a congenital amputee, got his black belt after two straight submission wins.", "Fighter Aaron LaPointe has succeeded in martial arts with a fully paralyzed arm." ], "decomposition": [ "What kind of sport is jujutsu?", "Which sport did Nick Newell get a black belt in?", "Did Nick Newell have all limbs intact or is #2 not a form of #1?" ], "evidence": [ [ [ [ "Jujutsu-1" ] ], [ [ "Nick Newell-1", "Nick Newell-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Jujutsu-1" ] ], [ [ "Nick Newell-1" ], "no_evidence" ], [ [ "Nick Newell-2" ], "no_evidence", "operation" ] ], [ [ [ "Jujutsu-1" ] ], [ "no_evidence" ], [ [ "Nick Newell-2" ] ] ] ] }, { "qid": "f086e8628fde91ab151c", "term": "Underworld", "description": "The mythic Relm of the Dead, located far underground (aka, Hades; Underworld)", "question": "Can you get a ride on Amtrak to the Underworld?", "answer": false, "facts": [ "Amtrak is a passenger railroad service operating in North America", "The Underworld is a fictional location from mythology and religion" ], "decomposition": [ "Which major regions are covered by the passenger railroad service 'Amtrak'?", "Is the Underworld part of #1?" ], "evidence": [ [ [ [ "Amtrak-1" ] ], [ [ "Underworld-1" ], "operation" ] ], [ [ [ "Amtrak-1" ] ], [ [ "Underworld-1" ] ] ], [ [ [ "Amtrak-1" ] ], [ [ "Underworld-1" ], "operation" ] ] ] }, { "qid": "4f8557872837e3b95781", "term": "The Jackson 5", "description": "American pop music family group", "question": "Did Jackson 5 members exceed number in The Osmonds?", "answer": false, "facts": [ "The Jackson 5 was composed of: Jackie, Tito, Jermaine, Marlon and Michael.", "The Osmonds consisted of: Alan, Wayne, Merrill, Jay and Donny." ], "decomposition": [ "How many members did The Jackson 5 have?", "How many members did The Osmonds have?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "The Jackson 5-1" ] ], [ [ "The Osmonds-1" ] ], [ "operation" ] ], [ [ [ "The Jackson 5-1" ] ], [ [ "The Osmonds-1" ] ], [ [ "The Jackson 5-1", "The Osmonds-1" ], "operation" ] ], [ [ [ "The Jackson 5-1" ] ], [ [ "Quintet-1", "The Osmonds-1" ] ], [ "operation" ] ] ] }, { "qid": "0aeb8bccd429c7dfe0da", "term": "Mickey Mouse", "description": "Disney cartoon character", "question": "Did Mickey Mouse appear in a cartoon with Bugs Bunny in 1930?", "answer": false, "facts": [ "Bugs Bunny was created in the late 1930s.", "Mickey Mouse was created in 1928.", "Mickey Mouse appears in Disney cartoons.", "Bugs Bunny appears in Warner Bros. cartoons." ], "decomposition": [ "When was Bugs Bunny created?", "Is #1 before 1930?" 
], "evidence": [ [ [ [ "Bugs Bunny-1" ] ], [ "operation" ] ], [ [ [ "Bugs Bunny-1" ] ], [ "operation" ] ], [ [ [ "Bugs Bunny-1" ] ], [ "operation" ] ] ] }, { "qid": "8e9607aa0cefcb84c0fb", "term": "Isaac Newton", "description": "Influential British physicist and mathematician", "question": "Is Isaac Newton buried at the same church as the author of Great Expectations?", "answer": true, "facts": [ "Isaac Newton is buried at Westminster Abbey.", "Charles Dickens's book Great Expectations was published in 1861.", "Charles Dickens is buried at the Poets' Corner of Westminster Abbey.", "Westminster Abbey is a large church in the City of Westminster, London, England." ], "decomposition": [ "Who is the author of 'Great Expectations'?", "Where is #1 resting place?", "Where was Isaac Newton buried?", "Are #2 and #3 the same?" ], "evidence": [ [ [ [ "Great Expectations-1" ] ], [ [ "Charles Dickens-53" ] ], [ [ "Isaac Newton-46" ] ], [ "operation" ] ], [ [ [ "Great Expectations-1" ] ], [ [ "Charles Dickens-53" ] ], [ [ "Isaac Newton-46" ] ], [ "operation" ] ], [ [ [ "Great Expectations-2" ] ], [ [ "Charles Dickens-53" ] ], [ [ "Isaac Newton-46" ] ], [ "operation" ] ] ] }, { "qid": "424ecb3dd6c64b6da4cc", "term": "Butler", "description": "male domestic worker in charge of all the male household staff", "question": "Do most middle class families have butlers?", "answer": false, "facts": [ "Butlers make about $60,000 per year on average for their work.", "Middle class income is between $48,000 and $145,000." ], "decomposition": [ "What is a butler?", "How much does #1 make per year on average?", "How much is the average middle class income?", "Would #3 be enough to pay #2?" ], "evidence": [ [ [ [ "Butler-1" ] ], [ "no_evidence" ], [ [ "Middle class-34" ] ], [ "operation" ] ], [ [ [ "Butler-1" ] ], [ [ "Butler-15" ], "no_evidence" ], [ [ "Middle class-24" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Butler-1" ] ], [ [ "Butler-20" ], "no_evidence" ], [ [ "Middle class-37" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2e80843bfed38cc06df2", "term": "C", "description": "Letter of the Latin alphabet", "question": "Is letter C crucial to spelling the two most common words in English language?", "answer": false, "facts": [ "The most common word in the English language is \"the\".", "The second most common word in the English language is \"be\"." ], "decomposition": [ "What is the most common word in the English language?", "What is the second most common word in the English language?", "What letters make up #1?", "What letters make up #2?", "Is the letter \"c\" found in both #3 and #4?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Most common words in English-5" ], "no_evidence" ], [ [ "Most common words in English-5" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Most common words in English-5" ], "no_evidence" ], [ [ "Most common words in English-5" ], "no_evidence" ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "028eda7ded7825edb6eb", "term": "Jury", "description": "sworn body of people convened to render a verdict officially submitted to them by a court, or to set a penalty or judgment", "question": "Is a felony jury enough people for a Bunco game?", "answer": true, "facts": [ "Felonies and other serious crimes have a jury of 12 people.", "Bunco is a parlour game requiring 12 or more players." 
], "decomposition": [ "How many people are on a felony criminal jury?", "How many players are needed for a game of Bunco?", "Is #2 the same or less than #1?" ], "evidence": [ [ [ [ "Jury-4" ] ], [ [ "Bunco-1" ] ], [ "operation" ] ], [ [ [ "Jury-2" ] ], [ [ "Bunco-1" ] ], [ "operation" ] ], [ [ [ "Jury-4" ] ], [ [ "Bunco-1" ] ], [ "operation" ] ] ] }, { "qid": "630a416013860c4e094c", "term": "Odyssey", "description": "Epic poem attributed to Homer", "question": "In baseball, is a \"Homer\" named after the poet Homer who wrote the Odyssey?", "answer": false, "facts": [ "Homer is a famous poet who wrote the epic poem the Odyssey.", "The Odyssey is about a character Odysseus on an epic journey home after the fall of Troy.", "In baseball a trip around all the bases is called a Home Run.", "\"Homer\" is a shortened name for Home Run." ], "decomposition": [ "What does the baseball term homer mean?", "Is #1 the same thing as the poet Homer?" ], "evidence": [ [ [ [ "Home run-1", "Home run-15" ] ], [ [ "Homer-1" ], "operation" ] ], [ [ [ "Home run-1", "Home run-35" ] ], [ [ "Homer-1" ], "operation" ] ], [ [ [ "Home run-1", "Home run-14", "Home run-2" ] ], [ [ "Homer-1" ], "operation" ] ] ] }, { "qid": "9c3f11fb9b6b469a3681", "term": "Tax collector", "description": "person who collects taxes", "question": "Does Kenny G hold the qualifications to be a tax collector?", "answer": true, "facts": [ "The qualifications to be a tax collector in the US inlude a bachelor's degree in accounting.", "Kenny G studied accounting at the University of Washington and graduated magna cum laude." ], "decomposition": [ "What are the qualifications to be a tax collector?", "Does Kenny G possess #1?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Certified Public Accountant-16" ], "no_evidence" ], [ [ "Kenny G-5" ], "operation" ] ], [ [ [ "Audit-1", "Tax collector-1" ], "no_evidence" ], [ [ "Kenny G-5" ], "operation" ] ] ] }, { "qid": "3aecd30e1212e2985d4b", "term": "Guitarist", "description": "person who plays the guitar", "question": "Do guitarists need both hands to play?", "answer": true, "facts": [ "The left hand typically positions the chords on the fretboard.", "The right hand plays the strings, either strumming a whole chord or finger-picking individual strings.", "The position of the left hand on the fretboard changes the tones of the strings played by the right hand, so both hands are necessary." ], "decomposition": [ "Which musical instrument do guitarists play?", "How many hands are typically used to play #1?", "Is #2 equal to two?" ], "evidence": [ [ [ [ "Guitarist-1" ] ], [ [ "Guitarist-2", "Guitarist-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Guitarist-1" ] ], [ [ "Guitar-1" ] ], [ "operation" ] ], [ [ [ "Guitarist-1" ] ], [ [ "Guitar-1" ] ], [ "operation" ] ] ] }, { "qid": "43e2fe43169c07b5ab49", "term": "Karachi", "description": "Megacity in Sindh, Pakistan", "question": "Karachi was a part of Alexander the Great's success?", "answer": true, "facts": [ "Karachi is a city in modern day Pakistan.", "Krokola was an ancient port located in what is now Karachi.", "Alexander the Great stationed his fleet in Krokola on his way to Babylon.", "Alexander the Great defeated Darius and conquered Babylon before expanding his empire." ], "decomposition": [ "What is Karachi?", "What was the name of the ancient port that was once located in #1?", "Before expanding his empire, what city did Alexander the Great conquer?", "Did Alexander the Great station his fleet at #2 prior to #3?" 
], "evidence": [ [ [ [ "Karachi-1" ] ], [ [ "Karachi-8" ] ], [ [ "Achaemenid Assyria-41" ] ], [ "operation" ] ], [ [ [ "Karachi-1" ] ], [ [ "Karachi-8" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Karachi-1" ] ], [ [ "Port of Karachi-2" ] ], [ [ "Alexander the Great-51" ], "no_evidence" ], [ [ "Karachi-8" ], "no_evidence", "operation" ] ] ] }, { "qid": "e3ec5d1638cbf5a6ae53", "term": "Maize", "description": "Cereal grain", "question": "Would a bodybuilder choose maize over chicken breast for dinner?", "answer": false, "facts": [ "Bodybuilders aim to eat high amounts of protein in order to stimulate muscle growth.", "Maize contains 9.4 grams of protein per 100 grams.", "Baked chicken breast contains 31 grams of protein per 100 grams." ], "decomposition": [ "What nutrient is critical for bodybuilding?", "How much #1 is in maize?", "How much #1 is in chicken breast?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Bodybuilding-39" ] ], [ [ "Maize-76" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Bodybuilding-41" ] ], [ [ "Maize-77" ], "no_evidence" ], [ [ "Chicken as food-11" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Bodybuilding-41" ] ], [ [ "Maize-76" ] ], [ [ "Chicken as food-11" ] ], [ "operation" ] ] ] }, { "qid": "8b08d90e8d537d92e6a8", "term": "King Kong (2005 film)", "description": "2005 film directed by Peter Jackson", "question": "Was King Kong (2005 film) the lead actress's biggest box office role?", "answer": true, "facts": [ "Naomi Watts starred in King Kong (2005 film).", "Naomi Watts has starred in several movies such as Divergent and the RIng.", "Divergent: Insurgent made 295 million at the box office worldwide .", "The Ring made 248 million at the box office worldwide .", "King Kong (2005 film) made 550 million at the box office worldwide." ], "decomposition": [ "Who was the lead actress in King Kong (2005)?", "What other films has #1 starred in?", "How much did King Kong (2005) make at the box office?", "How much did each of #2 make at the box office?", "Is #3 greater than any of #4?" ], "evidence": [ [ [ [ "King Kong (2005 film)-1" ] ], [ [ "Naomi Watts-1" ] ], [ [ "King Kong (2005 film)-2" ] ], [ [ "Flirting (film)-9", "For Love Alone-8" ] ], [ "operation" ] ], [ [ [ "King Kong (2005 film)-1" ] ], [ [ "Naomi Watts filmography-2" ], "no_evidence" ], [ [ "King Kong (2005 film)-2" ] ], [ [ "The Divergent Series: Insurgent-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "King Kong (2005 film)-1" ] ], [ [ "Naomi Watts filmography-2" ], "no_evidence" ], [ [ "King Kong (2005 film)-28" ] ], [ [ "Naomi Watts-19" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2bd29e26063a43572a19", "term": "Do it yourself", "description": "building, modifying, or repairing something without the aid of experts or professionals", "question": "Are some Do It Yourself projects potentially lethal?", "answer": true, "facts": [ "Deep fried turkey can be made at home, but a small mistake can cause the entire setup to erupt into a grease fire.", "Home roofing repair can be a DIY project but without proper safety gear a fall can be deadly." ], "decomposition": [ "How could DIY deep fried turkey go wrong in case of a mistake?", "What accidents could DIY home roofing cause if something went wrong?", "Are #1 and #2 deadly?" 
], "evidence": [ [ [ [ "Turkey fryer-5" ] ], [ [ "Falling (accident)-1", "Roofer-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Turkey fryer-5" ] ], [ [ "Falling (accident)-17" ], "no_evidence" ], [ [ "Burn-4", "Falling (accident)-17" ], "operation" ] ], [ [ [ "Turkey fryer-5" ] ], [ [ "Home repair-8" ] ], [ "no_evidence" ] ] ] }, { "qid": "af9206e49f97ed8e3b61", "term": "Andes", "description": "Mountain range in South America", "question": "Has mummification in the Andes been prevented by rainfall?", "answer": false, "facts": [ "The Andes includes high, dry zones without precipitation.", "Dry climates do not impede mummification.", "Many mummies have been found in the Andes." ], "decomposition": [ "What type of climate is present in the Andes?", "Does #1 cause rainfall?" ], "evidence": [ [ [ [ "Andes-18" ] ], [ "operation" ] ], [ [ [ "Andes-18" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Andes-19" ] ], [ [ "Desert-1" ], "operation" ] ] ] }, { "qid": "a59367577ae81ab33531", "term": "Lettuce", "description": "Species of annual plant of the daisy family, most often grown as a leaf vegetable", "question": "Can lettuce result in spontaneous abortion?", "answer": true, "facts": [ "Food-borne pathogens that can survive on lettuce include Listeria monocytogenes, ", "Listeria monocytogenes is the causative agent of listeriosis.", "The manifestations of listeriosis include intrauterine or cervical infections in pregnant women, which may result in spontaneous abortion." ], "decomposition": [ "What diseases can be caused by contaminated lettuce?", "Can any of #1 cause intrauterine or cervical infections?" ], "evidence": [ [ [ [ "Lettuce-4" ] ], [ [ "Cervix-30", "Escherichia coli-1", "Salmonella-1" ], "no_evidence", "operation" ] ], [ [ [ "Lettuce-4" ] ], [ [ "Salmonella-22" ], "no_evidence", "operation" ] ], [ [ [ "Lettuce-4" ] ], [ "no_evidence" ] ] ] }, { "qid": "cac8e6300a98cf6128af", "term": "1800", "description": "Year", "question": "Is number of different US President's in 1800s a lucky number in Hong Kong?", "answer": false, "facts": [ "There were 24 different US President's in the 1800s.", "4 is an unlucky number in Chinese numerology.", "Where East Asian and Western cultures blend, such as in Hong Kong, it is possible in some buildings that the thirteenth floor along with all the floors with 4s to be omitted. " ], "decomposition": [ "How many U.S. Presidents served during the 1800's?", "What number is unlucky in Chinese numerology?", "Does #1 end with a number other than #2?" ], "evidence": [ [ [ [ "John Adams-1", "William McKinley-1" ] ], [ [ "Chinese numerology-7" ] ], [ "operation" ] ], [ [ [ "John Adams-1", "William McKinley-1" ] ], [ [ "Chinese numerology-1" ] ], [ "operation" ] ], [ [ [ "USS President (1800)-31" ], "no_evidence" ], [ [ "Chinese numerology-7" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "0992c14cfd410f2d5c1e", "term": "Slot machine", "description": "Casino gambling machine", "question": "Do any video games about the end of civilization have slot machines?", "answer": true, "facts": [ "Fallout New Vegas is a game that takes place after the apocalypse has ocurred. ", "In Fallout New Vegas, players can go to casinos and play on slot machines." ], "decomposition": [ "What video games take place in a post-apocalyptic world?", "Which video games have slot machines?", "Is at least one game in #1 found in #2?" 
], "evidence": [ [ [ [ "Fallout (series)-2", "Fallout: New Vegas-1" ] ], [ [ "Fallout: New Vegas-4" ] ], [ "operation" ] ], [ [ [ "Fallout: New Vegas-1", "The Last of Us-1" ], "no_evidence" ], [ [ "Fallout: New Vegas-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Fallout (series)-1" ], "no_evidence" ], [ [ "Fallout: New Vegas-2", "Fallout: New Vegas-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "965b4af72fa11cbda5d1", "term": "Eminem", "description": "American rapper and actor", "question": "Would Terence Tao outperform Eminem in a math competition?", "answer": true, "facts": [ "Eminem disliked math and social studies, and dropped out of high school at age 17.", "Terence Tao was a child prodigy in mathematics, attending university-level mathematics courses at the age of 9.", "From 1992 to 1996, Tao was a graduate student at Princeton University under the direction of Elias Stein, receiving his Ph.D. at the age of 21." ], "decomposition": [ "What achievements did Terence Tao make as a student of mathematics?", "What was Eminem's disposition to mathematics as a high-schooler?", "Does #1 indicate a higher mathematical ability than #2?" ], "evidence": [ [ [ [ "Terence Tao-2" ] ], [ [ "Eminem-8" ] ], [ [ "Fields Medal-2" ] ] ], [ [ [ "Terence Tao-1" ] ], [ [ "Eminem-8" ] ], [ "operation" ] ], [ [ [ "Terence Tao-11", "Terence Tao-8" ] ], [ [ "Eminem-8" ] ], [ "operation" ] ] ] }, { "qid": "c70d40b57fa67ea13ed6", "term": "Tonsure", "description": "hairstyle related to religious devotion", "question": "Does a person using tonsure have hair at the top of their scalp?", "answer": false, "facts": [ "Tonsure involves shaving some or all of the hair from the head.", "Tonsure styles include a large bald spot at the top of the scalp." ], "decomposition": [ "What parts of the head are shaved for the tonsure hairstyle?", "Is the top of the scalp excluded from #1?" ], "evidence": [ [ [ [ "Tonsure-1" ] ], [ "operation" ] ], [ [ [ "Tonsure-1" ] ], [ "operation" ] ], [ [ [ "Tonsure-1" ] ], [ "operation" ] ] ] }, { "qid": "3b3765fab920695e5387", "term": "Peach", "description": "species of fruit tree (for the fruit use Q13411121)", "question": "Would a black widow woman have use for peaches?", "answer": true, "facts": [ "A black widow woman refers to a woman who murders her husbands for money and remarries.", "Peach pits contain amygdalin, a type of cyanide.", "Cyanide is poisonous to humans." ], "decomposition": [ "What is a black widows main goal?", "What do peach pits contain?", "If a human ate #2, would #1 occur?" ], "evidence": [ [ [ [ "Stacey Castor-31" ] ], [ [ "Amygdalin-5" ] ], [ [ "Amygdalin-3" ] ] ], [ [ "no_evidence" ], [ [ "Amygdalin-2", "Peach-2", "Peach-22" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Black Widow Murders-1" ] ], [ [ "Peach (fruit)-16" ] ], [ "operation" ] ] ] }, { "qid": "f3d87ec114acbb8c9dd7", "term": "Nordic countries", "description": "Geographical and cultural region in Northern Europe and the North Atlantic", "question": "Does someone from Japan need a passport to go to a Nordic country?", "answer": true, "facts": [ "The Nordic Passport Union allows citizens of the Nordic countries: Denmark (Faroe Islands included since 1 January 1966, Greenland not included), Sweden, Norway (Svalbard, Bouvet Island and Queen Maud Land not included), Finland and Iceland (since 24 September 1965) to cross approved border districts without carrying and having their passport checked.", "Japan is not one of the approved countries." 
], "decomposition": [ "What countries recognize the Nordic Passport Union?", "Is Japan included in #1?" ], "evidence": [ [ [ [ "Nordic Passport Union-1" ] ], [ "operation" ] ], [ [ [ "Nordic Passport Union-1" ] ], [ "operation" ] ], [ [ [ "Nordic Passport Union-1" ] ], [ "operation" ] ] ] }, { "qid": "92375474ecfe1fe8b2be", "term": "Tibia", "description": "larger of the two bones of the leg below the knee for vertebrates", "question": "Would you find a tibia beside parsley on a holiday plate?", "answer": true, "facts": [ "The tibia of a goat is eaten during Passover, a Jewish holiday", "Parsley is served on a Passover seder plate beside the goat shank " ], "decomposition": [ "How is Passover celebrated?", "What part of a goat is eaten during #1?", "Is parsley typically served on the same plate as #2?" ], "evidence": [ [ [ [ "Passover-13", "Passover-7" ] ], [ [ "Tibia-1", "Zeroa-1" ] ], [ [ "Passover Seder plate-4" ] ] ], [ [ [ "Passover-22" ] ], [ [ "Passover-23" ] ], [ [ "Parsley-15" ], "no_evidence" ] ], [ [ [ "Passover-13" ] ], [ [ "Passover-87" ], "no_evidence" ], [ [ "Parsley-16" ], "operation" ] ] ] }, { "qid": "68a5517900edfff4ea61", "term": "Hanuman", "description": "The divine monkey companion of Rama in Hindu mythology", "question": "Did Hanuman ever experience an orgasm?", "answer": false, "facts": [ "Hanuman was a life long celibate.", "Celibates refrain from all sexual activity.", "Orgasms are only experienced during sexual activity." ], "decomposition": [ "What does one have to do to experience an orgasm?", "Which of Hanuman's characteristics concerned his #1 aspect?", "Do people who identify as #2 engage in #1?" ], "evidence": [ [ [ [ "Sexual intercourse-1" ] ], [ [ "Hanuman-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Orgasm-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Orgasm-1" ] ], [ [ "Hanuman-1" ] ], [ "operation" ] ] ] }, { "qid": "769c1dcad417f50a7ec1", "term": "U2", "description": "Four-member Irish rock band, from Dublin", "question": "Could someone listen to the entire U2 debut studio album during an episode of Peppa Pig?", "answer": false, "facts": [ "U2's debut studio album was titled Boy.", "The album, Boy, is 42 minutes and 52 seconds long.", "An episode of Peppa Pig has a running time of approximately 5 minutes." ], "decomposition": [ "What is the title of U2's debut studio album?", "How long is #1?", "How long is Peppa Pig episodes?", "Is #3 longer than #2?" ], "evidence": [ [ [ [ "Boy (album)-1" ] ], [ "no_evidence" ], [ [ "Peppa Pig-4" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Boy (album)-1" ] ], [ "no_evidence" ], [ [ "Peppa Pig-6" ] ], [ "operation" ] ], [ [ [ "Disco Boy-5" ], "no_evidence" ], [ "operation" ], [ [ "Peppa Pig-8" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "46ee63236516c42f3267", "term": "Soul music", "description": "Genre of music", "question": "Would Brian Warner be a good singer for a soul music band?", "answer": false, "facts": [ "Soul music is a music genre that originated in the United States African American community in the 1950s.", "Soul music combines elements of African-American gospel music, rhythm and blues and jazz.", "Brian Warner is the lead singer of the band Marilyn Manson.", "The band Marilyn Manson plays industrial heavy metal music." ], "decomposition": [ "What kind of music does Brian Warner play?", "Is soul music listed in #1?" 
], "evidence": [ [ [ [ "Marilyn Manson-2" ] ], [ "operation" ] ], [ [ [ "Marilyn Manson (band)-46" ] ], [ "operation" ] ], [ [ [ "Marilyn Manson-1", "Marilyn Manson-21", "Marilyn Manson-8" ] ], [ "operation" ] ] ] }, { "qid": "f0d6a1bed84099f2f841", "term": "Comma", "description": "Punctuation mark", "question": "Would three commas be sufficient for displaying US 2018 GDP?", "answer": false, "facts": [ "The 2018 GDP of US was 20.54 trillion dollars.", "There are three commas in a billion.", "There are four commas in a trillion." ], "decomposition": [ "How much was US GDP in 2018?", "When written in figures, how many commas would #1 contain?", "Is #2 less than or equal to three?" ], "evidence": [ [ [ [ "United States-142" ], "no_evidence" ], [ [ "Trillion-2" ], "operation" ], [ "operation" ] ], [ [ [ "United States-142" ] ], [ [ "Trillion-2" ] ], [ "operation" ] ], [ [ [ "Economy of the United States-21" ] ], [ [ "Trillion-2" ] ], [ [ "Trillion-2" ], "operation" ] ] ] }, { "qid": "18677e493ffd5cd35fa9", "term": "Kanji", "description": "adopted logographic Chinese characters used in the modern Japanese writing system", "question": "Can a person who knows only English read Kanji?", "answer": false, "facts": [ "Kanji is a Japanese language.", "People who only know English can't read Japanese." ], "decomposition": [ "Is knowledge of Kanji included in English language?" ], "evidence": [ [ [ [ "Kanji-1" ] ] ], [ [ [ "Kanji-1" ] ] ], [ [ [ "Kanji-1" ] ] ] ] }, { "qid": "1daa64834b99cb125ae8", "term": "Walt Disney", "description": "American entrepreneur, animator, voice actor and film producer", "question": "Was Walt Disney able to email his illustrations to people living far away?", "answer": false, "facts": [ "Walt Disney died in 1966", "Modern email came into existence in 1971" ], "decomposition": [ "When was email first used?", "When did Walt Disney die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "Email-9" ] ], [ [ "Walt Disney-1" ] ], [ "operation" ] ], [ [ [ "Email-1" ] ], [ [ "Walt Disney-1" ] ], [ "operation" ] ], [ [ [ "Email-1" ] ], [ [ "Walt Disney-1" ] ], [ "operation" ] ] ] }, { "qid": "feeabf84f23bf6d09d7d", "term": "AirTrain JFK", "description": "People mover system at JFK Airport in New York City", "question": "Could Katharine Hepburn have ridden the AirTrain JFK?", "answer": false, "facts": [ "The AirTrain JFK was built in December 17, 2003.", "Katharine Hepburn died on June 29, 2003." ], "decomposition": [ "When was the AirTrain JFK built?", "When did Katharine Hepburn die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "AirTrain JFK-20" ] ], [ [ "Katharine Hepburn-1" ] ], [ "operation" ] ], [ [ [ "AirTrain JFK-2" ] ], [ [ "Katharine Hepburn-1" ] ], [ "operation" ] ], [ [ [ "AirTrain JFK-15" ] ], [ [ "Katharine Hepburn-63" ] ], [ "operation" ] ] ] }, { "qid": "e5eaf0633f2fcedbcc8e", "term": "Great Recession", "description": "Early 21st-century global economic decline", "question": "Was Great Recession the period of severest unemployment?", "answer": false, "facts": [ "The Great Recession had an unemployment peak of 10%.", "The Great Depression saw global GDP decline by almost 30% and unemployment approach 25%.", "US unemployment numbers approached 15% in May 2020 due to the Coronavirus." ], "decomposition": [ "What was the unemployment rate during the Great Recession?", "What was the US unemployment rate in May 2020?", "Is #2 less than #1?" 
], "evidence": [ [ [ [ "Great Recession-43" ] ], [ [ "Unemployment in the United States-21" ] ], [ "operation" ] ], [ [ [ "Effects of the Great Recession-11" ] ], [ [ "Unemployment in the United States-21" ] ], [ [ "Effects of the Great Recession-11", "Unemployment in the United States-21" ], "operation" ] ], [ [ [ "Effects of the Great Recession-11" ] ], [ [ "Unemployment-153" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d6ab89d323e040b5a28a", "term": "Groundhog Day", "description": "Traditional method of weather prediction", "question": "At Christmastime, do some films remind us that groundhog day is approaching?", "answer": true, "facts": [ "Jack Frost is a 1979 stop motion Christmas film.", "In Jack Frost, the groundhog is a character and gets his own song reminding people of his own holiday." ], "decomposition": [ "What is the name of a stop motion Christmas film that was released in 1979?", "In #1, what does the groundhog get?", "Does #2 remind people of Groundhog Day?" ], "evidence": [ [ [ [ "Jack Frost (TV special)-1" ] ], [ [ "Jack Frost (TV special)-6" ] ], [ "operation" ] ], [ [ [ "Jack Frost (TV special)-1" ] ], [ [ "Jack Frost (TV special)-2", "Jack Frost (TV special)-6" ] ], [ "operation" ] ], [ [ [ "Jack Frost (TV special)-1" ] ], [ [ "Jack Frost (TV special)-2" ] ], [ "operation" ] ] ] }, { "qid": "86b893746c5eeece6760", "term": "Pearl hunting", "description": "Collecting pearls from wild mollusks", "question": "Would Michael Phelps be good at pearl hunting?", "answer": true, "facts": [ "Pearl hunters swim underwater to collect pearls from oysters.", "Michael Phelps is the most decorated Olympic swimmer of all time." ], "decomposition": [ "What do pearl hunters do?", "What is Michael Phelps famous for?", "Does #2 help with accomplishing #1?" ], "evidence": [ [ [ [ "Pearl hunting-1" ] ], [ [ "Michael Phelps-1" ] ], [ [ "Pearl hunting-2" ] ] ], [ [ [ "Pearl hunting-1" ] ], [ [ "Michael Phelps-1" ] ], [ "operation" ] ], [ [ [ "Pearl hunting-2" ] ], [ [ "Michael Phelps-1" ] ], [ "operation" ] ] ] }, { "qid": "8079e0f884664d724347", "term": "Hornet", "description": "Genus of eusocial wasp", "question": "Do hornets provide meaningful data for oceanographers?", "answer": false, "facts": [ "Hornets live on land", "Oceanographers study oceans" ], "decomposition": [ "Where do hornets live?", "What do oceanographers study?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Hornet-2", "Hornet-8" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ], [ [ [ "Hornet-2" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ], [ [ [ "Hornet-2" ] ], [ [ "Oceanography-1" ] ], [ "operation" ] ] ] }, { "qid": "268b7bf55b10eeab7a7e", "term": "USB", "description": "Industry standard", "question": "Is 500GB USB device enough to save 10 hours of Netflix shows a day?", "answer": false, "facts": [ "5 hours of Netflix programming uses up approximately 1 TB of data.", "1 TB is equal to 1000 GB of data." ], "decomposition": [ "How many terabytes of data does 5 hours of Netflix use up?", "What is #1 multiplied by 2?", "How many GB are in a TB?", "What is #3 multiplied by #2?", "Is #4 less than 500?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence", "operation" ], [ [ "Terabyte-2" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "TiVo-46" ], "no_evidence" ], [ "operation" ], [ [ "Terabyte-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Streaming media-39" ], "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "86f9275b46d101656634", "term": "Maritime pilot", "description": "mariner who manoeuvres ships through dangerous or congested waters", "question": "Can COVID-19 spread to maritime pilots?", "answer": true, "facts": [ "Maritime pilots are human beings.", "COVID-19 can spread among human population. " ], "decomposition": [ "Which organisms are susceptible to COVID-19?", "Are maritime pilots one of #1?" ], "evidence": [ [ [ [ "Coronavirus disease 2019-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Coronavirus disease 2019-1" ] ], [ [ "Maritime pilot-1" ], "operation" ] ], [ [ [ "Coronavirus disease 2019-83" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "bbf5422e2b837a2e1ff6", "term": "B", "description": "letter in the Latin alphabet", "question": "Could B be mistaken for an Arabic numeral?", "answer": true, "facts": [ "The letter 'B' resembles a figure-8 with a flattened left side.", "The Arabic numeral '8' is drawn as one large circle and a smaller circle immediately on top, intersecting each other. ", "A 'figure-8' is a shape consisting of two intersecting circles, the larger on the bottom." ], "decomposition": [ "Which figure is the letter B similar in appearance to?", "Is #1 an Arabic numeral?" ], "evidence": [ [ [ [ "8-1", "B-1" ] ], [ [ "Arabic numerals-1" ], "operation" ] ], [ [ [ "B-1", "Beta-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bet (letter)-3" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "c419fd25e9ed4501c9ea", "term": "Grizzly bear", "description": "Subspecies of mammal", "question": "Did occupants of Vellore Fort need to defend themselves from Grizzly Bears?", "answer": false, "facts": [ "The Vellore Fort was a 16th century stronghold in India.", "Grizzly Bears are native to the North American continent." ], "decomposition": [ "Where is the Vellore Fort located?", "Where can grizzly bears be found?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Vellore Fort-1" ] ], [ [ "Brown bear-21" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Vellore Fort-1" ] ], [ [ "Grizzly bear-1" ] ], [ "operation" ] ], [ [ [ "Vellore Fort-1" ] ], [ [ "Grizzly bear-1" ] ], [ [ "India-1", "North America-1" ], "operation" ] ] ] }, { "qid": "c099aea9bc84f482419d", "term": "Gladiator", "description": "combatant who entertained audiences in the Roman Republic and Roman Empire", "question": "Did a gladiator kill his opponent with a shotgun?", "answer": false, "facts": [ "The gladiator games lasted for nearly a thousand years, reaching their peak between the 1st century BC and the 2nd century AD.", "The gladiator games finally declined during the early 5th century.", "The shotgun was not invented until approximately the 18th century." ], "decomposition": [ "When did the gladiator games take place?", "When was the shotgun invented?", "Is #2 within the range of #1?" 
], "evidence": [ [ [ [ "Gladiator-4" ] ], [ [ "Shotgun-38" ] ], [ [ "Gladiator-4", "Shotgun-38" ], "operation" ] ], [ [ [ "Gladiator-4" ] ], [ [ "Shotgun-31" ] ], [ "operation" ] ], [ [ [ "Gladiator-4" ] ], [ [ "Shotgun-32" ] ], [ "operation" ] ] ] }, { "qid": "7b625d3d673751304dfe", "term": "Immanuel Kant", "description": "Prussian philosopher", "question": "Would Immanuel Kant be disgusted by the Black Lives Matter movement?", "answer": true, "facts": [ "Immanuel Kant believed that Africans occupied the second lowest position on his racial hierarchy, below Whites and Asians.", "The Black Lives Matter movement advocates for racial equality and anti-racism." ], "decomposition": [ "What were Immanuel Kant's views on race?", "What are the main beliefs of the Black Lives Matter movement?", "Is #1 significantly different from #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Black Lives Matter-15" ] ], [ "operation" ] ], [ [ [ "Scientific racism-41" ] ], [ [ "Black Lives Matter-14", "Black Lives Matter-9" ] ], [ "operation" ] ], [ [ [ "Scientific racism-1", "Scientific racism-41" ] ], [ [ "Black Lives Matter-15" ] ], [ "operation" ] ] ] }, { "qid": "533d15de91fbfa97347d", "term": "Pregnancy", "description": "time when children develop inside the mother's body before birth", "question": "Will 2020 elephant pregnancy last past next year with 4 solar eclipses?", "answer": false, "facts": [ "The gestation period of elephants are around 95 weeks.", "The year 2029 is the next year with 4 solar eclipses." ], "decomposition": [ "What is the duration of an elephant's gestation period?", "How many years from 2020 will there be a year with four solar eclipses?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Elephant-48" ] ], [ [ "Solar eclipse-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Elephant-48" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Birth-5" ], "no_evidence" ], [ [ "Solar eclipse-20" ] ], [ "operation" ] ] ] }, { "qid": "91a3d81584c66d26d49a", "term": "Fear", "description": "Basic emotion induced by a perceived threat", "question": "Is an espresso likely to assuage fear?", "answer": false, "facts": [ "Fear raises heart rate", "Caffeine raises heart rate", "Coffee may also increase symptoms such as jitteriness and nausea " ], "decomposition": [ "What does fear typically do to a person's heart rate?", "What does espresso typically do to a person's heart rate?", "Is #1 the opposite of #2?" ], "evidence": [ [ [ [ "Fear-4" ] ], [ [ "Caffeine-32", "Espresso-2" ] ], [ "operation" ] ], [ [ [ "Heart rate-15" ] ], [ [ "Caffeine-3" ] ], [ "operation" ] ], [ [ [ "Fear-20" ] ], [ [ "Caffeine-44", "Espresso-2" ] ], [ "operation" ] ] ] }, { "qid": "27aca37fe8213109c4ea", "term": "Burning Man", "description": "annual experimental festival based in Nevada, United States", "question": "Are people more likely than normal to get sunburn at Burning Man?", "answer": true, "facts": [ "Burning Man often attracts lots of young people who are typically wearing minimal clothing due to the weather and for style. ", "Burning Man festivities occur in the hot summer sun and are often not in shaded areas." ], "decomposition": [ "What style of clothing do people wear to the burning man festival?", "Sun burning occurs easily while wearing what style of clothing?", "Is #1 and #2 the same?" 
], "evidence": [ [ [ [ "Burning Man-37" ], "no_evidence" ], [ [ "Nudity-1", "Swimsuit-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Burning Man-37" ] ], [ [ "Sunburn-36" ] ], [ "operation" ] ], [ [ [ "Burning Man-37" ] ], [ [ "Bikini-71" ] ], [ [ "Sunburn-26" ], "operation" ] ] ] }, { "qid": "614b682ff2b6ddd9ecba", "term": "Marco Polo", "description": "Italian explorer and merchant noted for travel to central and eastern Asia", "question": "Do you often hear Marco Polo's name shouted near water?", "answer": true, "facts": [ "\"Marco Polo\" is a popular game among children and adults played while swimming.", "To play \"Marco Polo\", one person shouts \"Marco\" and the other shouts \"Polo\" while avoiding being tagged." ], "decomposition": [ "What is the game Marco Polo?", "When is #1 typically played?", "Does #2 occur near or in water?" ], "evidence": [ [ [ [ "Marco Polo (game)-1" ] ], [ [ "Marco Polo (game)-1" ] ], [ [ "Swimming pool-1" ] ] ], [ [ [ "Marco Polo (game)-1", "Marco Polo (game)-2" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Marco Polo (game)-2" ] ], [ [ "Marco Polo (game)-2" ] ], [ [ "Marco Polo (game)-2" ] ] ] ] }, { "qid": "dcd46aac45ec24673b43", "term": "Rahul Dravid", "description": "Indian cricketer", "question": "Did Rahul Dravid ever kick a field goal?", "answer": false, "facts": [ "Rahul Dravid was a professional cricket player", "Field goal kicks are part of American football" ], "decomposition": [ "Which sport does Rahul Dravid play?", "Does #1 involve field goal kicks?" ], "evidence": [ [ [ [ "Rahul Dravid-1" ] ], [ [ "Field goal-1" ], "operation" ] ], [ [ [ "Rahul Dravid-1" ] ], [ [ "Cricket-1", "Field goal-1" ], "operation" ] ], [ [ [ "Rahul Dravid-1" ] ], [ [ "Field goal-1" ] ] ] ] }, { "qid": "dffa127553f4c2dc9993", "term": "Donatello", "description": "Italian painter and sculptor", "question": "Did Donatello use a smartphone?", "answer": false, "facts": [ "Donatello died on December 13, 1466.", "The first smartphone did not come out until 1992." ], "decomposition": [ "What years was Donatello alive?", "When was the first smartphone released?", "Did #2 occur during #1?" ], "evidence": [ [ [ [ "Donatello-1" ] ], [ [ "Smartphone-6" ] ], [ "operation" ] ], [ [ [ "Donatello-1" ] ], [ [ "Smartphone-16" ] ], [ "operation" ] ], [ [ [ "Donatello-1" ] ], [ [ "Smartphone-6" ] ], [ "operation" ] ] ] }, { "qid": "931ece1ab2ac85572d46", "term": "The Dark Knight (film)", "description": "2008 film directed by Christopher Nolan", "question": "Was the death of Heath Ledger caused by his work on The Dark Knight?", "answer": false, "facts": [ "Heath Ledger accidentally overdosed on prescription medication.", "Heath Ledger's overdose led to his death. " ], "decomposition": [ "What was the cause of Heath Ledger's death?", "Is #1 related to his work on the The Dark Knight?" ], "evidence": [ [ [ [ "Heath Ledger-3" ] ], [ [ "Heath Ledger-21", "Heath Ledger-22", "Heath Ledger-30" ] ] ], [ [ [ "Heath Ledger-29" ] ], [ [ "Heath Ledger-29" ], "no_evidence" ] ], [ [ [ "Heath Ledger-3" ] ], [ [ "Heath Ledger-3" ] ] ] ] }, { "qid": "7ddaf76fa5a53f4b642c", "term": "Parody", "description": "Imitative work created to mock, comment on or trivialise an original work", "question": "Is \"A Tale of Two Cities\" a parody of the Bible?", "answer": false, "facts": [ "\"A Tale of Two Cities\" is an original work by Charles Dickens.", "The Bible is a religious text written down in the early centuries AD.", "A parody is a deriative work intended to make fun of another piece of media. 
" ], "decomposition": [ "Was the story of \"A Tale of Two Cities\" written as an imitation of the Bible?" ], "evidence": [ [ [ [ "A Tale of Two Cities-1", "A Tale of Two Cities-48" ], "operation" ] ], [ [ [ "A Tale of Two Cities-1", "Bible-1" ] ] ], [ [ [ "A Tale of Two Cities-1" ] ] ] ] }, { "qid": "3a20f77eb5aaebb051c7", "term": "D", "description": "letter in the Latin alphabet", "question": "Is the letter D influenced by the shape of ancient doors?", "answer": true, "facts": [ "D is the fourth letter of the Latin alphabet", "D is a descendent of the ancient Phoenician Dalet", "Dalet was represented by a glyph of a door" ], "decomposition": [ "Which ancient language did the letter 'D' descend from?", "What was used to represent 'D' in #1?", "Was #2 a symbol of a door?" ], "evidence": [ [ [ [ "D-2" ] ], [ [ "Dalet-2" ] ], [ [ "Dalet-2" ] ] ], [ [ [ "D-2" ] ], [ [ "D-2" ] ], [ "operation" ] ], [ [ [ "D-2" ] ], [ [ "D-2" ] ], [ [ "D-2", "Logogram-1" ] ] ] ] }, { "qid": "d640dea4e362ceaa2a64", "term": "Acetylene", "description": "chemical compound", "question": "Did Julio Gonzalez like acetylene?", "answer": true, "facts": [ "Julio Gonzalez was an artist who welded metal to create sculptures", "Welding is achieved by using a blowtorch on metal", "Blowtorches use acetylene as fuel" ], "decomposition": [ "What technique did Julio Gonzalez use to create his scultures?", "What is the main tool used for #1?", "What is a common fuel for #2?" ], "evidence": [ [ [ [ "Julio González (sculptor)-5" ], "no_evidence" ], [ [ "Welding-10" ] ], [ [ "Acetylene-14" ], "operation" ] ], [ [ [ "Julio González (sculptor)-4" ] ], [ [ "Oxy-fuel welding and cutting-3" ] ], [ [ "Oxy-fuel welding and cutting-30" ] ] ], [ [ [ "Julio González (sculptor)-1", "Julio González (sculptor)-4" ] ], [ [ "Welding-10" ] ], [ [ "Acetylene-14" ] ] ] ] }, { "qid": "9d5af50292804754a5d2", "term": "Supreme Court of Canada", "description": "highest court of Canada", "question": "Is clerk of Supreme Court of Canada safe profession for someone with seismophobia?", "answer": true, "facts": [ "Seismophobia is the extreme fear of earthquakes.", "The Supreme Court of Canada is located in Ottawa.", "The Ottawa-Gattineau region is located far from active tectonic plates." ], "decomposition": [ "What is seismophobia a fear of?", "Movement of what causes #1?", "Where is the Supreme Court of Canada located?", "Is #3 located near active #2's?" ], "evidence": [ [ [ [ "2019–20 Puerto Rico earthquakes-23" ] ], [ [ "Earthquake-3" ] ], [ "no_evidence" ], [ [ "Earthquake-25" ], "operation" ] ], [ [ [ "Earthquake-1" ], "no_evidence" ], [ [ "Earthquake-3" ] ], [ [ "Supreme Court of Canada-19" ] ], [ [ "Ottawa-16" ], "operation" ] ], [ [ [ "2019–20 Puerto Rico earthquakes-23" ], "no_evidence" ], [ [ "Seismology-5" ] ], [ [ "Supreme Court of Canada-19" ] ], [ [ "Ottawa-16" ], "operation" ] ] ] }, { "qid": "62239530032e5e88b8a7", "term": "Oceanography", "description": "The study of the physical and biological aspects of the ocean", "question": "Does an individual oceanographer study many sciences?", "answer": true, "facts": [ "Study of the oceans involve many fields or science.", "To properly study their specific topic of research, an oceanographer must understand how their science interacts with the other involved sciences." ], "decomposition": [ "What other fields of science does oceanography cover?", "Does an individual oceanographer have to understand all of #1 to properly understand oceanography?" 
], "evidence": [ [ [ [ "Oceanography-1" ] ], [ "operation" ] ], [ [ [ "Oceanography-1" ] ], [ [ "Oceanography-1" ] ] ], [ [ [ "Oceanography-1" ] ], [ "operation" ] ] ] }, { "qid": "fdff69018a970c7f3bbc", "term": "Homelessness", "description": "circumstance when people desire a permanent dwelling but do not have one", "question": "Does Antarctica have a lot of problems relating to homelessness?", "answer": false, "facts": [ "Antarctica has no permanent residents.", "Exposure to the elements would be deadly for homeless people during certain times of year." ], "decomposition": [ "What do homeless people lack?", "Does the weather in Antarctica support life without #1?" ], "evidence": [ [ [ [ "Homelessness-1" ] ], [ [ "Antarctica-2" ], "operation" ] ], [ [ [ "Homelessness-1" ] ], [ [ "Antarctica-47" ] ] ], [ [ [ "Homelessness-6" ] ], [ [ "Antarctica-43", "Climate of Antarctica-10", "Homelessness-6" ] ] ] ] }, { "qid": "8ed193332f45fe1b9def", "term": "Superhero fiction", "description": "Fiction genre", "question": "Was Superhero fiction invented in the digital format?", "answer": false, "facts": [ "The Golden Age of comics occurred between the 1930's and the 1950's.", "Shatter was the first digitally drawn, commercially published comic." ], "decomposition": [ "In which format was superhero fiction first introduced?", "During which period were #1 first published and made popular?", "When was the first digitally drawn #1 published?", "Is #2 after #3?" ], "evidence": [ [ [ [ "Superhero fiction-21" ] ], [ [ "Comic book-5" ] ], [ [ "Shatter (digital comic)-2" ] ], [ "operation" ] ], [ [ [ "Superhero fiction-21" ] ], [ [ "Superhero fiction-21" ] ], [ [ "Digital comic-4" ] ], [ "operation" ] ], [ [ [ "Superhero-1" ] ], [ [ "Superhero-1" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "bf5f8d8dc96fe5e04f4b", "term": "Cooper (profession)", "description": "Maker of staved vessels such as barrels", "question": "Are coopers required in the beverage industry?", "answer": true, "facts": [ "Coopers make barrels.", "Barrels are used to store certain alcoholic beverages during production." ], "decomposition": [ "What liquids are barrels made for?", "Are any of #1 part of the beverage industry?" ], "evidence": [ [ [ [ "Barrel-2" ] ], [ [ "Drink-1" ], "operation" ] ], [ [ [ "Barrel-4" ] ], [ [ "Sake-1" ], "operation" ] ], [ [ [ "Barrel-1", "Barrel-2", "Barrel-4" ] ], [ "operation" ] ] ] }, { "qid": "01254b50648f4fb08b51", "term": "American black bear", "description": "species of bear", "question": "Could two newborn American Black Bear cubs fit on a king size bed?", "answer": true, "facts": [ "The average size of an American Black Bear cub is only 8 inches at birth.", "King size beds are 76\"x80\" in size." ], "decomposition": [ "What is the average length of an American Black Bear cub?", "What is the size of a king bed?", "Is two times #1 smaller than #2?" ], "evidence": [ [ [ [ "American black bear-29" ] ], [ [ "Bed size-6" ] ], [ "operation" ] ], [ [ [ "American black bear-19" ] ], [ [ "Bed size-17" ] ], [ [ "Bed size-17" ], "operation" ] ], [ [ [ "American black bear-29" ] ], [ [ "Bed size-23" ] ], [ "operation" ] ] ] }, { "qid": "7dcf18b23f679bc5a7cc", "term": "Gandalf", "description": "Fictional character created by J. R. R. 
Tolkien", "question": "Gandalf hypothetically defeats Rincewind in a wizard battle?", "answer": true, "facts": [ "Gandalf is a 2000 plus year old wizard that has fought orcs and spirits in Middle Earth.", "Rincewind is the protagonist of the Discworld series.", "Rincewind is a failed student at the Unseen University for wizards in Ankh-Morpork.", "Rincewind is described by other wizards as the magical equivalent to the number zero." ], "decomposition": [ "How powerful is Gandalf as portrayed in LOTR?", "How powerful is Rincewind as portrayed at the Unseen University for wizards?", "Does #1 include far more experience and accomplishments than #2?" ], "evidence": [ [ [ [ "Gandalf-2" ] ], [ [ "Rincewind-1" ] ], [ "no_evidence" ] ], [ [ [ "Gandalf-2" ], "no_evidence" ], [ [ "Rincewind-1" ] ], [ "operation" ] ], [ [ [ "Gandalf-2" ] ], [ [ "Rincewind-1" ] ], [ "operation" ] ] ] }, { "qid": "c253256fe0d8014da333", "term": "Alfa Romeo", "description": "Italian automotive manufacturer", "question": "Would an Alfa Romeo vehicle fit inside a barn?", "answer": true, "facts": [ "Alfa Romeo makes cars.", "Barns are large enough to hold a car." ], "decomposition": [ "What is the average length of an Alfa Romeo?", "What is the average size of a barn?", "Is #1 smaller than #2?" ], "evidence": [ [ [ [ "Alfa Romeo-46" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Alfa Romeo-1" ], "no_evidence" ], [ [ "Barn-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e6fc47d0cb907a7ee266", "term": "Tonsillitis", "description": "Inflammation of the tonsils", "question": "Is strep throat harmless to singer Rita Ora after her 2020 tonsilitis surgery?", "answer": false, "facts": [ "Tonsilitis is an inflammation of the tonsils.", "Singer Rita Ora had her tonsils removed in February of 2020 due to tonsilitis.", "Strep throat can still grow in the throat of people without tonsils." ], "decomposition": [ "What causes strep throat?", "Does #1 only flourish when tonsils are present?" ], "evidence": [ [ [ [ "Streptococcal pharyngitis-1" ] ], [ "no_evidence" ] ], [ [ [ "Throat irritation-7" ] ], [ [ "Streptococcal pharyngitis-1" ] ] ], [ [ [ "Streptococcal pharyngitis-2" ] ], [ [ "Streptococcal pharyngitis-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "a87b94672fd08c0a2f0e", "term": "United States Military Academy", "description": "U.S. Army's federal service academy in West Point, New York", "question": "Would the United States Military Academy reject an applicant with multiple sclerosis?", "answer": true, "facts": [ "Multiple Sclerosis is a progressive condition affecting the brain and spinal chord.", "The US Military Academy does not give waivers for serious progressive conditions." ], "decomposition": [ "What kind of condition is Multiple Sclerosis?", "Would the US Military Academy have to reject someone with #1?" 
], "evidence": [ [ [ [ "Multiple sclerosis-5" ] ], [ [ "United States Military Academy-36" ], "no_evidence" ] ], [ [ [ "Multiple sclerosis-1" ] ], [ [ "United States Naval Academy-99" ], "operation" ] ], [ [ [ "Multiple sclerosis-59" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "948bbb81c0395227d3c6", "term": "Metropolitan Museum of Art", "description": "Art museum in New York City, New York", "question": "Could someone in Tokyo take a taxi to the The Metropolitan Museum of Art?", "answer": false, "facts": [ "Tokyo is located in Japan.", "Japan and the United States are separated by the Pacific Ocean.", "A taxi is not capable of travelling over water." ], "decomposition": [ "Where is Tokyo?", "Where is the Metropolitan Museum of Art?", "What separates #1 and #2?", "Can a taxi drive on #3?" ], "evidence": [ [ [ [ "Tokyo-1" ] ], [ [ "Metropolitan Museum of Art-1" ] ], [ [ "Pacific Ocean-1" ] ], [ [ "Taxicab-44", "Water taxi-1" ] ] ], [ [ [ "Tokyo City-5" ] ], [ [ "Metropolitan Museum of Art-3" ] ], [ [ "Ocean-3" ], "operation" ], [ [ "Ocean-3" ] ] ], [ [ [ "Tokyo-1" ] ], [ [ "Metropolitan Museum of Art-58" ] ], [ [ "Pacific Ocean-1" ] ], [ [ "Taxicab-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "bea6df46c8f218b4b5fb", "term": "Game engine", "description": "Software-development environment designed for building video games", "question": "Does Adobe Suite have video game engine coding?", "answer": true, "facts": [ "Adobe applications runs on the C++ framework.", "Many video games are run on Unity game engine.", "The Unity game engine is a C++ coded engine." ], "decomposition": [ "What framework does Adobe Suite run on?", "What game engine do most video games run on?", "What type of engine is #2?", "Is the framework for #1 the same as the engine for #3?" ], "evidence": [ [ [ [ "Adobe Creative Suite-1", "Starling Framework-2" ] ], [ [ "Starling Framework-1" ], "no_evidence" ], [ [ "Starling Framework-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Adobe Creative Suite-1" ], "no_evidence" ], [ [ "Game engine-27" ], "no_evidence" ], [ [ "Game engine-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Adobe Creative Suite-1", "C++-2" ], "no_evidence" ], [ [ "Unreal Engine-1" ] ], [ [ "Unreal Engine-1" ] ], [ "operation" ] ] ] }, { "qid": "910d7db694573a1683dd", "term": "Europa (moon)", "description": "The smallest of the four Galilean moons of Jupiter", "question": "Is Europa (moon) name origin related to Amunet?", "answer": false, "facts": [ "Europa (moon) gets its name from the Greek Classical Mythology story.", "Europa was a woman that Zeus was in love with, and he changed into a bull to deceive her.", "Amunet is a primordial goddess in Ancient Egyptian religion." ], "decomposition": [ "What was the moon 'Europa' named after?", "Which myth is #1 a part of?", "Is Amunet a part of #2?" ], "evidence": [ [ [ [ "Europa (moon)-6" ] ], [ [ "Europa (moon)-6" ] ], [ [ "Amunet-1" ] ] ], [ [ [ "Europa (moon)-1" ] ], [ [ "Europa (consort of Zeus)-1" ] ], [ [ "Amunet-1" ], "operation" ] ], [ [ [ "Europa (moon)-1" ] ], [ [ "Europa (consort of Zeus)-2" ] ], [ [ "Amunet-1" ], "operation" ] ] ] }, { "qid": "9a7a4669a295c5f3ebea", "term": "Hunger", "description": "Sustained inability to eat sufficient food", "question": "Was Jean Valjean imprisoned due to hunger?", "answer": true, "facts": [ "Jean Valjean was sentenced to imprisonment due to theft of property.", "The item Jean Valjean stole was a loaf of bread for his family." 
], "decomposition": [ "What crime was Jean Valjean convicted of?", "What did Jean Valjean gain from #1?", "Who did he give #2 to?", "Is hunger experienced by #3 the main reason for wanting #2?" ], "evidence": [ [ [ [ "Jean Valjean-1" ] ], [ [ "Jean Valjean-1" ] ], [ [ "Jean Valjean-1" ] ], [ "operation" ] ], [ [ [ "Jean Valjean-1" ] ], [ [ "Jean Valjean-1" ] ], [ [ "Jean Valjean-1" ] ], [ "operation" ] ], [ [ [ "Jean Valjean-6" ] ], [ [ "Jean Valjean-7" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "c864fb965099ee7f6ebd", "term": "Hypothermia", "description": "A human body core temperature below 35.0°C", "question": "Would someone on Venus be unlikely to experience hypothermia?", "answer": true, "facts": [ "Hypothermia typically occurs from exposure to extreme cold.", "The average surface temperature on Venus is 863°F.", "A warmer surface temperature on the planet will result in a higher body temperature for people on that planet." ], "decomposition": [ "What is the average surface temperature on Venus?", "In order for the human body to experience hypothermia, it would have to be exposed to temperature that are what in relation to body temp?", "What is human body temperature?", "Does #1 meet the condition of #2 relative to #3?" ], "evidence": [ [ [ [ "Venus-23" ] ], [ [ "Hypothermia-1" ] ], [ [ "Human body temperature-4" ] ], [ "operation" ] ], [ [ [ "Venus-2" ] ], [ [ "Hypothermia-2" ] ], [ [ "Human body temperature-7" ] ], [ "operation" ] ], [ [ [ "Venus-19" ] ], [ [ "Hypothermia-5" ] ], [ [ "Human body temperature-4" ] ], [ "operation" ] ] ] }, { "qid": "7cfc1003f0a479b3487c", "term": "Jack Black", "description": "American actor, comedian, musician, music producer and youtuber.", "question": "Is Jack Black unlikely to compete with Bear McCreary for an award?", "answer": true, "facts": [ "Jack Black is a musician but not a composer", "Bear McCreary is a composer", "Their interests are similar but their skills not overlap in awards categories" ], "decomposition": [ "What music-related occupation does Bear McCreary have?", "What types of awards are won by notable figures who work as #1? ", "What music-related occupation does Jack Black have?", "What types of awards have been won by notable figures who work as #3?", "Are #2 and #4 separate categories of awards?" ], "evidence": [ [ [ [ "Bear McCreary-1" ] ], [ [ "American Society of Composers, Authors and Publishers-18" ] ], [ [ "The Pick of Destiny-8" ] ], [ [ "Jack Black-24" ] ], [ [ "American Society of Composers, Authors and Publishers-18", "Jack Black-24" ] ] ], [ [ [ "Bear McCreary-1" ] ], [ [ "American Society of Composers, Authors and Publishers-18", "International Film Music Critics Association-1" ], "no_evidence" ], [ [ "Jack Black-1" ] ], [ [ "Grammy Award-1", "Grammy Award-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bear McCreary-1" ] ], [ [ "Bear McCreary-2" ] ], [ [ "Jack Black-16" ] ], [ [ "Grammy Award for Best Hard Rock Performance-1" ] ], [ "operation" ] ] ] }, { "qid": "a67d586dc8bd0c0a2a9e", "term": "Insomnia", "description": "The inability to fall or stay sleep", "question": "Would Cuba Libre consumption help with insomnia?", "answer": false, "facts": [ "Cuba Libre is a highball cocktail consisting of cola, rum, and in many recipes lime juice on ice.", "Traditionally, the cola ingredient is Coca-Cola (\"Coke\"), and the alcohol is a light rum such as Bacardi. ", "Coca-Cola typically contains caffeine.", "Caffeine consumption often promotes insomnia." 
], "decomposition": [ "What is the traditional source of cola in Cuba Libre?", "Does intake of #1 help treat insomnia?" ], "evidence": [ [ [ [ "Rum and Coke-1" ] ], [ [ "Caffeine-42", "Coca-Cola-38" ], "operation" ] ], [ [ [ "Coca-Cola-1", "Rum and Coke-1" ] ], [ "operation" ] ], [ [ [ "Rum and Coke-1" ] ], [ [ "Caffeine-18", "Coca-Cola formula-9" ] ] ] ] }, { "qid": "66101dcd75caa3fe265d", "term": "Solubility", "description": "Capacity of a designated solvent to hold a designated solute in homogeneous solution under specified conditions", "question": "Does Nigella Lawson care about solubility?", "answer": true, "facts": [ "Nigella Lawson is a chef", "Chefs are concerned with cooking processes and nutrition", "Solubility plays a role in both the chemistry of cooking processes as well as the body's interaction with substances that it ingests" ], "decomposition": [ "What is Nigella Lawson's major occupation?", "What kind of substances and processes is the concept of solubility applicable to?", "What kind of substances and processes are of importance to #1?", "Are any of #2 included in #3?" ], "evidence": [ [ [ [ "Nigella Lawson-1" ] ], [ [ "Solubility-1" ], "no_evidence" ], [ [ "Cooking-34" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Nigella Lawson-1" ] ], [ [ "Solubility-1" ] ], [ [ "Cooking-17" ] ], [ "operation" ] ], [ [ [ "Nigella Lawson-1" ] ], [ [ "Solubility-1" ], "no_evidence" ], [ [ "Cooking-14" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3ed6c807bfa51e5577d1", "term": "Daytona 500", "description": "Auto race held in Daytona, Florida, United States", "question": "Can E6000 cure before a hoverboard finishes the Daytona 500? ", "answer": true, "facts": [ "The Daytona 500 is 500 miles", "A hoverboard can move at six to eight miles per hour", "E6000 fully cures in 24 to 72 hours" ], "decomposition": [ "How long is the Daytona 500?", "How fast can a hoverboard move in hours?", "What is #1 divided by #2?", "How many hours does it take for a E6000 to cure?", "Is #4 more less than #3?" ], "evidence": [ [ [ [ "Daytona 500-1" ] ], [ [ "Franky Zapata-12" ], "no_evidence" ], [ "operation" ], [ [ "Conroe (microprocessor)-8" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Daytona 500-1" ] ], [ [ "Self-balancing scooter-9" ] ], [ "operation" ], [ [ "Adhesive-28" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Daytona 500-1" ] ], [ [ "Hoverboard-18" ] ], [ "operation" ], [ [ "Adhesive-42" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "98c00a69ee9fd33c60f7", "term": "Pope", "description": "Leader of the Catholic Church", "question": "Could the Pope be on an episode of Pimp My Ride?", "answer": true, "facts": [ "Pimp My Ride is a show where people's cars are upgraded and improved", "The Pope has a personal vehicle called the Popemobile" ], "decomposition": [ "In the show Pimp My Ride, what type of vehicles are upgraded and improved?", "Does the Pope have #1?" ], "evidence": [ [ [ [ "Pimp My Ride-5" ] ], [ [ "Landaulet (car)-7" ] ] ], [ [ [ "Pimp My Ride-1" ] ], [ [ "Popemobile-1" ] ] ], [ [ [ "Pimp My Ride-1" ] ], [ [ "Popemobile-1" ], "operation" ] ] ] }, { "qid": "8a0c5118b5aba20a9ab6", "term": "Penny", "description": "unit of currency in various countries", "question": "Are pennies commonly used in Canada?", "answer": false, "facts": [ "Canada used pennies historically as one cent coins.", "Canada stopped minting pennies in 2012. " ], "decomposition": [ "What coins are used in Canada?", "Are pennies among #1?" 
], "evidence": [ [ [ [ "Canadian dollar-16" ] ], [ "operation" ] ], [ [ [ "Coins of the Canadian dollar-2" ] ], [ [ "Coins of the Canadian dollar-2" ] ] ], [ [ [ "Canadian dollar-16" ] ], [ "operation" ] ] ] }, { "qid": "4c00343e96364ec24227", "term": "Drag king", "description": "female performance artists who dress and behave in masculine way for performance", "question": "Do drag kings take testosterone to look masculine?", "answer": false, "facts": [ "Drag Kings will use contouring and makeup to make their facial features appear more masculine and chiseled. ", "Testosterone is prescribed for transgender men to help with transitioning and dysphoria.", "Drag kings often identify as women, but dress as men for show." ], "decomposition": [ "Which features of themselves do drag kings modify to look masculine?", "Would #1 require testosterone intake?" ], "evidence": [ [ [ [ "Passing (gender)-31" ], "no_evidence" ], [ [ "Transgender hormone therapy (male-to-female)-42" ], "operation" ] ], [ [ [ "Drag king-1" ] ], [ "operation" ] ], [ [ [ "Drag king-1" ] ], [ [ "Testosterone-1" ], "operation" ] ] ] }, { "qid": "7d6b8191f43b8526074e", "term": "Halloween", "description": "Holiday celebrated October 31", "question": "Will Chick Fil A be open on Halloween 2021?", "answer": false, "facts": [ "Chick Fil A restaurants close on Sundays.", "Halloween 2021 falls on a Sunday." ], "decomposition": [ "What day of the week does Halloween fall on in 2021?", "What days of the week is Chick Fil A closed?", "Is #1 included in #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Chick-fil-A-18" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Chick-fil-A-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Chick-fil-A-2" ] ], [ "operation" ] ] ] }, { "qid": "d7bac31bafedfe9de6ee", "term": "Alfa Romeo", "description": "Italian automotive manufacturer", "question": "Can you order an Alfa Romeo at Starbucks?", "answer": false, "facts": [ "Alfa Romeo is a brand of automobile", "Starbucks sells coffee, tea, food, and some drink products like thermoses" ], "decomposition": [ "What kind of product is an Alfa Romeo?", "What kind of goods does Starbucks sell?", "Is #1 found in #2?" ], "evidence": [ [ [ [ "Alfa Romeo Giulietta (940)-1" ] ], [ [ "Starbucks-1" ] ], [ "operation" ] ], [ [ [ "Alfa Romeo-1" ] ], [ [ "Starbucks-1" ] ], [ "operation" ] ], [ [ [ "Alfa Romeo-1" ] ], [ [ "Starbucks-1" ] ], [ "operation" ] ] ] }, { "qid": "0a1946f12a7b7dbbf9cd", "term": "United States Marine Corps", "description": "Amphibious warfare branch of the United States Armed Forces", "question": "Would a recruit for the United States Marine Corps be turned away for self harm?", "answer": true, "facts": [ "Self harm is when someone intentionally causes injury or pain to themselves.", "Self harm is almost always related to a mental health condition.", "Those experiencing mental wellness related illnesses cannot join the Marines." ], "decomposition": [ "What medical condition is self harm usually related to?", "Are people who suffer from #1 turned away from the US Marine Corps?" 
], "evidence": [ [ [ [ "Self-harm-3" ] ], [ [ "United States Marine Corps-2" ], "no_evidence", "operation" ] ], [ [ [ "Self-harm-22" ] ], [ "no_evidence" ] ], [ [ [ "Self-harm-16", "Self-harm-17" ] ], [ "no_evidence" ] ] ] }, { "qid": "b978c4051673fd21035b", "term": "Aldi", "description": "Germany-based supermarket chain", "question": "Are Aldi's foods discounted due to being out of date?", "answer": false, "facts": [ "Aldi cuts costs by charging for bags, buying in bulk, and by avoiding brand name items. ", "Aldi removes spoiled or expired foods from their shelves immediately upon identification." ], "decomposition": [ "How does Aldi cut cost?", "Is selling discounted food part of #1?" ], "evidence": [ [ [ [ "Aldi-33" ] ], [ [ "Aldi-25", "Aldi-27" ], "operation" ] ], [ [ [ "Aldi-5" ] ], [ "operation" ] ], [ [ [ "Aldi-27" ] ], [ [ "Aldi-27" ] ] ] ] }, { "qid": "4173af9c4eeb33828e37", "term": "Frost", "description": "coating or deposit of ice that may form in humid air in cold conditions, usually overnight", "question": "Would it be unusual to see frost in September in Texas?", "answer": true, "facts": [ "Texas is a Southern state of the United States, known for high heat.", "On average, Texas is between 68 and 89 degrees during the month of September.", "Frost forms at 32 degrees or lower." ], "decomposition": [ "What are the average temperatures in Texas during the month of September?", "What temperature does frost form at?", "Is #1 warmer than #2?" ], "evidence": [ [ [ [ "Climate of Dallas-3" ], "no_evidence" ], [ [ "Frost (temperature)-1", "Frost-1" ] ], [ "operation" ] ], [ [ [ "Texas-29", "Texas-30", "Texas-31" ] ], [ [ "Dew point-1", "Frost-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Climate of Texas-3" ], "no_evidence" ], [ [ "Dew point-1", "Frost-20", "Frost-5" ] ], [ "operation" ] ] ] }, { "qid": "25ae21677aaed6aa1f06", "term": "Breakdancing", "description": "Style of street dance", "question": "Did breakdancing grow in popularity during WW2?", "answer": false, "facts": [ "Breakdancing was created by the African American youth in the early 1970s.", "World War II was a global war that lasted from 1939 to 1945." ], "decomposition": [ "When did break-dancing experience a growth in popularity?", "Through which period did World War II take place?", "Is #1 within #2?" ], "evidence": [ [ [ [ "Breakdancing-2" ] ], [ [ "World War II-1" ] ], [ "operation" ] ], [ [ [ "Breakdancing-2" ] ], [ [ "World War II-1" ] ], [ "operation" ] ], [ [ [ "Breakdancing-2" ] ], [ [ "World War II-1" ] ], [ "operation" ] ] ] }, { "qid": "26cfd3b238056cc1dc82", "term": "Vitamin C", "description": "nutrient found in citrus fruits and other foods", "question": "Can vitamin C rich fruits be bad for health?", "answer": true, "facts": [ "Oranges are fruits that are rich in vitamin C.", "Oranges are very acidic fruits that can wear down tooth enamel.", "Too much Vitamin C can cause nausea and diarrhea." ], "decomposition": [ "Which vitamin are oranges rich in?", "Is #1 the same as vitamin C?", "Can excess of #1 be harmful to a person's health?", "Can the acidity of oranges have adverse effects on human consumers?", "Are #2, #3 and #4 positive?" 
], "evidence": [ [ [ [ "Orange (fruit)-20" ] ], [ "operation" ], [ [ "Vitamin C megadosage-7" ] ], [ [ "Citric acid-39", "Orange (fruit)-41" ] ], [ "operation" ] ], [ [ [ "Orange (fruit)-20" ] ], [ "operation" ], [ "no_evidence", "operation" ], [ [ "Citric acid-39" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Vitamin C-69" ] ], [ [ "Vitamin C-69" ] ], [ [ "Vitamin C-22" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "d949f30354c842f5562a", "term": "Monarch butterfly", "description": "milkweed butterfly in the family Nymphalidae", "question": "Could a monarch butterfly rule a kingdom?", "answer": false, "facts": [ "A monarch butterfly would be easily killed by a human due to its small size.", "A monarch butterfly does not have the intellectual capacity to rule over a kingdom of humans." ], "decomposition": [ "Does a monarch butterfly have the physical capacity to rule over humans?", "Does a monarch butterfly have the intellectual ability to rule over humans?", "Is #1 or #2 positive?" ], "evidence": [ [ [ [ "Monarch butterfly-1" ] ], [ [ "Monarch butterfly-1" ] ], [ [ "Monarch butterfly-1" ] ] ], [ [ [ "Monarch butterfly-1" ] ], [ [ "Butterfly-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Monarch butterfly-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "8b9f3835041b7bbc30a3", "term": "Crucifixion", "description": "Method of capital punishment in which the victim is tied or nailed to a large wooden beam and left to hang until eventual death", "question": "If it socially acceptable to wear an icon depicting crucifixion? ", "answer": true, "facts": [ "The crucifixion of Jesus is a common sign used by Catholics and Christian denominations. ", "Many jewelry stores offer necklaces with the Crucifixion of Jesus Christ." ], "decomposition": [ "Which common symbol is used by Catholics to depict crucifixion?", "Is #1 commonly found in jewelry stores?" ], "evidence": [ [ [ [ "Christian symbolism-6" ], "no_evidence" ], [ [ "Christian cross variants-3" ], "operation" ] ], [ [ [ "Crucifix-2" ] ], [ [ "Crucifix-12" ], "no_evidence", "operation" ] ], [ [ [ "Crucifixion-2" ] ], [ "no_evidence" ] ] ] }, { "qid": "5f96f27de4c8cdafc070", "term": "Brake", "description": "mechanical device that inhibits motion", "question": "Can people die from brake failure?", "answer": true, "facts": [ "Brake failure is the inability of brakes to function.", "When vehicles experience brake failure, they cannot be stopped safely, which results in a crash.", "People die in vehicular crashes." ], "decomposition": [ "What is a brake failure?", "What can #1 lead to in a car?", "Have people died from #2?" ], "evidence": [ [ [ [ "Disc brake-63" ] ], [ [ "Traffic collision-1", "Traffic collision-50" ] ], [ [ "Falco (musician)-22" ], "operation" ] ], [ [ [ "Brake-1" ], "no_evidence" ], [ [ "Traffic collision-1", "Traffic collision-24" ], "no_evidence" ], [ [ "Traffic collision-3" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "5a781808bf59dc835bd2", "term": "Humour", "description": "tendency of experiences to provoke laughter and provide amusement", "question": "Was the man who played the male lead in Mrs. Doubtfire known for his humour?", "answer": true, "facts": [ "Robin Williams played the male lead in Mrs. Doubtfire.", "Robin Williams had a prolific standup and film comedy career." ], "decomposition": [ "Who played the male lead in MRs. Doubtfire?", "What was the career of #1?", "Is #2 a humorous job?" ], "evidence": [ [ [ [ "Mrs. 
Doubtfire-1" ] ], [ [ "Robin Williams-1" ] ], [ "operation" ] ], [ [ [ "Mrs. Doubtfire-3" ] ], [ [ "Robin Williams-14" ] ], [ "operation" ] ], [ [ [ "Mrs. Doubtfire-1" ] ], [ [ "Robin Williams-1" ] ], [ [ "Comedian-1" ], "operation" ] ] ] }, { "qid": "16e41c83724a949fb983", "term": "Stone Cold Steve Austin", "description": "American professional wrestler", "question": "Coud every wife of Stone Cold Steve Austin fit in Audi TT?", "answer": true, "facts": [ "Stone Cold Steve Austin has been married to 4 different women.", "The Audi TT is a sports car with 4 seats." ], "decomposition": [ "How many wives has Stone Cold Steve Austin had?", "How many people can sit in an Audi TT", "Is #2 at least #1?" ], "evidence": [ [ [ [ "Stone Cold Steve Austin-67" ] ], [ [ "Audi TT-2" ] ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-67" ] ], [ [ "2+2 (car body style)-1", "Audi TT-2" ] ], [ "operation" ] ], [ [ [ "Stone Cold Steve Austin-67" ] ], [ [ "Audi TT-1" ] ], [ "operation" ] ] ] }, { "qid": "6662006c915a62db5a89", "term": "Hamlet", "description": "tragedy by William Shakespeare", "question": "Is Hamlet more common on IMDB than Comedy of Errors?", "answer": true, "facts": [ "IMDB, The Internet Movie Database, catalogs movies.", "IMDB lists 6 Hamlet movie adaptations.", "IMDB lists 5 Comedy of Errors movie adaptations." ], "decomposition": [ "How many listings of Hamlet are there on IMDB?", "How many listing of Comedy of Errors is there on IMDB?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Hamlet-3" ], "no_evidence" ], [ [ "The Comedy of Errors-24", "The Comedy of Errors-25" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Hamlet-92" ], "no_evidence" ], [ [ "The Comedy of Errors-24" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5278f0501c540dff6407", "term": "Freemasonry", "description": "group of fraternal organizations", "question": "Has Freemasonry been represented on the Moon?", "answer": true, "facts": [ "Freemasonry is a group of fraternal organizations rooted in fraternities of stonemasons of the fourteenth century.", "Buzz Aldrin was initiated into the Freemason fraternity in 1955", "Buzz Aldrin and Neil Armstrong were the first men to land on the moon in 1969." ], "decomposition": [ "What occupation goes into space?", "Have any #1 been Free Masons?", "Have any people listed in #2 been to the moon?" ], "evidence": [ [ [ [ "Astronaut-1" ] ], [ [ "James Irwin-1", "James Irwin-23" ] ], [ [ "James Irwin-1" ] ] ], [ [ [ "Astronaut-1" ] ], [ [ "Buzz Aldrin-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Astronaut-1" ] ], [ [ "John Glenn-62" ] ], [ [ "John Glenn-3" ], "operation" ] ] ] }, { "qid": "757a33340ccf017e9719", "term": "Legend", "description": "Traditional story of heroic humans.", "question": "Do urban legends always have to occur in cities?", "answer": false, "facts": [ "An urban legend is commonly accepted folk lore. ", "Urban means related to a city.", "Many urban legends occur in rural towns that lack dense population.", "Most Urban legends are unverified due to lack of witnesses. " ], "decomposition": [ "In what settings do urban legends occur?", "Does #1 only consist of urban environments?" 
], "evidence": [ [ [ [ "Chupacabra-1", "Chupacabra-5", "Urban legend-1" ], "no_evidence" ], [ [ "Urban area-1" ], "operation" ] ], [ [ [ "Urban legend-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Legend-15" ] ], [ [ "Legend-15" ] ] ] ] }, { "qid": "93264708b0d600fb9650", "term": "Copper", "description": "Chemical element with atomic number 29", "question": "Would a fungal life-form be threatened by a pigment from copper?", "answer": true, "facts": [ "Verdigris is a pigment made from copper", "Verdigris is also used as a fungicide " ], "decomposition": [ "Which element is the pigment verdigris derived from?", "Is #1 copper and verdigris also used as a fungicide?" ], "evidence": [ [ [ [ "Verdigris-1" ] ], [ [ "Verdigris-6" ] ] ], [ [ [ "Verdigris-1" ] ], [ [ "Copper-5", "Verdigris-6" ], "operation" ] ], [ [ [ "Verdigris-1" ] ], [ [ "Copper-5", "Verdigris-6" ], "operation" ] ] ] }, { "qid": "810d006c5cb0e27081c8", "term": "Goldstone Deep Space Communications Complex", "description": "United States historic place", "question": "Do the telescopes at Goldstone Deep Space Communications Complex work the night shift?", "answer": true, "facts": [ "The night shift is considered to be the hours of 11pm - 7am.", "The telescopes at Goldstone Deep Space Communications Complex are running 24 hours a day." ], "decomposition": [ "What hours are typically considered the night shift?", "What hours do the telescopes at Goldstone Deep Space Communications Complex run?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Shift work-11" ] ], [ [ "Goldstone Deep Space Communications Complex-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Shift work-11" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Astronomy-2", "Goldstone Deep Space Communications Complex-1" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "ee9770dcce6a42b4c97e", "term": "Jackson Pollock", "description": "American painter", "question": "Was Jackson Pollock straight edge?", "answer": false, "facts": [ "Jackson Pollock was a famous painter.", "Straight Edge is a punk inspired lifestyle who's adherents abstain from alcohol and drugs.", "Jackson Pollock was an alcoholic.", "Jackson Pollock died in a car crash while driving under the influence of alcohol." ], "decomposition": [ "What substances do people avoid if they are straight edge?", "Did Jackson Pollock always avoid #1?" ], "evidence": [ [ [ [ "Straight edge-5" ] ], [ [ "Jackson Pollock-17" ], "operation" ] ], [ [ [ "Straight edge-1" ] ], [ [ "Jackson Pollock-3" ] ] ], [ [ [ "Straight edge-1" ] ], [ [ "Jackson Pollock-3" ], "operation" ] ] ] }, { "qid": "5812637ba98cba20a9af", "term": "Mercedes-Benz", "description": "automobile brand of Daimler AG", "question": "Is it legal for a licensed child driving Mercedes-Benz to be employed in US?", "answer": true, "facts": [ "The minimum age for driving in the US is 16.", "Child labor laws in the US require a child to be 14 years of age or older to work." ], "decomposition": [ "What is the minimum driving age in the US?", "What is the minimum age for someone to be employed in the US?", "Is #1 greater than or equal to #2?" 
], "evidence": [ [ [ [ "Graduated driver licensing-35" ], "no_evidence" ], [ [ "Child labour law-10" ] ], [ "operation" ] ], [ [ [ "Driver's licenses in the United States-9" ] ], [ [ "Child labor laws in the United States-2" ] ], [ "operation" ] ], [ [ [ "Driver's licenses in the United States-9" ] ], [ [ "Child labour-66", "Legal working age-1" ] ], [ "operation" ] ] ] }, { "qid": "cd920b1595f9bc70126f", "term": "Sofer", "description": "profession", "question": "Would a sofer be a bad job for a vegan?", "answer": true, "facts": [ "A sofer is a transcriber of religious texts, and has its origins in Judaism.", "Sofers transcribe texts on a material known as vellum.", "Vellum is made of materials derived from calfskin.", "Vegans do not use any animal products." ], "decomposition": [ "What materials do sofers use?", "What products do vegans refuse to use?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Marshmallow sofa-5" ] ], [ [ "Veganism-9" ] ], [ [ "Leather-4" ] ] ], [ [ [ "Sofer-1" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ], [ [ [ "Parchment-1" ] ], [ [ "Veganism-1" ] ], [ "operation" ] ] ] }, { "qid": "cce78efa2e5a91f1a7eb", "term": "Year", "description": "Orbital period of the Earth around the Sun", "question": "Can an African Elephant get pregnant twice in a year?", "answer": false, "facts": [ "There are 365 days in one year.", "It takes around 645 days for an African Elephant to give birth to one baby elephant." ], "decomposition": [ "What is the gestation period of an African elephant?", "Is #1 less than a year?" ], "evidence": [ [ [ [ "African elephant-26" ] ], [ "operation" ] ], [ [ [ "African elephant-28" ] ], [ "operation" ] ], [ [ [ "African elephant-26" ] ], [ "operation" ] ] ] }, { "qid": "1f7ba99375d4fa45119f", "term": "JAG (TV series)", "description": "American legal drama television series (1996-2005)", "question": "Did Joan Crawford guest star on JAG (TV series)?", "answer": false, "facts": [ "JAG began airing in 1995.", "Joan Crawford died in 1977." ], "decomposition": [ "When did Joan Crawford's career as a television actress come to an end?", "When was the TV series JAG launched?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Joan Crawford-64" ] ], [ [ "NCIS (TV series)-19" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Joan Crawford-61", "Joan Crawford-64" ] ], [ [ "JAG (season 1)-1" ] ], [ "operation" ] ], [ [ [ "Joan Crawford-37" ] ], [ [ "JAG (season 1)-1" ] ], [ "operation" ] ] ] }, { "qid": "a3acf1afdbeea87948d7", "term": "Elijah Cummings", "description": "U.S. Representative from Maryland", "question": "Will Elijah Cummings vote for Joe Biden in the next presidential elections?", "answer": false, "facts": [ "The next presidential elections will take place in November of 2020", "Elijah Cummings passed away in October of 2019" ], "decomposition": [ "When will the next presidential election be held?", "When did Elijah Cummings pass away?", "Is #2 after #1?" 
], "evidence": [ [ [ [ "2020 United States presidential election-1" ] ], [ [ "Elijah Cummings-1" ] ], [ "operation" ] ], [ [ [ "2020 United States presidential election-1" ] ], [ [ "Elijah Cummings-1" ] ], [ "operation" ] ], [ [ [ "2020 United States presidential election-1" ] ], [ [ "Elijah Cummings-1" ] ], [ "operation" ] ] ] }, { "qid": "cfba036236507374976d", "term": "Coen brothers", "description": "American filmmakers", "question": "Do people watching Coen brothers films in Guinea Bissau need subtitles?", "answer": true, "facts": [ "The Coen brothers direct films primarily using English", "The primary languages used in Guinea Bissau are Creole, native African languages, and Portuguese" ], "decomposition": [ "In what country do the Coen Brothers make films?", "What is the primary language spoken in #1?", "What is the primary language spoken in Guinea Bissau?", "Is #3 different than #2?" ], "evidence": [ [ [ [ "Coen brothers-1" ] ], [ [ "Languages of the United States-1" ] ], [ [ "Guinea-Bissau-3" ] ], [ "operation" ] ], [ [ [ "Coen brothers-1" ] ], [ [ "American English-2" ] ], [ [ "Guinea-Bissau-3" ] ], [ "operation" ] ], [ [ [ "Coen brothers-13" ], "no_evidence" ], [ [ "United States-80" ] ], [ [ "Guinea-Bissau-3" ] ], [ "operation" ] ] ] }, { "qid": "59978a3e99a498e5567b", "term": "Great Pyramid of Giza", "description": "Largest pyramid in the Giza Necropolis, Egypt", "question": "Is Great Pyramid of Giza the last wonder of its kind?", "answer": true, "facts": [ "The Great Pyramid of Giza is classified as one of the Seven Wonders of the Ancient World.", "Five of the ancient wonders were destroyed, and a sixth (the Hanging Gardens of Babylon) may not have existed.", "The Great Pyramid of Giza is largely intact as of 2020." ], "decomposition": [ "What are the wonders of the ancient world that are either destroyed or non-existent?", "What is the wonder of the ancient world that is still intact?", "Has #2 survived a much longer time than #1?" ], "evidence": [ [ [ [ "Seven Wonders of the Ancient World-1" ] ], [ [ "Great Pyramid of Giza-1" ] ], [ "operation" ] ], [ [ [ "Seven Wonders of the Ancient World-1" ] ], [ [ "Great Pyramid of Giza-1" ] ], [ "operation" ] ], [ [ [ "Seven Wonders of the Ancient World-1" ] ], [ [ "Great Pyramid of Giza-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "85ed1db2e2c89f3b93ed", "term": "Hydrogen", "description": "Chemical element with atomic number 1", "question": "Hydrogen's atomic number squared exceeds number of Spice Girls?", "answer": false, "facts": [ "Hydrogen is the first element and has an atomic number of one.", "To square a number, you multiply it by itself.", "The Spice Girls has five members." ], "decomposition": [ "What is the atomic number of hydrogen?", "How many people are in the Spice Girls band?", "Is the square of #1 greater than #2?" ], "evidence": [ [ [ [ "Hydrogen-1" ] ], [ [ "Spice Girls-25" ] ], [ "operation" ] ], [ [ [ "Hydrogen-1" ] ], [ [ "Spice Girls-1" ] ], [ "operation" ] ], [ [ [ "Hydrogen-1" ] ], [ [ "Spice Girls-1" ] ], [ "operation" ] ] ] }, { "qid": "ede4abdab566098acc1c", "term": "Fibonacci number", "description": "integer in the infinite Fibonacci sequence", "question": "If you have a serious injury in Bangladesh, would you probably dial a Fibonacci number?", "answer": false, "facts": [ "The number for emergency services in Bangladesh is 999", "999 is not a Fibonacci sequence integer " ], "decomposition": [ "What number would you dial for help with an emergency in Bangladesh?", "Is #1 a Fibonacci number?" 
], "evidence": [ [ [ [ "999 (emergency telephone number)-2" ] ], [ "no_evidence" ] ], [ [ [ "Emergency telephone number-27" ], "no_evidence" ], [ [ "Fibonacci-12" ], "operation" ] ], [ [ [ "999 (emergency telephone number)-2" ] ], [ [ "Fibonacci-12" ], "operation" ] ] ] }, { "qid": "36c497860f72d148e4e8", "term": "Donkey", "description": "El burrito de sheck", "question": "Are Donkeys part of Christmas celebrations?", "answer": true, "facts": [ "\"Dominic The Donkey\" is a popular Christmas song.", "\"Nestor The Ling Eared Christmas Donkey\" is a popular Christmas Movie." ], "decomposition": [ "Which animals have been popularly recognized as part of the Christmas culture?", "Are donkeys one of #1?" ], "evidence": [ [ [ [ "Nestor, the Long-Eared Christmas Donkey-2" ] ], [ "operation" ] ], [ [ [ "Christmas-1" ], "no_evidence" ], [ [ "Nativity of Jesus in art-12" ], "no_evidence", "operation" ] ], [ [ [ "Nativity scene-20" ] ], [ "operation" ] ] ] }, { "qid": "5ffcf33878ad664f3a1b", "term": "Medicine", "description": "The science and practice of the diagnosis, treatment, and prevention of physical and mental illnesses", "question": "Did polio medicine save the life of polio vaccine creator?", "answer": false, "facts": [ "Jonas Salk developed the first polio vaccine.", "Jonas Salk died of a heart attack in 1995.", "Heart attacks are commonly treated with beta blockers." ], "decomposition": [ "Who created polio vaccine?", "Did #1 have his life saved by the use of polio vaccine?" ], "evidence": [ [ [ [ "Polio vaccine-3" ] ], [ [ "Jonas Salk-43" ] ] ], [ [ [ "Polio vaccine-30" ] ], [ [ "Jonas Salk-43" ], "no_evidence", "operation" ] ], [ [ [ "Polio vaccine-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "d2af086597e0e03a4f57", "term": "Latitude", "description": "The angle between zenith at a point and the plane of the equator", "question": "Is latitude required to determine the coordinates of an area?", "answer": true, "facts": [ "Longitude is one of the required data points needed for determining coordinates.", "Latitude is the other angle required to determine coordinates of an area. " ], "decomposition": [ "What are the two sets of data points that determine coordinates of a location?", "Is latitude one of the answers to #1?" ], "evidence": [ [ [ [ "Geographic coordinate system-15" ] ], [ "operation" ] ], [ [ [ "Geographic coordinate system-4" ] ], [ "operation" ] ], [ [ [ "Geographic coordinate system-15" ] ], [ "operation" ] ] ] }, { "qid": "cf1365a8abd4e35c2f0b", "term": "Asiana Airlines", "description": "airline in South Korea", "question": "Can Harry Potter book a flight on Asiana Airlines?", "answer": false, "facts": [ "Asiana Airlines is the second largest airline in South Korea", "Harry Potter is a fictional character" ], "decomposition": [ "Which universe does Harry Potter exist in?", "Does Asiana Airlines exist in #1?" ], "evidence": [ [ [ [ "Fiction-1", "Fictional universe of Harry Potter-1" ] ], [ [ "Asiana Airlines-1", "Universe-8" ], "operation" ] ], [ [ [ "Fictional universe of Harry Potter-1" ] ], [ [ "Asiana Airlines-1" ] ] ], [ [ [ "Harry Potter-1" ] ], [ [ "Asiana Airlines-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "cea31f260dfec9aa0f1f", "term": "Monkey", "description": "Animal of the \"higher primates\" (the simians), but excluding the apes", "question": "Would a monkey outlive a human being on average?", "answer": false, "facts": [ "The average human lifespan is 79 years.", "The longest-lived monkey species have a lifespan about 45-50 years in captivity." 
], "decomposition": [ "How long does the average human live?", "What is the longest lifespan of a monkey?", "Is #2 larger than #1?" ], "evidence": [ [ [ [ "Life expectancy-2" ] ], [ [ "Monkey-20" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Old age-99" ] ], [ [ "Night monkey-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Life expectancy-2" ] ], [ [ "Little Mama-2" ] ], [ "operation" ] ] ] }, { "qid": "47fc029ca411d1f47914", "term": "Olive oil", "description": "liquid fat extracted by pressing olives", "question": "Would Carmine's kitchen staff be panicked if they had no olive oil?", "answer": true, "facts": [ "Carmine's is an Italian restaurant.", "Olive oil is a large component of a lot of Italian cooking." ], "decomposition": [ "What kind of food does Carmine's serve?", "What are essential ingredients in #1?", "Is olive oil listed in #2?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Pizza in the United States-3" ] ], [ "operation" ] ], [ [ [ "Carmine Romano-2" ], "no_evidence" ], [ [ "Italian cuisine-16" ] ], [ "operation" ] ] ] }, { "qid": "d825d90c0c4d555b3bea", "term": "Black pepper", "description": "species of plant", "question": "Are ground bell peppers the main ingredient of black pepper?", "answer": false, "facts": [ "Black pepper is made from black peppercorns.", "Black peppercorns grow on the Piper Nigrum plant.", "Bell peppers are from the capsicum annuum plant." ], "decomposition": [ "What is used to make black pepper?", "Is #1 the same thing as bell pepper?" ], "evidence": [ [ [ [ "Black pepper-1" ] ], [ [ "Bell pepper-1" ], "operation" ] ], [ [ [ "Black pepper-1" ] ], [ [ "Bell pepper-1" ], "operation" ] ], [ [ [ "Black pepper-1" ] ], [ [ "Bell pepper-1" ] ] ] ] }, { "qid": "5b8d0f04c83745464141", "term": "Snow leopard", "description": "species of mammal", "question": "Can a snow leopard eat twice its own body weight?", "answer": true, "facts": [ "The average snow leopard weighs 72 pounds.", "The favorite food of snow leopards is an ibex.", "The average weight of an ibex is 150 pounds." ], "decomposition": [ "How much do snow leopards weigh on average?", "What is a snow leopard's favorite food?", "How much does #2 weigh?", "Is #3 at least twice as much as #1?" ], "evidence": [ [ [ [ "Snow leopard-17" ] ], [ [ "Snow leopard-31" ] ], [ [ "Bharal-3" ] ], [ [ "Bharal-3", "Snow leopard-17" ] ] ], [ [ [ "Snow leopard-17" ] ], [ [ "Snow leopard-31" ] ], [ [ "Snow leopard-31" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Snow leopard-17" ] ], [ [ "Snow leopard-31" ] ], [ [ "Snow leopard-31" ] ], [ "operation" ] ] ] }, { "qid": "31328c97391c0e047f44", "term": "Fibonacci number", "description": "integer in the infinite Fibonacci sequence", "question": "Are there five different single-digit Fibonacci numbers?", "answer": true, "facts": [ "The first six numbers in the Fibonacci sequence are 1,1,2,3,5,8.", "Since 1 is doubled, there are only five different single digit numbers." ], "decomposition": [ "What are the single-digit numbers in the Fibonacci sequence?", "How many unique numbers are in #1?", "Does #2 equal 5?" 
], "evidence": [ [ [ [ "Fibonacci-12" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Random Fibonacci sequence-4" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Fibonacci-12" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "085b0b414514ce251e76", "term": "Kinetic energy", "description": "Energy in motion Or Object In Motion", "question": "Does taking ukemi halt kinetic energy?", "answer": false, "facts": [ "\"Taking ukemi\" refers to the art of falling or receiving in martial arts", "Taking ukemi usually requires the person doing it to move their body in a way that minimizes injury, and so it uses kinetic energy" ], "decomposition": [ "What does the term 'taking ukemi' refer to?", "What state of an object indicates exertion of kinetic energy?", "Can #1 be executed while avoiding #2?" ], "evidence": [ [ [ [ "Uke (martial arts)-3" ] ], [ [ "Kinetic energy-1" ] ], [ "operation" ] ], [ [ [ "Uke (martial arts)-3" ] ], [ [ "Kinetic energy-1" ] ], [ "no_evidence" ] ], [ [ [ "Uke (martial arts)-3" ] ], [ [ "Kinetic energy-1" ] ], [ [ "Uke (martial arts)-4" ], "operation" ] ] ] }, { "qid": "a9d3a5f8cd6eb6b2c19a", "term": "Sahara", "description": "desert in Africa", "question": "Can Spartina Patens thrive in the Sahara Desert?", "answer": false, "facts": [ "Spartina Patens is a type of cordgrass that grows in salt marshes.", "Spartina Patens requires a marsh-like environment to thrive.", "The Sahara Desert is known for being dry and very hot." ], "decomposition": [ "What soil conditions are suitable for the growth of Spartina Patens?", "Is #1 likely to be present in the Sahara desert?" ], "evidence": [ [ [ [ "Spartina patens-2" ] ], [ [ "Sahara-2" ], "operation" ] ], [ [ [ "Spartina patens-2" ] ], [ "operation" ] ], [ [ [ "Spartina patens-1" ] ], [ [ "Sahara-1" ] ] ] ] }, { "qid": "e5f13851532305d2f932", "term": "Cucumber", "description": "species of plant", "question": "Is growing seedless cucumber good for a gardener with entomophobia?", "answer": true, "facts": [ "Seedless cucumber fruit does not require pollination", "Cucumber plants need insects to pollinate them", "Entomophobia is a fear of insects" ], "decomposition": [ "What are people with Entomophobia fearful of?", "How do #1's usually help in the process of gardening?", "Do seedless cucumbers not require #2?" ], "evidence": [ [ [ [ "Entomophobia-1" ] ], [ [ "Pollination-4" ] ], [ [ "Cucumber-3" ], "operation" ] ], [ [ [ "Entomophobia-1" ] ], [ [ "Pollination-1" ], "no_evidence" ], [ [ "Cucumber-3" ], "operation" ] ], [ [ [ "Entomophobia-1" ] ], [ [ "Cucumber beetle-1" ] ], [ [ "Cucumber-4" ], "operation" ] ] ] }, { "qid": "90e6d4060a0b911fe436", "term": "PayPal", "description": "Online financial services company based in San Jose, California", "question": "Would it be unusual to use paypal for drug deals?", "answer": true, "facts": [ "Paypal prohibits the use of their platform for drugs or drug paraphernalia. ", "Using paypal leaves a digital footprint of any drug purchase." ], "decomposition": [ "Which kind of payments are prohibited on Paypal?", "Does #1 include payment for drug deals?" 
], "evidence": [ [ [ "no_evidence" ], [ "operation" ] ], [ [ [ "PayPal-55" ] ], [ "operation" ] ], [ [ [ "Reception of WikiLeaks-37" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "3f3050d10948d4d99cff", "term": "Casio", "description": "Japanese electronics company", "question": "Is Casio's founding year a composite number?", "answer": true, "facts": [ "Electronics company Casio was founded in 1946.", "A composite number is a number that can be divided by numbers other than 1 and itself.", "1946 can be divided by 278 and 7." ], "decomposition": [ "What condition(s) makes a number composite?", "When was Casio founded?", "Does #2 satisfy #1?" ], "evidence": [ [ [ [ "Composite number-1" ] ], [ [ "Casio-2" ] ], [ "operation" ] ], [ [ [ "Composite number-2" ] ], [ [ "Casio-2" ] ], [ "operation" ] ], [ [ [ "Condition number-4" ], "no_evidence" ], [ [ "Casio-2" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "0e2302c5416dbec922e6", "term": "Al-Farabi", "description": "Philosopher in 10th century Central Asia", "question": "Would ISIS agree with Al-Farabi's religious sect?", "answer": false, "facts": [ "The philosopher Al-Farabi was believed to be a Shia Muslim.", "ISIS is an extremist Sunni Muslim group.", "The Sunni and Shia are constantly at war—Sunni often use car bombs, while Shia favor death squads." ], "decomposition": [ "What religious sect did Al-Farabi belong to?", "What religious sect does ISIS belong to?", "Do #1 and #2 avoid conflict with each other?" ], "evidence": [ [ [ [ "Al-Farabi-11" ] ], [ [ "Islamic State of Iraq and the Levant-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Al-Farabi-11" ] ], [ [ "Islamic State of Iraq and the Levant-1" ] ], [ [ "Shia–Sunni relations-4" ] ] ], [ [ [ "Al-Farabi-11" ], "no_evidence" ], [ [ "Islamic State of Iraq and the Levant-64" ] ], [ [ "Shia–Sunni relations-1" ], "operation" ] ] ] }, { "qid": "610c03ac321041d868e2", "term": "Lord Voldemort", "description": "Fictional character of Harry Potter series", "question": "Would half muggle wizards fear Lord Voldemort?", "answer": true, "facts": [ "Lord Voldemort created a slur to describe half muggle wizards, calling them mudbloods.", "Lord Voldemort sought to purge the wizarding world of half muggle wizards through death." ], "decomposition": [ "What did Lord Voldemort seek to do to half muggle wizards?", "Is #1 enough to instill fear in them?" ], "evidence": [ [ [ [ "Lord Voldemort-2" ] ], [ "no_evidence" ] ], [ [ [ "Lord Voldemort-2" ] ], [ "operation" ] ], [ [ [ "Lord Voldemort-4", "Lord Voldemort-6" ], "no_evidence" ], [ [ "Fear-1" ], "operation" ] ] ] }, { "qid": "f51a52b400daa8f80388", "term": "Casablanca", "description": "City / State in Casablanca-Settat, Morocco", "question": "Is it hard to get a BLT in Casablanca?", "answer": true, "facts": [ "A BLT is a sandwich consisting of bacon, lettuce and tomato", "Casablanca is predominantly Muslim", "Islam forbids the consumption of pork and pork products" ], "decomposition": [ "What is the predominant religion of Casablanca?", "What dietary restrictions does #1 impose?", "What all goes on a BLT?", "Are there items common to both #2 and #3?" 
], "evidence": [ [ [ [ "Casablanca-43" ] ], [ [ "Islamic culture-45" ] ], [ [ "BLT-1" ] ], [ "operation" ] ], [ [ [ "Casablanca-43" ] ], [ [ "Haram-13" ] ], [ [ "BLT-7" ] ], [ "operation" ] ], [ [ [ "Casablanca-43" ] ], [ [ "Islamic culture-45" ] ], [ [ "BLT-1" ] ], [ [ "Bacon-1" ] ] ] ] }, { "qid": "25e1fe4b511b0194c8ac", "term": "Construction worker", "description": "tradesman, labourer, or professional employed in the physical construction of the built environment", "question": "Is a construction worker required to build a portfolio?", "answer": false, "facts": [ "Construction workers build physical constructs, usually buildings or structures", "A portfolio is a collection of items of a similar type, including art, writing, or financial investments" ], "decomposition": [ "What is a portfolio?", "Who are people that builds #1?", "Is a construction worker among #2?" ], "evidence": [ [ [ [ "Career portfolio-1" ] ], [ [ "Career portfolio-8" ] ], [ "operation" ] ], [ [ [ "Artist's portfolio-1" ] ], [ [ "Artist-1" ] ], [ [ "Construction worker-1" ] ] ], [ [ [ "Artist's portfolio-1", "Portfolio (finance)-1" ] ], [ [ "Artist's portfolio-1", "Portfolio (finance)-2" ] ], [ "operation" ] ] ] }, { "qid": "83dc57cde5122f0a5d19", "term": "Lactobacillus", "description": "genus of bacteria", "question": "Are vinegar pickled cucumbers rich in lactobacillus?", "answer": false, "facts": [ "Pickles made with vinegar are not probiotic and are simply preserved.", "Pickles made through a soak in a salt brine solution begin to ferment because of lactobacillus. " ], "decomposition": [ "What natural process is lactobacillus associated with?", "Do pickles made with vinegar undergo #1?" ], "evidence": [ [ [ [ "Lactobacillus-11" ] ], [ [ "Pickling-2" ] ] ], [ [ [ "Lactobacillus-11" ] ], [ [ "Pickling-3" ], "no_evidence", "operation" ] ], [ [ [ "Lactobacillus-11" ] ], [ [ "Lactobacillus brevis-8" ] ] ] ] }, { "qid": "39e2e14c7916898a717d", "term": "Ammonia", "description": "Chemical compound of nitrogen and hydrogen", "question": "Would a dog easily notice ammonia?", "answer": true, "facts": [ "Ammonia has a characteristic pungent smell.", "Dogs have an extremely strong sense of smell, almost 40 times as sensitive as humans." ], "decomposition": [ "What common chemical has a characteristic pungent smell?", "What common pet has a sense of smell ten thousand to a hundred thousand times better than humans?", "Can #2 sense #1?" ], "evidence": [ [ [ [ "Ammonia-1" ] ], [ [ "Dog anatomy-117" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ammonia-1" ], "no_evidence" ], [ [ "Dog anatomy-117" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ammonia-1" ] ], [ [ "Tracking (dog)-3" ] ], [ "no_evidence" ] ] ] }, { "qid": "3900eb40c0ee1e1a5b6d", "term": "Curling", "description": "Team sport played on ice", "question": "Are the brooms from curling good for using on house floors?", "answer": false, "facts": [ "Curling brooms are designed for use within the sport specifically. ", "Curling brooms do not have traditional bristle heads, and the heads are costly to replace. " ], "decomposition": [ "What are the characteristics of brooms used in curling?", "What are the characteristics of brooms used for house cleaning?", "Does #1 completely match #2?" 
], "evidence": [ [ [ [ "Curling-31" ] ], [ [ "Broom-2" ] ], [ "operation" ] ], [ [ [ "Curling-31" ], "no_evidence" ], [ [ "Broom-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Curling-31" ] ], [ [ "Broom-1" ] ], [ "operation" ] ] ] }, { "qid": "14f5ccb693d62b4397ee", "term": "Christopher Walken", "description": "American actor", "question": "Is Christopher Walken close to achieving EGOT status?", "answer": false, "facts": [ "EGOT refers to people that have won an Emmy, a Grammy, an Oscar, and a Tony Award.", "Christopher Walken won the Oscar in 1979 for Best Actor in a Supporting Role.", "Christopher Walken was nominated for two Tony Awards but has never won.", "Christopher Walken was nominated for an Emmy Award but has never won.", "Christopher Walken has never been nominated for a Grammy." ], "decomposition": [ "What awards are included in EGOT?", "What entertainment awards has Christopher Walken won?", "Do the awards listed in #2 belong to at least 3 different awards listed in #1?" ], "evidence": [ [ [ [ "EGOT (disambiguation)-1" ] ], [ [ "Christopher Walken-1" ] ], [ [ "Christopher Walken-1" ] ] ], [ [ [ "Emmy Award-2" ] ], [ [ "Christopher Walken-2", "Christopher Walken-43" ] ], [ "operation" ] ], [ [ [ "EGOT (disambiguation)-1" ] ], [ [ "Christopher Walken-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "30cca3728fe936ff1bae", "term": "Black Sea", "description": "Marginal sea of the Atlantic Ocean between Europe and Asia", "question": "Can sunlight travel to the deepest part of the Black Sea?", "answer": false, "facts": [ "The Black Sea has a maximum depth of 2,212 meters", "Sunlight does not penetrate water below 1000 meters" ], "decomposition": [ "What is the maximum depth of the Black Sea?", "How deep can sunlight penetrate a sea?", "Is #1 less than #2?" ], "evidence": [ [ [ [ "Black Sea-2" ] ], [ [ "Deep sea-1" ] ], [ "operation" ] ], [ [ [ "Black Sea-2" ] ], [ [ "Photic zone-5" ] ], [ "operation" ] ], [ [ [ "Black Sea-2" ] ], [ [ "Photic zone-5" ] ], [ "operation" ] ] ] }, { "qid": "0628999cf77312e0f23a", "term": "Justin Bieber", "description": "Canadian singer-songwriter and actor", "question": "Did U.S. soldiers listen to Justin Bieber's Believe album during the Battle of Baghdad?", "answer": false, "facts": [ "The Battle of Baghdad was the U.S. invasion of Baghdad in the year 2003.", "Justin Bieber's album Believe was released in 2012." ], "decomposition": [ "When did the Battle of Baghdad take place?", "When was the Justin Bieber album Believe released?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Battle of Baghdad (2003)-1" ] ], [ [ "Believe (Justin Bieber album)-1" ] ], [ "operation" ] ], [ [ [ "Battle of Baghdad (2003)-1" ] ], [ [ "Believe (Justin Bieber album)-1" ] ], [ "operation" ] ], [ [ [ "Battle of Baghdad (2003)-1" ] ], [ [ "Believe (Justin Bieber album)-1" ] ], [ "operation" ] ] ] }, { "qid": "1d0e6d453ffcf9094140", "term": "Honey bee", "description": "Eusocial flying insect of genus Apis, producing surplus honey", "question": "Can a honey bee sting a human more than once?", "answer": false, "facts": [ "Human skin is tough, and the bee's stinger gets lodged in the skin.", "The stinger becomes separated from the bee which dies soon after." ], "decomposition": [ "What happens to a bee's stinger when it stings a human?", "Are bees able to survive if #1 happens?" 
], "evidence": [ [ [ [ "Bee sting-6" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Stinger-7" ] ], [ [ "Stinger-7" ] ] ], [ [ [ "Honey bee-61" ] ], [ "operation" ] ] ] }, { "qid": "d1ca8206707ec8277030", "term": "Snowdon", "description": "highest mountain in Wales", "question": "Would the yearly precipitation on Snowdon submerge an upright bowling pin?", "answer": true, "facts": [ "Snowdown gets about 200 inches of precipitation a year ", "A standard bowling pin is one foot, three inches tall" ], "decomposition": [ "How much precipitation does Snowdown get yearly?", "How tall is a standard bowling pin?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Snowdon-9" ] ], [ [ "Bowling pin-2" ] ], [ [ "Bowling pin-2", "Snowdon-9" ] ] ], [ [ [ "Snowdon-9" ] ], [ [ "Bowling pin-2" ] ], [ "operation" ] ], [ [ [ "Snowdon-9" ] ], [ [ "Bowling pin-2" ] ], [ "operation" ] ] ] }, { "qid": "accc5deaf39058435541", "term": "Tailor", "description": "person who makes, repairs, or alters clothing professionally, typically men's clothing", "question": "Would an expensive tailor use adhesive to create a shorter hem on slacks?", "answer": false, "facts": [ "Adhesive hems are usually performed at home with a DIY kit.", "Professionally hemmed pants are created using a needle and thread." ], "decomposition": [ "Is a tailor professional when creating hems?", "Is using adhesive a professional way to create hems?", "Are #1 and #2 the same?" ], "evidence": [ [ [ [ "Tailor-1" ] ], [ [ "Adhesive-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Hem-2" ] ], [ [ "Hem-3" ], "operation" ], [ "operation" ] ], [ [ [ "Tailor-1" ] ], [ [ "Adhesive-2" ] ], [ "operation" ] ] ] }, { "qid": "0b7787655f4a9d600dfc", "term": "Clark Gable", "description": "American actor", "question": "Did Clark Gable marry more women once than Richard Burton?", "answer": true, "facts": [ "Richard Burton was married to Elizabeth Taylor twice. ", "Richard Burton married Sally Burton, Suzy Hunt, and Sybil Williams once.", "Clark Gable was married to five different women, one time each." ], "decomposition": [ "How many different women was Richard Burton married to?", "How many different wives did Clark Gable have?", "Is #2 more than #1?" ], "evidence": [ [ [ [ "Richard Burton-57" ] ], [ [ "Clark Gable-78" ] ], [ "operation" ] ], [ [ [ "Richard Burton-57" ] ], [ [ "Clark Gable-14", "Clark Gable-34", "Clark Gable-39" ] ], [ "operation" ] ], [ [ [ "Richard Burton-57" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "45ed5c1d56e942bee839", "term": "Lolcat", "description": "image combining a photograph of a cat with text intended to contribute humour", "question": "Did the 40th president of the United States forward lolcats to his friends?", "answer": false, "facts": [ "The 40th president of the United States was Ronald Reagan", "Ronald Reagan died in 2004", "The first recorded use of the term lolcat occurred in 2006" ], "decomposition": [ "Who was the 40th president of the United States?", "In what year did #1 die?", "In what year did the first lolcat appear?", "Is #3 before or the same as #2?" 
], "evidence": [ [ [ [ "Ronald Reagan-1" ] ], [ [ "Ronald Reagan-120" ] ], [ [ "Lolcat-4" ] ], [ "operation" ] ], [ [ [ "Ronald Reagan-1" ] ], [ [ "Ronald Reagan-1" ] ], [ [ "Lolcat-4" ] ], [ "operation" ] ], [ [ [ "Ronald Reagan-1" ] ], [ [ "Ronald Reagan-1" ] ], [ [ "Lolcat-4" ] ], [ "operation" ] ] ] }, { "qid": "76468dce5b02e75851b2", "term": "Daily Mirror", "description": "British daily tabloid newspaper owned by Reach plc.", "question": "Did William Shaespeare read the Daily Mirror?", "answer": false, "facts": [ "The Daily Mirror is a British tabloid founded in 1903.", "William Shakespeare died in 1616." ], "decomposition": [ "When did William Shakespeare die?", "When was the Daily Mirror founded?", "Is #2 before #1?" ], "evidence": [ [ [ [ "William Shakespeare-17" ] ], [ [ "Daily Mirror-1" ] ], [ "operation" ] ], [ [ [ "William Shakespeare-1" ] ], [ [ "Daily Mirror-1" ] ], [ "operation" ] ], [ [ [ "William Shakespeare-1" ] ], [ [ "Daily Mirror-1" ] ], [ "operation" ] ] ] }, { "qid": "7b02fc57b8949146bd28", "term": "Comic book", "description": "Publication of comics art", "question": "Does Disney own a major comic book publisher?", "answer": true, "facts": [ "The three biggest comic book publishers are DC, Marvel, and IDW.", "Disney has owned Marvel since 2007." ], "decomposition": [ "What are the top three biggest comic book publishers?", "What comic book publishers does Disney own?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Big Two Comics-1" ], "no_evidence" ], [ [ "Marvel Comics-1" ] ], [ "operation" ] ], [ [ [ "American comic book-39", "Marvel Comics-1" ], "no_evidence" ], [ [ "Marvel Comics-1" ] ], [ "operation" ] ], [ [ [ "Comic book-2" ] ], [ [ "Marvel Entertainment-1" ] ], [ "operation" ] ] ] }, { "qid": "d15cac26462700b9d6ca", "term": "Jackfruit", "description": "species of plant", "question": "Can jackfruit be used as a weapon?", "answer": true, "facts": [ "Jackfruit is the fruit of a species of plant called the Jacktree.", "Jackfruit can weigh up to one hundred and twenty pounds.", "Jackfruit is covered in little spikes.", "Jackfruit can be thrown or flung at an enemy.", "A weapon is a thing that is used to cause bodily harm." ], "decomposition": [ "What are the prominent physical features of a jackfruit?", "Does #1 make it a suitable weapon?" ], "evidence": [ [ [ [ "Jackfruit-2" ] ], [ "operation" ] ], [ [ [ "Jackfruit-12" ] ], [ [ "Tubercle-1" ], "operation" ] ], [ [ [ "Jackfruit-12" ] ], [ [ "Jackfruit-12", "Weapon-2" ], "no_evidence" ] ] ] }, { "qid": "763ef31236d06e080681", "term": "James Bond", "description": "Media franchise about a British spy", "question": "Do the James Bond and Doctor Who series have a similarity in format?", "answer": true, "facts": [ "The character of James Bond has been played by numerous actors. ", "The character of The Doctor from Doctor Who has been played by many actors." ], "decomposition": [ "Who has played James Bond?", "Who has played the Doctor? ", "Are multiple actors listed for #1 and #2?" 
], "evidence": [ [ [ [ "Portrayal of James Bond in film-4" ] ], [ [ "The Doctor (Doctor Who)-111" ] ], [ "operation" ] ], [ [ [ "Portrayal of James Bond in film-4" ] ], [ [ "The Doctor (Doctor Who)-1" ] ], [ "operation" ] ], [ [ [ "James Bond-2" ], "no_evidence" ], [ [ "The Doctor (Doctor Who)-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0eb259222f89112e1486", "term": "Shiva", "description": "One of the principal deities of Hinduism.", "question": "Does Sam Harris worship Shiva?", "answer": false, "facts": [ "Sam Harris is an atheist.", "Atheism is, in the broadest sense, an absence of belief in the existence of deities." ], "decomposition": [ "What is Sam Harris' religious affiliation?", "Does a #1 worship any gods?" ], "evidence": [ [ [ [ "Sam Harris-1" ] ], [ [ "Atheism-1" ] ] ], [ [ [ "Sam Harris-1" ] ], [ [ "Atheism-1" ] ] ], [ [ [ "Sam Harris-12" ] ], [ "operation" ] ] ] }, { "qid": "443f5ef5e6d353f28eda", "term": "Lord Voldemort", "description": "Fictional character of Harry Potter series", "question": "Would Lord Voldemort hypothetically be an effective fighter after Final Fantasy silence is cast?", "answer": false, "facts": [ "Lord Voldemort is a powerful wizard from the Harry Potter Series.", "Lord Voldemort casts magical curses and charms on his enemies.", "Silence spell in Final Fantasy mutes the enemies spells.", "Mute makes it impossible for characters to cast any spells." ], "decomposition": [ "What does Lord Voldemort use in combat against enemies?", "What would Lord Voldemort have to do in order to cast #1?", "Which ability does the silence spell in Final Fantasy affect?", "Can all of #2 still be done when #3 is gone?" ], "evidence": [ [ [ [ "Lord Voldemort-15" ], "no_evidence" ], [ [ "Incantation-1" ], "no_evidence" ], [ [ "Speech-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Lord Voldemort-28" ], "no_evidence" ], [ [ "Lord Voldemort-29" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Lord Voldemort-2", "Lord Voldemort-30" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "73627f72e83c519d8a0e", "term": "Astrology", "description": "Pseudoscience claiming celestial objects influence human affairs", "question": "Does Capricorn astrology symbol have all of the parts of a chimera?", "answer": false, "facts": [ "The Capricorn astrology symbol is a sea goat which consists of: a goat, and a fish.", "A chimera is a legendary beast that is made up of: a lion, a goat, and a snake." ], "decomposition": [ "What are the parts of the capricorn symbol?", "What is the chimera made up of?", "Does #1 include all of #2?" ], "evidence": [ [ [ [ "Capricorn (astrology)-1" ] ], [ [ "Chimera (mythology)-1" ] ], [ "operation" ] ], [ [ [ "Capricorn (astrology)-2" ] ], [ [ "Chimera (mythology)-1" ] ], [ "operation" ] ], [ [ [ "Capricorn (astrology)-2" ] ], [ [ "Chimera (mythology)-3" ] ], [ "operation" ] ] ] }, { "qid": "7deb49d3ba50fb75a04f", "term": "Capsaicin", "description": "chemical compound", "question": "Is Black Lives Matter connected with capsaicin?", "answer": true, "facts": [ "Black Lives Matter has held numerous protests", "Protesters at Black Lives Matter events have had pepper spray used against them by police", "Capsaicin is the main ingredient of pepper spray" ], "decomposition": [ "What are the common practical applications of the capsaicin compund?", "What kind of activities does the Black Lives Matter movement engage in?", "Is any of #1 relevant to any of #2?" 
], "evidence": [ [ [ [ "Capsaicin-11", "Capsaicin-5", "Capsaicin-7" ] ], [ [ "Black Lives Matter-20" ] ], [ [ "Pepper spray-1", "Riot control-1" ], "operation" ] ], [ [ [ "Capsaicin-2" ] ], [ [ "Black Lives Matter-30" ] ], [ "operation" ] ], [ [ [ "Capsaicin-11" ] ], [ [ "Movement for Black Lives-6" ] ], [ [ "Movement for Black Lives-7" ], "operation" ] ] ] }, { "qid": "106bf37e9d8b5293775c", "term": "Breast", "description": "Region of the torso of a primate containing the mammary gland", "question": "Do people in middle school usually get breast exams?", "answer": false, "facts": [ "Women should begin giving themselves breast exams at the age of 20.", "Middle school students are usually preteens or young teens." ], "decomposition": [ "What age do people usually get breast exams?", "How old are the students in Middle school in the US?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Mammography-1", "Mammography-3" ] ], [ [ "Secondary education in the United States-1" ] ], [ "operation" ] ], [ [ [ "Mammography-1", "Mammography-3" ] ], [ [ "Middle school-88" ] ], [ "operation" ] ], [ [ [ "Breast cancer screening-3" ] ], [ [ "Secondary education in the United States-1" ] ], [ "operation" ] ] ] }, { "qid": "e78a917a9d12dfe19ba4", "term": "Stanley Baldwin", "description": "Former Prime Minister of the United Kingdom", "question": "Was a woman Prime Minister directly before or after Stanley Baldwin?", "answer": false, "facts": [ "Stanley Baldwin was preceded by Ramsay MacDonald as Prime Minister.", "Stanley Baldwin was succeeded as Prime Minister by Neville Chamberlain." ], "decomposition": [ "Who was the Prime Minister before Stanley Baldwin?", "Who was the Prime Minister after Stanley Baldwin?", "Was #1 a woman?", "Was #2 a woman?", "Is the answer to either #3 or #4 yes?" ], "evidence": [ [ [ [ "Ramsay MacDonald-1" ] ], [ [ "Neville Chamberlain-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Ramsay MacDonald-1", "Ramsay MacDonald-1", "Stanley Baldwin-5" ] ], [ [ "Neville Chamberlain-3" ] ], [ [ "Ramsay MacDonald-1" ] ], [ [ "Neville Chamberlain-8" ] ], [ "operation" ] ], [ [ [ "Stanley Baldwin-14" ] ], [ [ "Sir Roger Conant, 1st Baronet-3" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "4b2e265815601bfa4152", "term": "Astrophotography", "description": "specialized type of photography for recording images of astronomical objects and large areas of the night sky", "question": "Is it difficult to conduct astrophotography in the summer in Sweden?", "answer": true, "facts": [ "Astrophotography is used to photograph the night sky.", "Swedish summers have short nights." ], "decomposition": [ "What does Astrophotography take photos of?", "Are #1's short in the summers of Sweden?" ], "evidence": [ [ [ [ "Astrophotography-1" ] ], [ [ "Sweden-59" ], "operation" ] ], [ [ [ "Astrophotography-1" ] ], [ [ "Sweden-56" ], "operation" ] ], [ [ [ "Astrophotography-1" ] ], [ [ "Tourism in Sweden-6" ] ] ] ] }, { "qid": "e29d4982a58b5e70410e", "term": "Eagle", "description": "large carnivore bird", "question": "Would bald eagle deliver an urgent message before B-52?", "answer": false, "facts": [ "A bald eagle can travel up to 99 MPH.", "The B-52 is a US air bomber that can travel up to 650 MPH." ], "decomposition": [ "How fast can an eagle travel?", "How fast can a B-52 travel?", "Is #1 greater than #2?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Eagle Flight-6" ], "no_evidence" ], [ [ "B-52 (cocktail)-10" ], "no_evidence" ], [ "no_evidence" ] ], [ [ "no_evidence" ], [ [ "Boeing B-52 Stratofortress-6" ] ], [ "operation" ] ] ] }, { "qid": "ef67433aeac38f7fada4", "term": "Tokyo Tower", "description": "observation tower", "question": "Did Tokyo Tower designers appreciate Stephen Sauvestre?", "answer": true, "facts": [ "Tokyo Tower is a communications tower in Japan, built in 1958, that was inspired by the Eiffel Tower.", "Stephen Sauvestre was the architect of the the Eiffel Tower which was competed in 1889." ], "decomposition": [ "Which architectural design is Stephen Sauvestre famous for?", "Was #1 influential in the design of the Tokyo Tower?" ], "evidence": [ [ [ [ "Stephen Sauvestre-1" ] ], [ [ "Tokyo Tower-1" ] ] ], [ [ [ "Stephen Sauvestre-3" ] ], [ "operation" ] ], [ [ [ "Stephen Sauvestre-1" ] ], [ [ "Tokyo Tower-1" ] ] ] ] }, { "qid": "7254b3ce052273ee1a0a", "term": "Professional boxing", "description": "sport", "question": "Can professional boxers expect to have low dental bills?", "answer": false, "facts": [ "Professional boxers often receive punches to their face.", "Even with a mouth guard, dental injuries occur often in boxing.", "The average cost for one dental crown is between $500-$3000" ], "decomposition": [ "What types of injuries are professional boxers likely to sustain?", "Are #1 inexpensive to treat?" ], "evidence": [ [ [ [ "Boxing-63" ] ], [ [ "History of dental treatments-6" ] ] ], [ [ [ "Boxing-82" ] ], [ "operation" ] ], [ [ [ "Boxing-63" ] ], [ [ "Dental insurance-1" ] ] ] ] }, { "qid": "a771fc7612cc3b8dc2dc", "term": "Augustus", "description": "First emperor of the Roman Empire", "question": "Would a hippie hypothetically be bummed out by Augustus's Pax Romana?", "answer": false, "facts": [ "A hippie was a member of the counterculture movement of the 1960s.", "One of the most prevalent hippie statements is peace and love.", "The Pax Romana was a near 200 year era of peace in the Roman Empire that began during the reign of Augustus.", "Augustus had several loves, including three wives." ], "decomposition": [ "What ideals did hippies promote?", "What was the defining attribute of Pax Romana?", "Is #2 not a subset of #1?" ], "evidence": [ [ [ [ "Hippie-13" ] ], [ [ "Pax Romana-1" ] ], [ "operation" ] ], [ [ [ "17 Hippies-5" ], "no_evidence" ], [ [ "Pax Romana-4" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Hippie-13" ] ], [ [ "Pax Romana-1" ] ], [ "operation" ] ] ] }, { "qid": "b921f2496791cca37167", "term": "Oyster", "description": "salt-water bivalve mollusc", "question": "Can oysters be preserved without refrigeration? ", "answer": true, "facts": [ "In some types of Korean kimchi, oysters are placed between the leaves of nappa cabbage. ", "Many grocery stores carry canned oysters in the shelf stable section. " ], "decomposition": [ "How are oysters preserved in various types of Korean kimchi?", "What are the common methods of preserving oysters in grocery stores?", "Do any of #1 or #2 not require refrigeration?" 
], "evidence": [ [ [ [ "Kimchi-28" ], "no_evidence" ], [ [ "Oyster-57", "Oyster-61" ] ], [ "operation" ] ], [ [ [ "Korean cuisine-25" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Oyster-48" ], "no_evidence" ], [ [ "Oyster-35" ] ], [ "operation" ] ] ] }, { "qid": "9a84fdc02e3eddb2d395", "term": "The Atlantic", "description": "Magazine and multi-platform publisher based in Washington, D.C.", "question": "Could the Atlantic readers fill 500 battalions?", "answer": false, "facts": [ "A battalion is a military unit of measurement that includes 1000 soldiers.", "As of 2018 The Atlantic has a circulation of 478,534." ], "decomposition": [ "What is the number of readers (copies in circulation) of The Atlantic magazines?", "What is the average number of soldiers in a battalion in the US?", "Is #1 at least equal to 500 times #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Battalion-1" ] ], [ "operation" ] ], [ [ [ "The Atlantic-24" ] ], [ [ "Battalion-21" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Atlantic-24" ] ], [ [ "Battalion-21" ] ], [ "operation" ] ] ] }, { "qid": "02d58ebdbdb3a1cdc79e", "term": "Hollywood", "description": "District in Los Angeles, California, United States", "question": "Is it normally unnecessary to wear a coat in Hollywood in July?", "answer": true, "facts": [ "The average high temperature in Hollywood in July is 77.2°F.", "The average low temperature in Hollywood in July is 61.5°F.", "A coat is a garment worn on the upper body for warmth." ], "decomposition": [ "What is the average high temperature in Hollywood in July?", "What is the average low temperature in Hollywood in July?", "What temperature does one usually wear a coat?", "Is #3 outside of #1 to #2?" ], "evidence": [ [ [ [ "Hollywood-1", "Los Angeles-36" ] ], [ [ "Los Angeles-34" ] ], [ [ "Overcoat-1", "Winter-5" ] ], [ "operation" ] ], [ [ [ "Climate of Los Angeles-5", "Climate of Los Angeles-7" ], "no_evidence" ], [ [ "Climate of Los Angeles-7" ], "no_evidence" ], [ [ "Winter clothing-1" ] ], [ "operation" ] ], [ [ [ "Los Angeles-35" ], "no_evidence" ], [ [ "Los Angeles-35" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ffd8a720264778c0fd6e", "term": "Toyota Hilux", "description": "Series of light commercial vehicles produced by the Japanese car-manufacturer Toyota.", "question": "Can the Toyota Hilux tip the scales against Mr. Ed?", "answer": true, "facts": [ "The current generation of Toyota Hilux weighs at least 4,310 lbs", "Mr. Ed was portrayed by an adult horse", "The average adult horse weighs up to 2,000 lbs" ], "decomposition": [ "What does a Toyota Hilux weigh?", "What does an adult horse weigh?", "Is #1 greater than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Horse-13" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Horse-13" ] ], [ "operation" ] ], [ [ [ "Toyota Hilux-1" ], "no_evidence" ], [ [ "Horse-12" ] ], [ "operation" ] ] ] }, { "qid": "75cff4dab3152b768aa9", "term": "Cerebral palsy", "description": "A group of disorders affecting the development of movement and posture, often accompanied by disturbances of sensation, perception, cognition, and behavior. It results from damage to the fetal or infant brain.", "question": "Could a young Wizard of Oz Scarecrow have gotten Cerebral palsy?", "answer": false, "facts": [ "Cerebral palsy is a disease that results from damage to a young person's brain.", "The Scarecrow in the Wizard of Oz did not have a brain and was on a quest to get one." 
], "decomposition": [ "Which organ of the body can cerebral palsy be traced back to?", "Did the Scarecrow in Wizard of Oz initially have #1 ?" ], "evidence": [ [ [ [ "Cerebral palsy-5" ] ], [ [ "Scarecrow (Oz)-3" ], "operation" ] ], [ [ [ "Cerebral palsy-2" ] ], [ [ "Scarecrow (Oz)-1" ], "operation" ] ], [ [ [ "Cerebral palsy-2" ] ], [ [ "The Wizard of Oz (1939 film)-6" ] ] ] ] }, { "qid": "8b72c6650e23554f51eb", "term": "Glenn Beck", "description": "American talk radio and television host", "question": "Would Glen Beck and Stephen Colbert be likely to tour together?", "answer": false, "facts": [ "Glenn Beck is a right wing commentator known for strong opinions and serious tone.", "Stephen Colbert is a liberal political commentator who takes a comedic approach to his work." ], "decomposition": [ "What political party does Glen Beck support?", "What political party does Stephen Colbert support?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Glenn Beck-49" ] ], [ [ "Stephen Colbert-62" ] ], [ [ "Stephen Colbert-62" ], "operation" ] ], [ [ [ "Glenn Beck-46" ] ], [ [ "Stephen Colbert-62" ] ], [ "operation" ] ], [ [ [ "Glenn Beck-46" ] ], [ [ "Stephen Colbert-3" ] ], [ "operation" ] ] ] }, { "qid": "a12eb8ae9b38c231a00f", "term": "Maroon 5", "description": "American pop punk band", "question": "Could Maroon 5 have hypothetically held a concert at Roman Colosseum?", "answer": true, "facts": [ "The Roman Colosseum had a capacity of 87,000 people. ", "Maroon 5 has held concerts at Brazil's Allianz Parque, which has a capacity of close to 44,000.", "Almost 30,000 people attended Maroon 5's 2015 Madison Square Garden concert over two days." ], "decomposition": [ "How many spectators could the Roman Colosseum hold?", "How many people were in attendance at Maroon 5's largest concert?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Colosseum-1" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Colosseum-1" ] ], [ [ "Super Bowl LIII halftime show-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Colosseum-2" ] ], [ [ "Maroon V Tour-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0d88c13d582a6c3c6f94", "term": "Junk (ship)", "description": "Type of boat", "question": "Does Carmen Electra own a junk?", "answer": false, "facts": [ "A junk is a boat.", "Boats are sailed on open water.", "Carmen Electra has a fear of open water." ], "decomposition": [ "What is another name for a junk?", "Where does one use #1?", "Does Carmen Electra like being in #2?" ], "evidence": [ [ [ [ "Junk (ship)-1" ] ], [ [ "Junk (ship)-6" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Junk (ship)-1" ] ], [ [ "Junk (ship)-1" ] ], [ [ "Carmen Electra-1" ], "no_evidence", "operation" ] ], [ [ [ "Junk (ship)-1" ] ], [ [ "Sailing ship-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "527a89feb66ade5f0908", "term": "Ashland, Oregon", "description": "City in Oregon, United States", "question": "Is 2018 Ashland, Oregon population inadequate to be a hypothetical military division?", "answer": false, "facts": [ "The 2018 population of Ashland Oregon was 21,263 people.", "The number of soldiers in a military division is between 10,000 and 25,000 people." ], "decomposition": [ "What was the population of Ashland, Oregon in 2018?", "How many soldiers are in a military division?", "Is #1 less than the minimum in #2?" 
], "evidence": [ [ [ [ "Ashland, Oregon-1" ], "no_evidence" ], [ [ "Division (military)-16" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Ashland, Oregon-1" ] ], [ [ "Division (military)-1" ] ], [ "operation" ] ], [ [ [ "Ashland, Oregon-1" ] ], [ [ "Division (military)-1" ] ], [ "operation" ] ] ] }, { "qid": "e5fa5f6c12bfa1aed955", "term": "Audiobook", "description": "recording of a text being read", "question": "Do Youtube viewers get unsolicited audiobook advice often?", "answer": true, "facts": [ "Audible is one of the most common sponsors for Youtubers to have.", "Audible is an audiobook subscription service. ", "Audible ads typically involve discussing a book that the speaker has recently listened to." ], "decomposition": [ "What company is one of the most common sponsors for Youtubers to have?", "What do the ads for #1 typically involve?", "Does #2 involve someone giving audiobook advice?" ], "evidence": [ [ [ [ "Audible (store)-1" ], "no_evidence" ], [ [ "Audible (store)-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "YouTube-3" ], "no_evidence" ], [ [ "YouTube-3" ] ], [ "no_evidence" ] ], [ [ [ "Audible (store)-16" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "6b9f97d9cb0b477804df", "term": "Osama bin Laden", "description": "Co-founder of al-Qaeda", "question": "Did Osama bin Laden likely abstain from alcohol?", "answer": true, "facts": [ "Osama bin Laden belonged to the religion of Islam.", "Islam prohibits the consumption of alcohol." ], "decomposition": [ "What religion was Osama bin Laden?", "Does #1 prohibit consumption of alcohol?" ], "evidence": [ [ [ [ "Osama bin Laden-10" ] ], [ [ "Alcohol law-14" ], "operation" ] ], [ [ [ "Osama bin Laden-10" ] ], [ [ "Islamic culture-45" ] ] ], [ [ [ "Osama bin Laden-16" ] ], [ [ "Islamic dietary laws-9" ], "operation" ] ] ] }, { "qid": "f9abd88f0bd83d3316f0", "term": "Garlic", "description": "species of plant", "question": "Are fresh garlic cloves as easy to eat as roasted garlic cloves?", "answer": false, "facts": [ "Allicin is the component of garlic that makes it 'spicy' feeling in the mouth.", "When garlic is cooked, the Allicin in it is removed." ], "decomposition": [ "What makes garlic uncomfortable to eat?", "Does #1 remain after cooking?" ], "evidence": [ [ [ [ "Garlic-31", "Garlic-32" ] ], [ [ "Garlic-34", "Garlic-35" ] ] ], [ [ [ "Garlic-22" ] ], [ [ "Garlic-22" ], "operation" ] ], [ [ [ "Garlic-22" ] ], [ [ "Garlic-22" ], "operation" ] ] ] }, { "qid": "341be84d785651838007", "term": "JPEG", "description": "Lossy compression method for reducing the size of digital images", "question": "Would JPEG be a good format for saving an image of Da Vinci's Vitruvian Man?", "answer": false, "facts": [ "JPEG is not well suited for line drawings and other textual or iconic graphics, where the sharp contrasts between adjacent pixels can cause noticeable artifacts. ", "Da Vinci's Vitruvian Man is a line drawing done in pen and ink." ], "decomposition": [ "What kind of details are portrayed in Da Vinci's Vitruvian Man?", "Are JPEGs an ideal format for saving pictures containing #1?" 
], "evidence": [ [ [ [ "Vitruvian Man-3" ] ], [ [ "JPEG-29" ] ] ], [ [ [ "Vitruvian Man-3" ] ], [ [ "JPEG-1", "JPEG-110" ] ] ], [ [ [ "Vitruvian Man-2" ], "no_evidence" ], [ [ "JPEG-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "4a0bd486029477dc8249", "term": "Lionel Richie", "description": "American singer-songwriter, musician, record producer and actor", "question": "Did Lionel Richie ever have dinner with Abraham Lincoln?", "answer": false, "facts": [ "Abraham Lincoln died in 1865.", "Lionel Richie was born in 1949." ], "decomposition": [ "When did Abraham Lincoln die?", "When was Lionel Richie born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Abraham Lincoln-1" ] ], [ [ "Lionel Richie-1" ] ], [ "operation" ] ], [ [ [ "Outline of Abraham Lincoln-2" ] ], [ [ "Lionel Richie-1" ] ], [ "operation" ] ], [ [ [ "Abraham Lincoln-1" ] ], [ [ "Lionel Richie-1" ] ], [ "operation" ] ] ] }, { "qid": "c664e847b957da434f15", "term": "Hair", "description": "protein filament that grows from follicles found in the dermis, or skin", "question": "Is it safe to eat hair?", "answer": true, "facts": [ "Hair is made of keratin.", "Food manufacturers use L-cysteine as a food additive.", "L-cysteine is made from keratin." ], "decomposition": [ "What is hair made of?", "What else is made from #1?", "Are any of #2 used in food production?" ], "evidence": [ [ [ [ "Hair-2" ] ], [ [ "Alpha-keratin-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Hair-6" ] ], [ [ "Hair-6" ] ], [ [ "Food-1" ] ] ], [ [ [ "Hair-1" ] ], [ [ "Beef-1" ] ], [ [ "Hamburger-1" ], "operation" ] ] ] }, { "qid": "139d12df0ad15cc0347c", "term": "Washington Monument", "description": "Obelisk in Washington, D.C.", "question": "Did Sojourner Truth use the elevator at the Washington Monument?", "answer": false, "facts": [ "The Washington Monument was opened to the public in October 1888.", "Sojourner Truth died November 26, 1883. " ], "decomposition": [ "When did Sojourner Truth pass away?", "When was the Washington Monument opened to the public?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Sojourner Truth-1" ] ], [ [ "Washington Monument-26" ] ], [ [ "Washington Monument-26" ], "operation" ] ], [ [ [ "Sojourner Truth-1" ] ], [ [ "Washington Monument-2" ] ], [ "operation" ] ], [ [ [ "Sojourner Truth-1" ] ], [ [ "Washington Monument-2" ] ], [ "operation" ] ] ] }, { "qid": "fbcda00a5524a9fda557", "term": "Florence", "description": "Capital and most populous city of the Italian region of Tuscany", "question": "Was Florence a Theocracy during Italian Renaissance?", "answer": true, "facts": [ "The Italian Renaissance was a period of history from the 13th century to 1600.", "A theocracy is a type of rule in which religious leaders have power.", "Friar Girolamo Savonarola was the ruler of Florence, after driving out the Medici family, from November 1494 – 23 May 1498." ], "decomposition": [ "When was the Italian Renaissance?", "When did Friar Girolamo Savonarola rule Florence?", "Is #2 within the span of #1?", "Did Friar Girolamo Savonarola belong to a religious order during #3?" 
], "evidence": [ [ [ [ "Italian Renaissance-1" ] ], [ [ "Girolamo Savonarola-1" ], "no_evidence" ], [ "operation" ], [ [ "Girolamo Savonarola-1" ] ] ], [ [ [ "Italian Renaissance-1" ] ], [ [ "Republic of Florence-38", "Republic of Florence-39", "Republic of Florence-40" ] ], [ "operation" ], [ [ "Girolamo Savonarola-1" ], "operation" ] ], [ [ [ "Italian Renaissance-1" ] ], [ [ "Girolamo Savonarola-2", "Girolamo Savonarola-3" ] ], [ "operation" ], [ [ "Dominican Order-1", "Girolamo Savonarola-1" ], "operation" ] ] ] }, { "qid": "5f267b7c20090236a2fb", "term": "Beauty and the Beast", "description": "traditional fairy tale", "question": "Were Beauty and the Beast adaptations devoid of Kurt Sutter collaborators?", "answer": false, "facts": [ "Beauty and the Beast is a fairy tale adapted into several movie and TV shows.", "Kurt Sutter created the TV series Sons of Anarchy and The Shield.", "Charlie Hunnam and Ron Perlman starred in Sons of Anarchy.", "Ron Perlman starred in the TV series Beauty and the Beast which aired from 1987-1990." ], "decomposition": [ "Which characters were featured in Kurt Sutter's Sons of Anarchy and The Shield?", "Which characters were featured in TV series Beauty and the Beast?", "Is there no character common to #1 and #2?" ], "evidence": [ [ [ [ "Clay Morrow-1", "The Shield-1" ], "no_evidence" ], [ [ "Beauty and the Beast (1987 TV series)-13" ] ], [ [ "Ron Perlman-1" ], "operation" ] ], [ [ [ "Clay Morrow-1" ], "no_evidence" ], [ [ "Ron Perlman-5" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Ron Perlman-1", "The Shield-1" ] ], [ [ "Beauty and the Beast (1987 TV series)-1" ] ], [ "operation" ] ] ] }, { "qid": "e9689243222e7afa28ad", "term": "Swan", "description": "large water bird", "question": "Would a Nike shoebox be too small to fit a swan in?", "answer": true, "facts": [ "Nike Shoeboxes are usually 14\" x 10\" x 5\".", "An average swan is 4-5.6 ft in length." ], "decomposition": [ "What is the average size of a Nike Shoebox?", "What is the average length of a swan?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "Shoe size-13", "Sneakers-16" ], "no_evidence" ], [ [ "Swan-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Shoe-1" ], "no_evidence" ], [ [ "Swan-3" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Swan-3" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "115dd3102b245bdbc737", "term": "Dr. Seuss", "description": "American children's writer and illustrator", "question": "Did Dr. Seuss live a tragedy free life?", "answer": false, "facts": [ "Dr. Seuss's wife committed suicide.", "In his later years, Dr. Seuss was diagnosed with cancer." ], "decomposition": [ "Was Dr. Seuss' life free of tragic occurrences?" ], "evidence": [ [ [ [ "Dr. Seuss-20" ] ] ], [ [ [ "Dr. Seuss-20" ] ] ], [ [ [ "Dr. Seuss-22" ], "no_evidence" ] ] ] }, { "qid": "6571aafb3bae1b2e75e9", "term": "Johann Sebastian Bach", "description": "German composer", "question": "Did Johann Sebastian Bach ever win a Grammy Award?", "answer": false, "facts": [ "Johann Sebastian Bach died in 1750.", "The first Grammy Awards ceremony was held on May 4, 1959." ], "decomposition": [ "In what year did Johann Sebastian Bach die?", "When was the first Grammy Award given?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Johann Sebastian Bach-1" ] ], [ [ "Grammy Award-3" ] ], [ "operation" ] ], [ [ [ "Johann Sebastian Bach-34" ] ], [ [ "Grammy Award-6" ] ], [ "operation" ] ], [ [ [ "Johann Sebastian Bach-5" ] ], [ [ "Grammy Award-6" ] ], [ "operation" ] ] ] }, { "qid": "716d2417cd373b9d25f0", "term": "Chinchilla", "description": "Rodent genus", "question": "Are chinchillas cold-blooded?", "answer": false, "facts": [ "Chinchillas are rodents.", "Rodents are mammals.", "All mammals are warm-blooded." ], "decomposition": [ "What type of animal are Chinchillas?", "What animal class is #1?", "Are #2s cold blooded?" ], "evidence": [ [ [ [ "Chinchilla-1" ] ], [ [ "Rodent-1" ] ], [ [ "Mammal-53" ], "operation" ] ], [ [ [ "Chinchilla-2" ] ], [ [ "Chinchilla-2" ] ], [ [ "Mammal-53" ], "operation" ] ], [ [ [ "Chinchilla-3" ] ], [ [ "Rodent-1" ] ], [ [ "Mammal-53" ] ] ] ] }, { "qid": "91bb99711affe05abe7b", "term": "Samsung", "description": "South Korean multinational conglomerate", "question": "Is Samsung accountable to shareholders?", "answer": true, "facts": [ "Samsung is a publicly traded company.", "Publicly traded companies are ultimately accountable to shareholders. " ], "decomposition": [ "What kind of company is Samsung?", "Are #1's accountable to shareholders?" ], "evidence": [ [ [ [ "Samsung Electronics-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Samsung-1" ] ], [ [ "Conglomerate (company)-21" ], "operation" ] ], [ [ [ "Samsung-14" ], "operation" ], [ "operation" ] ] ] }, { "qid": "d4c4c4c7fb085e6d1d98", "term": "Alan Alda", "description": "American actor, director, and writer", "question": "Is Alan Alda old enough to have fought in the Vietnam War?", "answer": true, "facts": [ "Alan Alda was born in 1936.", "The Vietnam War was from 1955 to 1975, with American involvement from 1965 to 1973.", "American soldiers must be at least 18 years old.", "Alan Alda was 29 in 1965." ], "decomposition": [ "When were US forces first involved in the Vietnam war?", "When was Alan Alda born?", "What is the minimum age required to join the US Army?", "What is #1 minus #2?", "Is #4 greater than or equal to #3?" ], "evidence": [ [ [ [ "Vietnam War-58" ] ], [ [ "Alan Alda-2" ] ], [ [ "United States Army Recruiting Command-13" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Vietnam War-2" ] ], [ [ "Alan Alda-2" ] ], [ "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Vietnam War-1" ] ], [ [ "Alan Alda-2" ] ], [ [ "United States Armed Forces-3" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "f67e4140ba9ecdb6dcaa", "term": "Prophet", "description": "person claiming to speak for divine beings", "question": "Did Disney's second film rip off a prophet story?", "answer": true, "facts": [ "Disney's second film, Pinocchio, was released in 1940.", "The biblical prophet Jonah was swallowed by a whale.", "In Pinocchio, Gepetto is swallowed by a giant whale while searching for Pinocchio." ], "decomposition": [ "What is Disney's second film?", "In #1, what happens to Gepetto while searching for Pinocchio?", "In a biblical prophet, what happens to Jonah?", "Is #2 the same as #3?" 
], "evidence": [ [ [ [ "Pinocchio (1940 film)-1" ] ], [ [ "Pinocchio (1940 film)-9" ] ], [ [ "Jonah-1" ] ], [ "operation" ] ], [ [ [ "Pinocchio (1940 film)-1" ] ], [ [ "Pinocchio (1940 film)-9" ] ], [ [ "Jonah-4" ] ], [ "operation" ] ], [ [ [ "Pinocchio (1940 film)-1" ] ], [ [ "Pinocchio (1940 film)-9" ] ], [ [ "Jonah-1" ] ], [ "operation" ] ] ] }, { "qid": "6a698f75fc0cd0f9799b", "term": "Uranium", "description": "Chemical element with atomic number 92", "question": "Is eating a Dicopomorpha echmepterygis size Uranium pellet fatal?", "answer": false, "facts": [ "Dicopomorpha echmepterygis is a wingless insect that is .13mm large.", "Uranium is a radioactive element that is dangerous if ingested in large doses.", "25mg of Uranium would cause kidney damage, while 50mg would cause complete kidney failure in humans." ], "decomposition": [ "How much does a Dicopomorpha echmepterygis weigh?", "How much ingested Uranium is fatal for a human?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Dicopomorpha echmepterygis-1", "Micrometre-1" ], "no_evidence" ], [ [ "Iron tris(dimethyldithiocarbamate)-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Dicopomorpha echmepterygis-1" ], "no_evidence" ], [ [ "Uranium-40" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Dicopomorpha echmepterygis-1" ], "no_evidence" ], [ [ "Self-harm-12" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "07b9fcd269bd1f8b0803", "term": "Knight", "description": "An award of an honorary title for past or future service with its roots in chivalry in the Middle Ages", "question": "Can musicians become knights?", "answer": true, "facts": [ "Elton John was knighted by the Queen of England.", "Tom Jones was knighted by the Queen of England.", "Elton John is a famous pop singer.", "Tom Jones is a famous musician." ], "decomposition": [ "Has Elton John been knighted?", "Has Tom Jones been knighted?", "Are #1 and #2 musicians?", "Are #1, #2 and #3 positive?" ], "evidence": [ [ [ [ "Elton John-3" ] ], [ [ "Tom Jones (singer)-3" ] ], [ [ "Elton John-1", "Tom Jones (singer)-1" ] ], [ "operation" ] ], [ [ [ "Elton John-1" ] ], [ [ "Tom Jones (singer)-1" ] ], [ [ "Elton John-3", "Tom Jones (singer)-2" ] ], [ "operation" ] ], [ [ [ "Elton John-3" ] ], [ [ "Tom Jones (singer)-3" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "751232e3ff80ee198616", "term": "Lemon", "description": "citrus fruit", "question": "Can a lemon aggravate dyspepsia?", "answer": true, "facts": [ "Dyspepsia is a condition where the stomach is irritated.", "Lemons are highly acidic fruits.", "Common stomach irritants include alcohol, coffee, and acidic foods." ], "decomposition": [ "Which condition is referred to as dyspepsia?", "What are some common irritants that could aggravate #1?", "Is lemon an example of #2?" ], "evidence": [ [ [ [ "Indigestion-1" ] ], [ [ "Indigestion-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Indigestion-1" ] ], [ [ "Indigestion-15" ], "no_evidence" ], [ [ "Indigestion-25" ], "no_evidence", "operation" ] ], [ [ [ "Indigestion-11" ] ], [ [ "Indigestion-12" ] ], [ [ "Lemon-17" ] ] ] ] }, { "qid": "47ed3fe19be0eea3ffaa", "term": "Oscar Wilde", "description": "19th-century Irish poet, playwright and aesthete", "question": "Was Oscar Wilde's treatment under the law be considered fair in the US now?", "answer": false, "facts": [ "Oscar Wilde was imprisoned for sexual indecency that amounted to having sexual relations with another man.", "In the United States, being gay is not a punishable offense. 
" ], "decomposition": [ "Why was Oscar Wilde imprisioned?", "is #1 considered a punishable offense in the US today?" ], "evidence": [ [ [ [ "Oscar Wilde-4" ] ], [ "no_evidence" ] ], [ [ [ "Oscar Wilde-68" ] ], [ [ "Same-sex marriage in the United States-1" ], "operation" ] ], [ [ [ "Oscar Wilde-75" ] ], [ [ "Sodomy laws in the United States-2" ], "operation" ] ] ] }, { "qid": "79f41f77e80dbef57439", "term": "Jackie Chan", "description": "Hong Kong actor and martial artist", "question": "Would Jackie Chan have trouble communicating with a deaf person?", "answer": false, "facts": [ "Jackie Chan speaks Cantonese, Mandarin, English, and American Sign Language.", "American Sign Language (ASL) is a natural language that serves as the predominant sign language of Deaf communities in the United States and most of Anglophone Canada." ], "decomposition": [ "What languages can Jackie Chan speak?", "What language do deaf people communicate with?", "Is #2 not included in #1?" ], "evidence": [ [ [ [ "Jackie Chan-38" ] ], [ [ "Sign language-1" ] ], [ "operation" ] ], [ [ [ "Jackie Chan-38" ] ], [ [ "Sign language-3" ] ], [ "operation" ] ], [ [ [ "Jackie Chan-38" ] ], [ [ "American Sign Language-1" ] ], [ "operation" ] ] ] }, { "qid": "22f35192d8d58cfb6ba9", "term": "John Key", "description": "38th Prime Minister of New Zealand", "question": "Could a fan of the Botany Swarm vote for John Key?", "answer": true, "facts": [ "The Botany Swarm is a hockey team based in Auckland, New Zealand", "John Key is the Prime Minister of New Zealand" ], "decomposition": [ "What city is the Botany Swarm based in?", "In what country is #1?", "What country was John Key the Prime Minister of?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Botany Swarm-1" ] ], [ [ "East Auckland-1" ] ], [ [ "John Key-1" ] ], [ "operation" ] ], [ [ [ "Botany Swarm-1" ] ], [ [ "Botany Swarm-1" ] ], [ [ "John Key-1" ] ], [ "operation" ] ], [ [ [ "Botany Swarm-1" ] ], [ [ "Botany Swarm-1" ] ], [ [ "John Key-1" ] ], [ "operation" ] ] ] }, { "qid": "57fefe73cfa1e7388aba", "term": "Jackson Pollock", "description": "American painter", "question": "Were Jackson Pollock's parents not required to say The Pledge of Allegiance as children?", "answer": true, "facts": [ "Jackson Pollock's parents were both born and grew up in Tingley, Iowa.", "All states except California, Hawaii, Iowa, Vermont, and Wyoming require a regularly scheduled recitation of the pledge in public schools." ], "decomposition": [ "What state did Jackson Pollock's parents grow up in?", "What states do not require the pledge to be recited in school?", "Is #1 in the list in #2?" ], "evidence": [ [ [ [ "Jackson Pollock-4" ] ], [ [ "Pledge of Allegiance-2" ] ], [ "operation" ] ], [ [ [ "Jackson Pollock-4" ] ], [ [ "Pledge of Allegiance-2" ] ], [ "operation" ] ], [ [ [ "Jackson Pollock-4" ] ], [ [ "Pledge of Allegiance-2" ] ], [ "operation" ] ] ] }, { "qid": "955a55c2c64209b0ab7d", "term": "Call of Duty", "description": "First-person shooter video game franchise", "question": "Will Conan the Barbarian hypothetically last a short time inside of Call of Duty?", "answer": true, "facts": [ "Conan the Barbarian is a comic book character.", "Conan the Barbarian is equipped with a sword and does not typically wear armor.", "Call of Duty is a modern warfare video game.", "Soldiers in Call of Duty are equipped with weapons like sniper rifles, shotguns, and machine guns." 
], "decomposition": [ "What equipment for fighting does Conan the Barbarian use?", "What equipment for fighting does Call of Duty use?", "Are the items listed in #2 deadlier than those in #1?" ], "evidence": [ [ [ [ "Conan the Barbarian-1" ] ], [ [ "Call of Duty-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Conan the Barbarian-20" ] ], [ [ "Call of Duty-46" ] ], [ [ "Sword-58" ], "operation" ] ], [ [ [ "Conan the Barbarian-16" ] ], [ [ "Call of Duty-4" ] ], [ "operation" ] ] ] }, { "qid": "36352a5a049bf7ee6acd", "term": "Vitamin C", "description": "nutrient found in citrus fruits and other foods", "question": "Do pirates care about vitamin C?", "answer": true, "facts": [ "Scurvy is caused by a prolonged period of time without Vitamin C", "People spending long periods of time at sea without to vitamin C are at high risk for scurvy", "Pirates spend long periods of time at sea" ], "decomposition": [ "What diseases are caused by a lack of vitamin C?", "What behaviors increase people risk of getting #1?", "Do pirate engage in #2?" ], "evidence": [ [ [ [ "Scurvy-1" ] ], [ [ "Scurvy-4" ] ], [ [ "Piracy-1" ], "operation" ] ], [ [ [ "Vitamin C-12", "Vitamin C-2" ] ], [ [ "Scurvy-4" ] ], [ "operation" ] ], [ [ [ "Vitamin C-9" ] ], [ [ "Scurvy-2" ] ], [ "operation" ] ] ] }, { "qid": "80b3ba19b90c340ea5cc", "term": "Surveillance", "description": "monitoring of behavior, activities, or other changing information", "question": "Is video surveillance of a room possible without an obvious camera or new item?", "answer": true, "facts": [ "Surveillance cameras can be built into light socket covers that look no different from a normal one.", "Surveillance cameras can be installed in special light bulbs to document activity in a room." ], "decomposition": [ "What are the various types of surveillance cameras based on installation?", "Are some of installed so as to be #1 hidden from view?" ], "evidence": [ [ [ [ "Closed-circuit television-2", "Closed-circuit television-3", "Closed-circuit television-4" ] ], [ [ "Hidden camera-2" ] ] ], [ [ [ "Hidden camera-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Hidden camera-1" ] ], [ [ "Hidden camera-2" ] ] ] ] }, { "qid": "0cafe7ae4858b90c92ab", "term": "1800", "description": "Year", "question": "Did England win any Olympic gold medals in 1800?", "answer": false, "facts": [ "Olympic medals can only be won during the Olympics.", "The Olympics were first held in 1896." ], "decomposition": [ "Which sporting event would England have to participate in to win an Olympic gold medal?", "When was the first modern edition of #1 held?", "Is #2 before or the same as 1800?" ], "evidence": [ [ [ [ "Gold medal-11" ] ], [ [ "Olympic Games-2" ] ], [ [ "Olympic Games-2" ] ] ], [ [ [ "Olympic Games-4" ] ], [ [ "Olympic Games-2" ] ], [ "operation" ] ], [ [ [ "Olympic medal-1" ] ], [ [ "Olympic Games-2" ] ], [ "operation" ] ] ] }, { "qid": "6201dfb1c934528c9a62", "term": "Snake", "description": "limbless, scaly, elongate reptile", "question": "In the world of Harry Potter, would a snake and skull tattoo be good luck?", "answer": false, "facts": [ "In Harry Potter, a tattoo of a snake and a skull is a symbol of being a \"Death Eater.\"", "Death Eaters are people who follow the word of the dark lord Voldemort, who is considered wicked and cruel.", "Death Eaters are not embraced in the wizarding communities of Harry Potter." 
], "decomposition": [ "In Harry Potter, what does a tattoo of snake and a skull a symbol of?", "Who are #1's?", "Are #2's embraced in the wizarding communities of Harry Potter?" ], "evidence": [ [ [ [ "Magic in Harry Potter-85" ] ], [ [ "Death Eater-1" ] ], [ [ "Death Eater-1", "Order of the Phoenix (fictional organisation)-1" ] ] ], [ [ [ "Magic in Harry Potter-85" ] ], [ [ "Death Eater-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Magic in Harry Potter-85" ] ], [ [ "Death Eater-1" ] ], [ [ "Lord Voldemort-2" ], "operation" ] ] ] }, { "qid": "72d2e2d76ec01e97f88f", "term": "Comma", "description": "Punctuation mark", "question": "Would a Fakir be surprised if they saw a comma in their religious book?", "answer": true, "facts": [ "A Fakir is a Muslim Sufi holy man or woman that lives a simple life.", "The holy book for Muslims is the Quran.", "The comma is a punctuation mark in modern language.", "The Quran does not use any forms of modern punctuation." ], "decomposition": [ "What religion is a Fakir from?", "What is the name of #1's Holy Book?", "What kind of punctuation mark is the comma?", "Is #2 written without using #3?" ], "evidence": [ [ [ [ "Fakir-1" ] ], [ [ "Quran-1" ] ], [ [ "Comma-1" ] ], [ "no_evidence" ] ], [ [ [ "Fakir-1" ] ], [ [ "Quran-1" ] ], [ [ "Comma-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Fakir-1" ] ], [ [ "Quran-1" ] ], [ [ "Comma-2" ] ], [ [ "Classical Arabic-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "9e616f744f4084608c54", "term": "Sesame Street", "description": "American children's television program", "question": "Was Elmo an original muppet character on Sesame Street?", "answer": false, "facts": [ "Sesame Street started in 1969.", "Elmo first appeared on the show in 1980." ], "decomposition": [ "When did Sesame Street make its debut?", "When did Elmo first appear on Sesame Street?", "Is #2 the same as #1?" ], "evidence": [ [ [ [ "Sesame Street-1" ] ], [ [ "Elmo-3" ] ], [ "operation" ] ], [ [ [ "Sesame Street-1" ] ], [ [ "Elmo-3" ] ], [ "operation" ] ], [ [ [ "Sesame Street-21" ], "no_evidence" ], [ [ "Sesame Street-7" ], "operation" ], [ "operation" ] ] ] }, { "qid": "99853e416a63527f954e", "term": "San Diego County, California", "description": "County in California, United States", "question": "Is San Diego County the home of a Shamu?", "answer": true, "facts": [ "Shamu is the name of Sea World's mascot orca.", "Every Sea World has a Shamu.", "There is a Sea World location in San Diego." ], "decomposition": [ "What is Shamu the name of?", "Where can you find #1?", "Is there a #2 in San Diego?" ], "evidence": [ [ [ [ "Shamu-1" ] ], [ [ "SeaWorld San Diego-1", "SeaWorld-1" ] ], [ "operation" ] ], [ [ [ "Shamu-1" ] ], [ [ "Captive killer whales-19" ] ], [ [ "SeaWorld San Diego-1" ] ] ], [ [ [ "Shamu-1" ] ], [ [ "SeaWorld San Diego-27" ] ], [ [ "SeaWorld San Diego-27" ], "operation" ] ] ] }, { "qid": "fee20d28322885672ccf", "term": "Bob Marley", "description": "Jamaican singer-songwriter", "question": "Is sunscreen unhelpful for the condition that killed Bob Marley?", "answer": true, "facts": [ "Bob Marley died of acral lentiginous melanoma ", "Acral lentiginous melanoma occurs on skin that may not have any sun exposure " ], "decomposition": [ "What disease killed Bob Marley?", "What is the cause of #1?", "Would sunscreen help with preventing #2?" 
], "evidence": [ [ [ [ "Bob Marley-4" ] ], [ [ "Acral lentiginous melanoma-4" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Bob Marley-4" ] ], [ [ "Acral lentiginous melanoma-1" ] ], [ "operation" ] ], [ [ [ "Bob Marley-26" ] ], [ [ "Acral lentiginous melanoma-4" ] ], [ [ "Acral lentiginous melanoma-1" ] ] ] ] }, { "qid": "7cf376afb78c20dd6f18", "term": "Stanford University", "description": "Private research university in Stanford, California", "question": "Was John Gall from same city as Stanford University?", "answer": true, "facts": [ "John Gall is a former major league baseball player born in Stanford, California.", "Stanford University was founded by Leland and Jane Stanford in Stanford, California." ], "decomposition": [ "Where was John Gall (baseball player) born?", "Where is Stanford University located?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "John Gall (baseball)-2" ] ], [ [ "Stanford University-1" ] ], [ "operation" ] ], [ [ [ "John Gall (baseball)-2" ] ], [ [ "Stanford University-1" ] ], [ "operation" ] ], [ [ [ "John Gall (baseball)-2" ] ], [ [ "Stanford University-1" ] ], [ "operation" ] ] ] }, { "qid": "2b2a1d9d526e9a99a8ca", "term": "Snowboarding", "description": "winter sport", "question": "Snowboarding is a rarity in Hilo?", "answer": true, "facts": [ "Snowboarding is a sport that involves descending snow-covered slopes.", "Hilo, a town in Hawaii, has not had snow in almost 200 years." ], "decomposition": [ "What kind of surface is needed for snowboarding?", "Is #1 likely to be nonexistent in Hilo, going by the prevailing climatic conditions?" ], "evidence": [ [ [ [ "Snowboarding-1" ] ], [ [ "Hilo, Hawaii-12" ], "operation" ] ], [ [ [ "Snowboarding-1" ] ], [ [ "Hilo, Hawaii-13" ], "operation" ] ], [ [ [ "Snowboarding-1" ] ], [ [ "Hilo, Hawaii-12" ], "operation" ] ] ] }, { "qid": "07ea848474b8dff89299", "term": "Portuguese people", "description": "ethnic group", "question": "Did King of Portuguese people in 1515 have familial ties to the Tudors?", "answer": true, "facts": [ "Manuel I was King of Portugal from 1495 to 1521.", "Manuel I of Portugal married Maria of Aragon.", "Maria of Aragon was the sister of Catherine of Aragon.", "Catherine of Aragon was the first wife of Henry VIII and was one of a handful that he did not behead." ], "decomposition": [ "Who was the King of Portugal in 1515?", "Who were in #1's immediate family?", "Were any of #2 related to the Tudors?" ], "evidence": [ [ [ [ "Manuel I of Portugal-1" ] ], [ [ "Manuel I of Portugal-5" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Manuel I of Portugal-1" ] ], [ [ "Isabella of Portugal-4" ] ], [ [ "Mary I of England-1", "Philip II of Spain-1", "Philip II of Spain-2" ], "operation" ] ], [ [ [ "Manuel I of Portugal-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5e13590738c00273d4d7", "term": "Darth Vader", "description": "fictional character in the Star Wars franchise", "question": "Could Darth Vader hypothetically catch the Coronavirus?", "answer": false, "facts": [ "The Coronavirus is transferred through infected droplets that can get into eyes, nose, or mouth.", "Darth Vader permanently wears an iron weave helmet that he needs to breathe." ], "decomposition": [ "How is the Coronavirus transferred?", "What does Darth Vader wear on his head?", "Can #1's get through #2?" 
], "evidence": [ [ [ [ "Coronavirus disease 2019-13" ] ], [ [ "Darth Vader-16" ] ], [ [ "Coronavirus disease 2019-4" ] ] ], [ [ [ "Coronavirus-21" ] ], [ [ "Darth Vader-15" ] ], [ "operation" ] ], [ [ [ "Coronavirus-21" ] ], [ [ "Darth Vader-15" ] ], [ [ "Coronavirus disease 2019-13" ], "no_evidence" ] ] ] }, { "qid": "f6dd9567a3fd14eec5d4", "term": "Funeral", "description": "ceremony for a person who has died", "question": "Is it normal to blow out candles during a funeral?", "answer": false, "facts": [ "Blowing out candles is typically done during a birthday celebration, prior to eating the birthday cake.", "Funerals are typically very somber events in which cake is not served." ], "decomposition": [ "Blowing out candles is a typical part of which celebration?", "What kind of aura is naturally associated with #1?", "Is the atmosphere in a funeral typically similar to #2?" ], "evidence": [ [ [ [ "Party-5" ] ], [ [ "Party-1" ] ], [ [ "Funeral-88" ] ] ], [ [ [ "Birthday cake-7" ] ], [ [ "Joy-1" ] ], [ [ "Funeral-1" ], "operation" ] ], [ [ [ "Birthday cake-9" ] ], [ [ "Happy, Happy Birthday Baby-1" ] ], [ [ "Funeral-1" ] ] ] ] }, { "qid": "06adc3a703c49b96a7e5", "term": "Artillery", "description": "Heavy ranged guns or weapons", "question": "Would a slingshot be improperly classified as artillery?", "answer": true, "facts": [ "Artillery refers to ranged weaponry that is predominantly used in breaching fortifications.", "Examples of artillery include: howitzers, mortars, and rockets.", "Mortars can have a range up to 4,680m.", "A slingshot is a string weapon that propels a rock or other small projectile.", "Some slingshots can fire projectiles up to 9m." ], "decomposition": [ "What are the basic characteristics of a weapon considered artillery?", "Does a slingshot fail to possess all of #1?" ], "evidence": [ [ [ [ "Artillery-9" ] ], [ [ "Slingshot-8" ] ] ], [ [ [ "Artillery-1" ] ], [ [ "Slingshot-1" ], "operation" ] ], [ [ [ "Artillery-1" ] ], [ "operation" ] ] ] }, { "qid": "9fb41cefb010f47033f3", "term": "Gladiator", "description": "combatant who entertained audiences in the Roman Republic and Roman Empire", "question": "Did Gladiator's weapon of choice require less hands than Soul Calibur's Faust?", "answer": true, "facts": [ "Faust is a zweihander sword in the Soul Calibur video game series.", "A zweihander is a giant sword that requires two hands to wield.", "Gladiators used the Gladius which was a short one handed sword." ], "decomposition": [ "Faust in the Soul Calibur video game series is what kind of sword?", "How many hands would be needed to lift #1?", "How many hands would be needed to lift a typical Gladiator's sword?", "Is #3 less than #2?" ], "evidence": [ [ [ [ "Siegfried and Nightmare-17" ] ], [ [ "Siegfried and Nightmare-17" ] ], [ [ "Gladius-1", "Gladius-2" ] ], [ "operation" ] ], [ [ [ "Siegfried and Nightmare-17" ] ], [ "operation" ], [ [ "Gladius-20" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Soulcalibur (video game)-1" ], "no_evidence" ], [ "no_evidence" ], [ [ "Gladius-1", "Gladius-2" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "27fc311d200f621edcbc", "term": "Flying Spaghetti Monster", "description": "Chief deity of Pastafarianism", "question": "Is the Flying Spaghetti Monster part of an ancient pantheon?", "answer": false, "facts": [ "Ancient polytheistic religions are generally no longer popular in Western culture.", "The Church of the Flying Spaghetti Monster was established in 2006 after the creation of the FSM itself in 2005." 
], "decomposition": [ "What time period is considered ancient?", "What religion is the Flying Spaghetti Monster part of?", "Was #2 established during #1?" ], "evidence": [ [ [ [ "Ancient history-2" ] ], [ [ "Flying Spaghetti Monster-1" ] ], [ [ "Flying Spaghetti Monster-3" ], "operation" ] ], [ [ [ "Ancient history-2" ] ], [ [ "Flying Spaghetti Monster-1" ] ], [ [ "Ancient history-2", "Flying Spaghetti Monster-2" ], "operation" ] ], [ [ [ "Ancient history-2" ] ], [ [ "Flying Spaghetti Monster-5" ] ], [ "operation" ] ] ] }, { "qid": "40a10c0c2ae965cd067d", "term": "Saddam Hussein", "description": "Iraqi politician and President", "question": "Did Saddam Hussein witness the inauguration of Donald Trump?", "answer": false, "facts": [ "Saddam Hussein died on December 30th, 2006.", "Donald Trump was inaugurated as the President of the United States on January 20, 2017." ], "decomposition": [ "When did Saddam Hussein die?", "When was Donald Trump inaugurated as President?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Saddam Hussein-101" ] ], [ [ "Timeline of the Donald Trump presidency-1" ] ], [ "operation" ] ], [ [ [ "Saddam Hussein-4" ] ], [ [ "Inauguration of Donald Trump-1" ] ], [ "operation" ] ], [ [ [ "Saddam Hussein-1" ] ], [ [ "Inauguration of Donald Trump-1" ] ], [ "operation" ] ] ] }, { "qid": "601db2d5fba830c71962", "term": "Batman (1989 film)", "description": "1989 film directed by Tim Burton", "question": "Is Batman (1989 film) likely to be shown on flight from NY to Kansas City?", "answer": true, "facts": [ "A flight from NY to Kansas City is four and a half hours.", "The run time of Batman (1989 film) is two hours and six minutes.", "Batman (1989 film) is rated PG-13", "The average age group of passengers is 18-34.", "Airlines have relaxed their rules for in-flight movies in last few years and even R rated movies have been shown." ], "decomposition": [ "How long is a flight from NY to Kansas City?", "How long is the 1989 Batman film? ", "Is #2 less than #1?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Kansas City metropolitan area-1", "New York City-1" ], "no_evidence" ], [ [ "Batman (1989 film)-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Flight length-5" ], "no_evidence" ], [ [ "Batman (1989 film)-23" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2df30aa0fc5bdc229dce", "term": "Eric Clapton", "description": "English musician, singer, songwriter, and guitarist", "question": "Could Eric Clapton's children play a regulation game of basketball among themselves?", "answer": false, "facts": [ "NBA regulations require two teams of five players each for a game, for a total of 10 players.", "Eric Clapton has 5 children." ], "decomposition": [ "How many players are required for a regulation game of basketball?", "How many children does Eric Clapton have?", "Is #2 greater than or equal to #1?" 
], "evidence": [ [ [ [ "Basketball-1" ] ], [ [ "Eric Clapton-78", "Eric Clapton-79", "Eric Clapton-80" ] ], [ "operation" ] ], [ [ [ "Basketball-1" ] ], [ [ "Eric Clapton-78", "Eric Clapton-79", "Eric Clapton-80" ] ], [ "operation" ] ], [ [ [ "Basketball-1" ] ], [ [ "Eric Clapton-78", "Eric Clapton-79", "Eric Clapton-80" ] ], [ "operation" ] ] ] }, { "qid": "10bcf9c1d9026f741f75", "term": "Supreme Court of Canada", "description": "highest court of Canada", "question": "Can the Supreme Court of Canada fight a Lucha trios match?", "answer": true, "facts": [ "A Lucha trios match requires at least two teams of three wrestlers each", "The Supreme Court of Canada has nine justices" ], "decomposition": [ "How many Justices are in the Supreme Court of Canada?", "What is the total number of people needed to fight in a Lucha trios match?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Supreme Court of Canada-5" ] ], [ [ "Lucha libre-16" ] ], [ "operation" ] ], [ [ [ "Supreme Court of Canada-5" ] ], [ [ "Lucha libre-1" ] ], [ "operation" ] ], [ [ [ "Supreme Court of Canada-30" ] ], [ [ "Lucha libre-1" ] ], [ "operation" ] ] ] }, { "qid": "4f4e7cbebe4b1b2bdb13", "term": "Sudoku", "description": "Logic-based number-placement puzzle", "question": "Do you need different colored pens for sudoku?", "answer": false, "facts": [ "Sudoku is played both online and offline.", "Sudoku has no color component in the game." ], "decomposition": [ "How is Sudoku played?", "Is color necessary to do #1?" ], "evidence": [ [ [ [ "Sudoku-1" ] ], [ [ "Sudoku-1" ] ] ], [ [ [ "Sudoku-1" ] ], [ "operation" ] ], [ [ [ "Sudoku-1" ] ], [ "operation" ] ] ] }, { "qid": "2e95e20da8a6cfacaa83", "term": "Dessert", "description": "A course that concludes a meal; usually sweet", "question": "Would an ancient visitor to Persia probably consume crocus threads?", "answer": true, "facts": [ "Ancient Persians would have several desserts after a simple meal", "Saffron is made from crocus styles or threads", "Saffron is a common ingredient in Persian desserts" ], "decomposition": [ "What would Ancient Persians typically have after a simple meal?", "What was a common ingredient in #1?", "Is #2 made from crocus threads?" ], "evidence": [ [ [ [ "History of saffron-16" ] ], [ [ "History of saffron-16" ] ], [ [ "Saffron-1" ] ] ], [ [ [ "Tahchin-1" ], "no_evidence" ], [ [ "Saffron (color)-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Crocus sativus-6" ], "no_evidence", "operation" ] ] ] }, { "qid": "4931b5e5a55f092e40a8", "term": "The Who", "description": "English rock band", "question": "Would the Who concert in international space station be audible?", "answer": true, "facts": [ "Sound travels through the vibration of atoms and molecules in a medium (such as air or water). ", "There is air in the international space station. " ], "decomposition": [ "What is the most common medium of transmission of sound?", "Is #1 present in the international space station?" 
], "evidence": [ [ [ [ "Speech science-10" ], "no_evidence" ], [ [ "International Space Station-90" ], "no_evidence", "operation" ] ], [ [ [ "Transmission medium-2" ], "no_evidence" ], [ [ "Space station-20" ], "no_evidence" ] ], [ [ [ "Sound-1", "Sound-5" ] ], [ [ "International Space Station-89" ], "operation" ] ] ] }, { "qid": "ee62f46a39caaa860b26", "term": "King Kong (2005 film)", "description": "2005 film directed by Peter Jackson", "question": "Was King Kong (2005 film) solvent?", "answer": true, "facts": [ "Solvent refers to the assets of a project being greater than the liabilities.", "The assets of a movie film are the box office receipts, and the liabilities is the budget.", "King Kong (2005) had box office receipts of 562 million.", "King Kong (2005) had a budget of 207 million." ], "decomposition": [ "What does it mean to be solvent in business/finance?", "What was the budget (liabilities) of the 2005 movie King Kong?", "How much did 2005 movie King Kong gross (assets) worldwide?", "Does #3 compare favorably with #2 as defined in #1?" ], "evidence": [ [ [ [ "Solvency-1" ] ], [ [ "King Kong (2005 film)-2" ] ], [ [ "King Kong (2005 film)-2" ] ], [ "operation" ] ], [ [ [ "Solvency-1" ] ], [ [ "King Kong (2005 film)-2" ] ], [ [ "King Kong (2005 film)-2" ] ], [ "operation" ] ], [ [ [ "Solvency-1" ] ], [ [ "King Kong (2005 film)-2" ] ], [ [ "King Kong (2005 film)-2" ] ], [ "operation" ] ] ] }, { "qid": "2b5b926a732d43620065", "term": "Don't ask, don't tell", "description": "Former policy on gay people serving in the United States military", "question": "During the time immediately after 9/11, was don't ask don't tell still in place?", "answer": true, "facts": [ "Don't ask don't tell was the official military policy for LGBT service members until 2011.", "9/11 Occured on September 11th, 2001." ], "decomposition": [ "Until what year was \"Don't ask; Don't tell.\" in place?", "In what year did 9/11 occur?", "Is #1 more recent than #2?" ], "evidence": [ [ [ [ "Don't ask, don't tell-23" ] ], [ [ "Post-9/11-2" ] ], [ "operation" ] ], [ [ [ "Don't ask, don't tell-1" ] ], [ [ "September 11 attacks-1" ] ], [ "operation" ] ], [ [ [ "Don't ask, don't tell-1" ] ], [ [ "September 11 attacks-1" ] ], [ "operation" ] ] ] }, { "qid": "b2c86a150121834c668c", "term": "Doctor Who", "description": "British science fiction TV series", "question": "Would the 10th doctor enjoy a dish of stuffed pears?", "answer": false, "facts": [ "The 10th Doctor in David Who is played by David Tennant.", "In multiple episodes of the series, the 10th doctor mentions that he hates pears." ], "decomposition": [ "Who is the 10th Doctor?", "Does #1 like pears?" ], "evidence": [ [ [ [ "Tenth Doctor-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Tenth Doctor-1" ] ], [ "no_evidence" ] ], [ [ [ "Tenth Doctor-14" ] ], [ [ "Pear-5" ], "operation" ] ] ] }, { "qid": "bf739e062c0321178c7a", "term": "The Great Gatsby", "description": "1925 novel by F. Scott Fitzgerald", "question": "Will speed reader devour The Great Gatsby before the Raven?", "answer": false, "facts": [ "F. Scott Fitzgerald's The Great Gatsby is 218 pages.", "Edgar Allan Poe's The Raven is 42 pages." ], "decomposition": [ "How many pages does The Great Gatsby have?", "How many pages does The Raven have?", "Is #2 greater than #1?" 
], "evidence": [ [ [ [ "The Great Gatsby-1" ], "no_evidence" ], [ [ "The Raven-19" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Great Gatsby-1", "The Great Gatsby-24" ], "no_evidence" ], [ [ "The Raven-15" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "f1d755a68c3627fa938a", "term": "Cook (profession)", "description": "occupation involving cooking food", "question": "Can Michael Jordan become a professional cook in America? ", "answer": true, "facts": [ "Based on the American Culinary Federation, the minimum requirements for entering culinary apprenticeships include being 17 years old and having a high school diploma or equivalent.", "Michael Jordan graduated from Laney High School in 1981.", "Michael Jordan was born on February 17, 1963, which makes him 57 years old in 2020." ], "decomposition": [ "What are the minimum requirements to become a professional cook in America?", "Does Michael Jordan satisfy all of #1?" ], "evidence": [ [ [ [ "Cook (profession)-16" ] ], [ [ "Michael Jordan-2" ], "operation" ] ], [ [ [ "Chef-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Chef-17" ] ], [ [ "Michael Jordan-1" ], "no_evidence" ] ] ] }, { "qid": "943cee73791171355fef", "term": "Elizabeth II", "description": "Queen of the United Kingdom and the other Commonwealth realms", "question": "Does Elizabeth II reign over the Balearic Islands?", "answer": false, "facts": [ "Queen Elizabeth II is the monarch of the United Kingdom and its commonwealth", "The Balearic Islands are part of the country of Spain" ], "decomposition": [ "What are all the areas Queen Elizabeth II rules over?", "What country owns the Balearic Islands?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Monarchy of the United Kingdom-1" ] ], [ [ "Balearic Islands-1" ] ], [ "operation" ] ], [ [ [ "Commonwealth realm-1" ] ], [ [ "Balearic Islands-3" ] ], [ "operation" ] ], [ [ [ "Commonwealth realm-1" ] ], [ [ "Balearic Islands-1" ] ], [ "operation" ] ] ] }, { "qid": "47ba7dfed8eef54588a8", "term": "Toyota Prius", "description": "Hybrid electric automobile", "question": "Can a microwave melt a Toyota Prius battery?", "answer": false, "facts": [ "A Toyota Prius uses a 202 V nickel-metal hydride battery.", "Nickel has a melting point of 2651 F.", "Microwaves rarely warm food more than 212 F." ], "decomposition": [ "What kind of battery does a Toyota Prius use?", "What type of material is #1 made out of?", "What is the melting point of #2?", "Can a microwave's temperature reach at least #3?" 
], "evidence": [ [ [ [ "Toyota Prius-53" ] ], [ "operation" ], [ [ "Lanthanum-5" ] ], [ [ "Microwave oven-50" ] ] ], [ [ [ "Toyota Prius-53" ] ], [ [ "Nickel–metal hydride battery-1" ] ], [ [ "Nickel–cadmium battery-7" ], "no_evidence" ], [ [ "Microwave oven-45" ], "no_evidence", "operation" ] ], [ [ [ "Toyota Prius (XW20)-4" ] ], [ [ "Toyota Prius (XW20)-4" ] ], [ [ "Nickel-1" ], "no_evidence" ], [ [ "Microwave oven-3" ], "no_evidence" ] ] ] }, { "qid": "1b3dfeb62e7e27c2461a", "term": "Irish mythology", "description": "Pre-Christian Mythology of Ireland", "question": "Did Irish mythology inspire Washington Irving?", "answer": true, "facts": [ "Washington Irving's most famous novel was The Legend of Sleepy Hollow.", "The antagonist of the Legend of Sleepy Hollow, The Headless Horseman, was a man on horseback that was missing his head.", "The Dullahan is a mythological creature in Irish mythology.", "The Dullahan comes from the Irish word Gan Ceann, meaning \"without a head\".", "The Dullahan is depicted as a headless rider, usually on a black horse, who carries their own head in their arm." ], "decomposition": [ "Which Washington Irving novel was the most popular?", "What are the features of the antagonist of #1?", "What are the features of Dullahan from Irish mythology?", "Does #2 considerably match #3?" ], "evidence": [ [ [ [ "The Legend of Sleepy Hollow-1" ] ], [ [ "The Legend of Sleepy Hollow-1" ] ], [ [ "Dullahan-2" ] ], [ "operation" ] ], [ [ [ "The Legend of Sleepy Hollow-1" ], "no_evidence" ], [ [ "The Legend of Sleepy Hollow-2" ] ], [ [ "Dullahan-2" ] ], [ "operation" ] ], [ [ [ "Washington Irving-1" ] ], [ [ "Headless Horseman-1" ] ], [ [ "Dullahan-2" ] ], [ "operation" ] ] ] }, { "qid": "3103007c7b0ca91df965", "term": "Times Square", "description": "Neighborhood in Manhattan in New York City, New York", "question": "At midnight in Times Square on New Years Eve, are you likely to meet people in diapers?", "answer": true, "facts": [ "Times Square holds an enormous gathering on New Year's Eve.", "There are no portable restrooms in Times Square.", "People begin waiting for the midnight event in Times Square before 10 a.m." ], "decomposition": [ "How early do people arrive to wait for the ball to drop on New Years Eve?", "When does the ball drop?", "How much time is between #1 and #2?", "Within #3 hours, would someone need to use the bathroom?" ], "evidence": [ [ [ [ "New Year's Eve-37" ], "no_evidence" ], [ [ "New Year's Eve-37" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "New Year's Eve-39" ], "no_evidence" ], [ [ "Time ball-8" ] ], [ "operation" ], [ [ "Urinary system-8" ] ] ], [ [ [ "Times Square-39" ], "no_evidence" ], [ [ "Times Square Ball-8" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "cdd68b5d76364d35779f", "term": "United States Air Force", "description": "Air and space warfare branch of the United States Armed Forces", "question": "Would a member of the United States Air Force get a discount at Dunkin Donuts?", "answer": true, "facts": [ "The United States Air Force is part of the military.", "Dunkin Donuts offers a military discount. " ], "decomposition": [ "What is the The United States Air Force a branch of?", "What groups of people get a discount at Dunkin Donuts?", "Is there any overlap between #1 and #2?" 
], "evidence": [ [ [ [ "United States Air Force-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "United States Air Force-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "United States Air Force-1" ] ], [ [ "Discounts and allowances-30" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "111dac3a619eaba99951", "term": "Bitcoin", "description": "decentralized cryptocurrency", "question": "Can you put bitcoin in your pocket?", "answer": true, "facts": [ "Bitcoin is a digital crypto currency.", "Bitcoin can be stored in tangible wallets, called hard wallets.", "Cryptocurrency hard wallets are the size of a thumb drive.", "Thumb drives can fit in your pocket." ], "decomposition": [ "What kind of currency is bitcoin?", "What are some common ways of storing #1?", "Which of #2 is a physical object?", "Is #3 small enough to fit in a pocket?" ], "evidence": [ [ [ [ "Bitcoin-1" ] ], [ [ "Bitcoin-44" ] ], [ [ "Bitcoin-47" ] ], [ "no_evidence" ] ], [ [ [ "Cryptocurrency-2" ] ], [ [ "Bitcoin-47" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Bitcoin-1" ] ], [ [ "Bitcoin-46", "Bitcoin-49" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "a73446e82b72b4a99e5b", "term": "Glutamic acid", "description": "amino acid", "question": "Does Masaharu Morimoto rely on glutamic acid?", "answer": true, "facts": [ "Masaharu Morimoto is a Japanese chef", "Japanese cuisine relies on several forms of seaweed as ingredients and flavorings for broth like kombu dashi", "Glutamic acid has been identified as the flavoring component in kombu seaweed" ], "decomposition": [ "What is Masaharu Morimoto's profession?", "What cuisine does #1 make?", "What is a main ingredient in #2?", "Is glutamic acid a flavoring component in #3?" ], "evidence": [ [ [ [ "Masaharu Morimoto-1" ] ], [ [ "Masaharu Morimoto-2" ] ], [ [ "Monosodium glutamate-2" ] ], [ [ "Glutamic acid-3" ] ] ], [ [ [ "Masaharu Morimoto-1" ] ], [ [ "Masaharu Morimoto-1" ] ], [ [ "Rice-8" ] ], [ "no_evidence" ] ], [ [ [ "Masaharu Morimoto-1" ] ], [ [ "Masaharu Morimoto-1" ] ], [ [ "Japanese cuisine-2", "Soy sauce-6" ], "no_evidence" ], [ [ "Glutamic acid-22" ], "operation" ] ] ] }, { "qid": "82059a3aaefe9bb821d9", "term": "Football War", "description": "1969 War between Honduras and El Salvador", "question": "Did the Football War last at least a month?", "answer": false, "facts": [ "The Football War began on July 14 1969.", "It ended on July 20 1969.", "Therefore, it did not even last a whole week." ], "decomposition": [ "How long did the Football War last?", "Is #1 greater than or equal to a month?" ], "evidence": [ [ [ [ "Football War-1" ] ], [ "operation" ] ], [ [ [ "Football War-1" ] ], [ "operation" ] ], [ [ [ "Football War-1" ] ], [ "operation" ] ] ] }, { "qid": "b7ee54ccf00c2de84abb", "term": "Koala", "description": "An arboreal herbivorous marsupial native to Australia.", "question": "Would a nickel fit inside a koala pouch?", "answer": true, "facts": [ "Koala joeys (babies) enter their mother's pouch when they are about 2 to 3 centimeters long.", "An American nickel is 2.12 centimeters in diameter." ], "decomposition": [ "Who usually sits in a koala's pouch?", "What is the size of #1?", "How big is a nickel?", "Is #2 more than #3?" 
], "evidence": [ [ [ [ "Koala-2" ] ], [ [ "Koala-25", "Marsupial-26" ] ], [ [ "Nickel (United States coin)-1" ] ], [ "operation" ] ], [ [ [ "Koala-2" ] ], [ [ "Koala-1" ], "no_evidence" ], [ [ "Nickel (United States coin)-1" ] ], [ "operation" ] ], [ [ [ "Koala-2" ] ], [ [ "Koala-23", "Koala-24" ] ], [ [ "Nickel (United States coin)-1" ] ], [ "operation" ] ] ] }, { "qid": "16d8da02bc5e1975a1d9", "term": "Christmas carol", "description": "Song or hymn or carol on the theme of Christmas", "question": "When the shuttle Columbia 11 landed, was it the season for Christmas carols?", "answer": true, "facts": [ "The Columbia 11 shuttle landed on December 10th 1990.", "Christmas is celebrated during the month of December every year." ], "decomposition": [ "What month did the space shuttle Columbia 11 land?", "In what month are Christmas carols typically sung?", "Are #1 and #2 the same answer?" ], "evidence": [ [ [ [ "STS-40-1" ], "no_evidence" ], [ [ "Christmas-1" ] ], [ "operation" ] ], [ [ [ "STS-40-7" ] ], [ [ "Christmas and holiday season-1", "Christmas carol-1" ] ], [ "operation" ] ], [ [ [ "Space Shuttle Columbia-1" ], "no_evidence" ], [ [ "Christmas and holiday season-1", "Christmas carol-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "bb9c5fc3b21e60bf9c7f", "term": "Patronage", "description": "support that one organization or individual bestows to another", "question": "Was Lorenzo de Medici's patronage of Da Vinci exclusive?", "answer": false, "facts": [ "Lorenzo de Medici was a great patron of the arts in Florence and served as a patron for Leonardo Da Vinci.", "Lorenzo de Medici was also a patron of the artist Sandro Boticelli.", "Leonardo Da Vinci had many patrons including Ludovico Sforza and Cesare Borgia." ], "decomposition": [ "What artists did Lorenzo de Medici support?", "How many people are listed in #1?", "Is #2 equal to one?" ], "evidence": [ [ [ [ "Lorenzo de' Medici-1" ] ], [ [ "Lorenzo de' Medici-1" ] ], [ "operation" ] ], [ [ [ "Lorenzo de' Medici-13" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Lorenzo de' Medici-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "694ac8d334fc4545ee31", "term": "Kurt Cobain", "description": "American singer, composer, and musician", "question": "Did Kurt Cobain's music genre survive after his death?", "answer": true, "facts": [ "Kurt Cobain was the lead singer of Nirvana.", "Nirvana's music is classified as Grunge rock.", "Kurt Cobain died on April 5, 1994.", "Some of the major Grunge rock bands included Alice in Chains, Pearl Jam, and Soundgarden.", "Alice in Chains and Pearl Jam released their latest albums in 2018 and 2020 respectively." ], "decomposition": [ "What is the musiucal genre associated with both Kurt Cobain and Pearl Jam?", "What year did Kurt Cobain die? ", "Did Pearl Jam release a #1 genre album after #2?", "Is #3 yes?" ], "evidence": [ [ [ [ "Grunge-2" ] ], [ [ "Kurt Cobain-1" ] ], [ [ "No Code-1" ] ], [ [ "No Code-1" ] ] ], [ [ [ "Kurt Cobain-2", "Pearl Jam-2" ] ], [ [ "Kurt Cobain-55" ] ], [ [ "Vitalogy-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Grunge-2" ] ], [ [ "Kurt Cobain-1" ] ], [ [ "Lightning Bolt (Pearl Jam album)-11" ] ], [ "operation" ] ] ] }, { "qid": "2fa554b4794001df3629", "term": "Forbidden City", "description": "Art museum, Imperial Palace, Historic site in Beijing, China", "question": "Are people banned from entering the Forbidden City?", "answer": false, "facts": [ "The Forbidden City is a tourist attraction.", "Tourist attractions allow people to enter." 
], "decomposition": [ "Is the Forbidden City a tourist attraction?", "Are tourist attractions open to the public?", "Are the answers to #1 and #2 the same?" ], "evidence": [ [ [ [ "History of the Forbidden City-12" ] ], [ [ "Tourist attraction-1" ] ], [ "operation" ] ], [ [ [ "Forbidden City-3" ] ], [ [ "Tourist attraction-1" ] ], [ "operation" ] ], [ [ [ "Forbidden City-3" ] ], [ [ "Tourist attraction-1" ] ], [ "operation" ] ] ] }, { "qid": "7e552e58565771a2008c", "term": "Noah's Ark", "description": "the vessel in the Genesis flood narrative", "question": "Were there eight humans on Noah's Ark?", "answer": true, "facts": [ "Noah only took his family aboard the Ark.", "Noah brought his wife, three sons, and his sons' wives.", "Four couples lived on the Ark, eight total people." ], "decomposition": [ "How many people entered Noah's Ark?", "Is #1 greater than or equal to eight?" ], "evidence": [ [ [ [ "Wives aboard Noah's Ark-6" ] ], [ [ "Wives aboard Noah's Ark-6" ] ] ], [ [ [ "Wives aboard Noah's Ark-6" ] ], [ "operation" ] ], [ [ [ "Wives aboard Noah's Ark-1" ] ], [ "operation" ] ] ] }, { "qid": "ea8a51f7a2d1d21096a5", "term": "Nikola Tesla", "description": "Serbian American inventor", "question": "Was Nikola Tesla's home country involved in the American Civil War?", "answer": false, "facts": [ "Nikola Tesla was born in the Austrian Empire", "The American Civil War was a domestic American conflict" ], "decomposition": [ "What country was Nikola Tesla born in?", "What countries were involved in the American Civil War?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Nikola Tesla-5" ] ], [ [ "American Civil War-1" ] ], [ "operation" ] ], [ [ [ "Nikola Tesla-2" ] ], [ [ "American Civil War-1", "American Civil War-7" ] ], [ "operation" ] ], [ [ [ "Nikola Tesla-5" ] ], [ [ "American Civil War-1" ] ], [ "operation" ] ] ] }, { "qid": "f3e238989015dd72bfda", "term": "Queen Elizabeth The Queen Mother", "description": "Queen consort of King George VI, mother of Queen Elizabeth II", "question": "Did Queen Elizabeth The Queen Mother and her daughter share name with Tudor queen?", "answer": true, "facts": [ "Queen Elizabeth the Queen Mother gave birth to Queen Elizabeth II in 1926.", "The Tudor dynasty had a number of Queens including: Mary I of England, Elizabeth I of England, and Margaret Tudor, Queen of Scots." ], "decomposition": [ "Which name did the Queen Mother and Queen Elizabeth have in common?", "What are the names of some queens from the Tudor dynasty?", "Is #1 included in any of #2?" ], "evidence": [ [ [ [ "Elizabeth II-1", "Queen Elizabeth The Queen Mother-1" ] ], [ [ "House of Tudor-1" ] ], [ "operation" ] ], [ [ [ "Queen Elizabeth The Queen Mother-1" ] ], [ [ "Elizabeth I of England-1", "Mary I of England-1" ] ], [ "operation" ] ], [ [ [ "Queen Elizabeth The Queen Mother-1" ] ], [ [ "House of Tudor-1" ] ], [ "operation" ] ] ] }, { "qid": "4bb58d0456bfea654c0f", "term": "Compact disc", "description": "Optical disc for storage and playback of digital audio", "question": "Did John Lennon listen to Compact discs?", "answer": false, "facts": [ "The Compact disc was released in 1982 by Philips and Sony.", "John Lennon was killed on December 8, 1980." ], "decomposition": [ "When were Compact Discs first available for use?", "When did John Lennon die?", "Is #1 before #2?" 
], "evidence": [ [ [ [ "Compact disc-1" ] ], [ [ "John Lennon-1" ] ], [ "operation" ] ], [ [ [ "Compact disc-1" ] ], [ [ "John Lennon-1" ] ], [ "operation" ] ], [ [ [ "Compact disc-1" ] ], [ [ "John Lennon-36" ] ], [ "operation" ] ] ] }, { "qid": "a0362ec2c3b195e02315", "term": "Last Supper", "description": "Final meal that, in the Gospel accounts, Jesus shared with his apostles in Jerusalem before his crucifixion", "question": "Is anyone at the Last Supper celebrated in Islam?", "answer": true, "facts": [ "The Last Supper was a meal between Jesus and his twelve disciples in Christianity.", "In Islam, Jesus is one of many revered prophets.", "In Islam, Jesus returns in a Second Coming to fight the \"False Messiah\" and establish peace on earth." ], "decomposition": [ "Who was present at the Last Supper?", "Are any of #1 celebrated in Islam?" ], "evidence": [ [ [ [ "Last Supper-1" ] ], [ [ "Jesus-4" ], "operation" ] ], [ [ [ "Last Supper-1" ] ], [ [ "Jesus in Islam-2" ] ] ], [ [ [ "Apostles-1", "Last Supper-1" ] ], [ [ "Disciples of Jesus in Islam-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "c7a1cb4992a5eafa99ec", "term": "Bengal cat", "description": "Breed of cat", "question": "Would a Bengal cat be afraid of catching a fish?", "answer": false, "facts": [ "Fish live in water. ", "Many Bengal owners say that their Bengal naturally retrieves items.", "Bengal cats often enjoy playing in water." ], "decomposition": [ "Where do fish live?", "What do bengal cats naturally do when they see something?", "Would a bengal cat be able to #2 from #1?" ], "evidence": [ [ [ [ "Fish-1" ] ], [ [ "Bengal cat-20" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Fish-1" ] ], [ [ "Bengal cat-20" ] ], [ [ "Bengal cat-20" ], "operation" ] ], [ [ [ "Fish-5" ] ], [ [ "Bengal cat-20", "Bengal cat-21" ] ], [ "operation" ] ] ] }, { "qid": "b4e09d5ac3429e358bd8", "term": "Tumulus", "description": "Mound of earth and stones raised over graves", "question": "Has a tumulus been discovered on Mars?", "answer": false, "facts": [ "A tumulus is a mound of earth and stones raised over a grave or graves.", "A grave is a location where a dead body (typically that of a human, although sometimes that of an animal) is buried.", "Mars has only been explored by unmanned spacecraft.", "Evidence suggests that the planet was once significantly more habitable than it is today, but whether living organisms ever existed there remains unknown. " ], "decomposition": [ "What do tumulus cover?", "What do #1 contain?", "What is a previous condition for something to be considered now #2?", "Have things with the characteristic of #3 been to Mars?" ], "evidence": [ [ [ [ "Tumulus-1" ] ], [ [ "Grave-1" ] ], [ [ "Death-1" ] ], [ [ "Mars-58" ], "operation" ] ], [ [ [ "Tumulus-1" ] ], [ [ "Tumulus-6" ], "no_evidence" ], [ [ "Funeral-1" ], "no_evidence" ], [ [ "Life on Mars-1" ], "operation" ] ], [ [ [ "Tumulus-1" ] ], [ [ "Tumulus-1" ] ], [ [ "Grave-1" ] ], [ [ "Mars-20", "Mars-85" ] ] ] ] }, { "qid": "a7b23152e978745fa82b", "term": "League of Legends", "description": "Multiplayer online battle arena video game", "question": "Could Cosmic Girls play League of Legends alone?", "answer": true, "facts": [ "Cosmic Girls is a 13 member kpop group", "League of Legends is a video game requiring two teams of five players each" ], "decomposition": [ "How many players are needed for a League of Legends match?", "How many people are in the group \"Cosmic Girls\"?", "Is #2 greater than or equal to #1?" 
], "evidence": [ [ [ [ "League of Legends: Wild Rift-2" ], "no_evidence" ], [ [ "Cosmic Girls-1" ] ], [ "operation" ] ], [ [ [ "League of Legends: Wild Rift-4" ] ], [ [ "Cosmic Girls-1" ] ], [ "operation" ] ], [ [ [ "League of Legends-10" ] ], [ [ "Cosmic Girls-1" ] ], [ "operation" ] ] ] }, { "qid": "5bed196de6b59f2adf64", "term": "Torso", "description": "the central part of the living body", "question": "is the brain located in the torso?", "answer": false, "facts": [ "The brain is located inside the head.", "The head is located on top of the torso. ", "The torso contains the heart, lungs, and stomach." ], "decomposition": [ "What part of the body stores the brain?", "Is #1 part of the torso?" ], "evidence": [ [ [ [ "Brain-1" ] ], [ [ "Torso-1" ], "operation" ] ], [ [ [ "Skull-1" ] ], [ [ "Torso-1" ] ] ], [ [ [ "Brain-1" ] ], [ [ "Head-3" ] ] ] ] }, { "qid": "3d55b2ce338f4e2cdcf4", "term": "Augustus", "description": "First emperor of the Roman Empire", "question": "Was Augustus his real name?", "answer": false, "facts": [ "Augustus was given the name Gaius Octavius at birth.", "After he was adopted by his uncle Julius Caesar, he took the name Gaius Iulius Caesar.", "He took the name Augustus upon the breaking of the ruling Triumvirate and becoming Emperor." ], "decomposition": [ "What name did Augustus have when he was born?", "Is #1 identical to Augustus?" ], "evidence": [ [ [ [ "Augustus-2" ] ], [ "operation" ] ], [ [ [ "Augustus-2" ] ], [ "operation" ] ], [ [ [ "Augustus-2" ] ], [ "operation" ] ] ] }, { "qid": "42100a5a2f10584fb923", "term": "Spice Girls", "description": "British girl group", "question": "Were the Spice Girls inspired by Little Mix?", "answer": false, "facts": [ "The Spice Girls were formed in 1994 and mainly active during the late 1990s", "Little Mix was formed in 2011" ], "decomposition": [ "When was the English pop group Spice Girls formed?", "When was the British girl group Little Mix formed?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Spice Girls-1" ] ], [ [ "Little Mix-1" ] ], [ "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ [ "Little Mix-1" ] ], [ "operation" ] ], [ [ [ "Spice Girls-1" ] ], [ [ "Little Mix-1" ] ], [ "operation" ] ] ] }, { "qid": "054ae21cacd3669f6694", "term": "Elizabeth II", "description": "Queen of the United Kingdom and the other Commonwealth realms", "question": "Does the actress who played Elizabeth II speak fluent Arabic?", "answer": false, "facts": [ "Elizabeth II was portrayed by Helen Mirren. ", "Helen Mirren doesn't speak fluent Arabic. " ], "decomposition": [ "Which movie has portrayed Queen Elizabeth II of the United Kingdom?", "Who acted as Queen Elizabeth II in #1?", "What is #2's nationality?", "Do they speak fluent Arabic in #3?" ], "evidence": [ [ [ [ "The Queen (2006 film)-2" ] ], [ [ "The Queen (2006 film)-2" ] ], [ [ "Helen Mirren-1", "Helen Mirren-5" ] ], [ [ "English language in England-1" ] ] ], [ [ [ "The Queen (2006 film)-4" ] ], [ [ "The Queen (2006 film)-11" ] ], [ [ "Helen Mirren-1" ] ], [ [ "England-2" ], "operation" ] ], [ [ [ "Elizabeth (film)-1" ] ], [ [ "Elizabeth (film)-1" ] ], [ [ "Cate Blanchett-5" ] ], [ [ "Arab world-1" ], "no_evidence" ] ] ] }, { "qid": "df538a945f0d1c012cd2", "term": "Super Mario", "description": "platform video game series from Nintendo's Mario franchise", "question": "Does Super Mario require electricity to play?", "answer": true, "facts": [ "Super Mario is a video game.", "Video games are played on electronic devices.", "Electronic devices require electricity to function." 
], "decomposition": [ "What is Super Mario?", "Where are #1 played?", "Do #2 require electricity?" ], "evidence": [ [ [ [ "Super Mario-1" ] ], [ [ "Nintendo video game consoles-1" ] ], [ [ "Nintendo video game consoles-1" ] ] ], [ [ [ "Super Mario-1" ] ], [ [ "Nintendo Entertainment System-2" ] ], [ [ "Nintendo Entertainment System-12" ], "operation" ] ], [ [ [ "Super Mario-1" ] ], [ [ "Super Mario-1", "Video game console-3" ] ], [ [ "Video game console-3" ] ] ] ] }, { "qid": "ed99d136038b850040f5", "term": "Stroke", "description": "Medical condition where poor blood flow to the brain causes cell death", "question": "Is it impossible to tell if someone is having a stroke?", "answer": false, "facts": [ "Strokes have numerous physical symptoms including facial unevenness and trouble walking.", "Strokes have behavioral symptoms including slurred speech, disorientation, and trouble understanding speech." ], "decomposition": [ "What are the symptoms of a stroke?", "Are all of #1 hidden from physical observation?" ], "evidence": [ [ [ [ "Stroke-1" ] ], [ "operation" ] ], [ [ [ "FAST (stroke)-2" ] ], [ "no_evidence" ] ], [ [ [ "Stroke-15" ] ], [ [ "Stroke-15" ], "no_evidence" ] ] ] }, { "qid": "ba8d04adf8848e0d28a7", "term": "Christopher Reeve", "description": "20th-century American actor, director, producer and screenwriter", "question": "If he were poor, would Christopher Reeve have lived?", "answer": false, "facts": [ "Christopher Reeve suffered a serious spinal cord injury that left him a quadriplegic. ", "Christopher Reeve required a portable ventilator after his injury.", "At one point, Christopher Reeve's treatment was costing $400,000 yearly." ], "decomposition": [ "What injury did Christopher Reeve suffer from?", "What equipment is required for someone with #1 in order to live?", "What would be the cost of #2?", "Would a poor person be able to afford #3?" ], "evidence": [ [ [ [ "Christopher Reeve-3" ] ], [ [ "Mobility aid-1" ] ], [ [ "Disability-68" ] ], [ [ "Poverty-27" ], "operation" ] ], [ [ [ "Christopher Reeve-55" ] ], [ [ "Christopher Reeve-3" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Christopher Reeve-57" ], "operation" ], [ [ "Christopher Reeve-58" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "d47c38cb1ab436fe2f64", "term": "2008 Summer Olympics", "description": "Games of the XXIX Olympiad, held in Beijing in 2008", "question": "Could all of the 2008 Summer Olympics women find a hookup athlete partner?", "answer": true, "facts": [ "The 2008 Summer Olympics had 4,637 women compete.", "The 2008 Summer Olympics had 6,305 men compete." ], "decomposition": [ "How many women participated in the 2008 Summer Olympics?", "How many men participated in the 2008 Summer Olympics?", "Is #2 at least equal to #1?" 
], "evidence": [ [ [ [ "2008 Summer Olympics-1", "2008 Summer Olympics-2" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Athletics at the 2008 Summer Olympics-2" ], "no_evidence" ], [ [ "Athletics at the 2008 Summer Olympics-2" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "8675efc5b928149bd48f", "term": "Chinese New Year", "description": "traditional Chinese holiday", "question": "Are any mollusks on Chinese New Year calendar?", "answer": false, "facts": [ "A mollusk is an invertebrate animal such as snails, slugs, mussles, and octopuses.", "The animals on Chinese New Year calendar are: rat, ox, tiger, rabbit, dragon, snake, horse, goat, monkey, rooster, dog, and pig." ], "decomposition": [ "What are the animals on the Chinese New Year calendar?", "Is a mollusk part of #1?" ], "evidence": [ [ [ [ "Chinese zodiac-4", "Chinese zodiac-5" ] ], [ [ "Marine invertebrates-26" ], "operation" ] ], [ [ [ "Chinese New Year-6", "Chinese zodiac-17", "Chinese zodiac-18", "Chinese zodiac-19", "Chinese zodiac-20" ] ], [ "operation" ] ], [ [ [ "Chinese zodiac-5" ] ], [ "operation" ] ] ] }, { "qid": "93029b0b5b4f19ab150a", "term": "Skull", "description": "bony structure that forms the skeleton of head in most vertebrates", "question": "Can an adult human skull hypothetically pass through the birth canal?", "answer": true, "facts": [ "The largest baby ever born was 22 pounds. ", "The average human skull weighs between 10 and 11 pounds." ], "decomposition": [ "How big is the average baby ever delivered vaginally?", "How big is the average adult skull?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Childbirth-29" ], "no_evidence" ], [ [ "Human head-18" ] ], [ [ "Obstetrical dilemma-14" ], "operation" ] ], [ [ [ "Infant-9" ] ], [ [ "Human head-18" ] ], [ [ "Human head-18", "Infant-9" ] ] ], [ [ [ "Infant-5", "Infant-7" ] ], [ [ "Skull-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7398e6bc3daa29b454c9", "term": "Elijah Cummings", "description": "U.S. Representative from Maryland", "question": "Will Elijah Cummings cast a vote in the 2020 presidential election?", "answer": false, "facts": [ "Elijah Cummings died on October 17th, 2019.", "It is not possible, or legal, for a dead person to cast a vote in a presidential election." ], "decomposition": [ "Are deceased people able and allowed to vote in elections?", "Is Elijah Cummings deceased?", "Are the answers to #1 and #2 the same?" ], "evidence": [ [ [ [ "Voting rights in the United States-2" ], "no_evidence" ], [ [ "Elijah Cummings-1" ] ], [ "operation" ] ], [ [ "no_evidence", "operation" ], [ [ "Elijah Cummings-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Voter impersonation (United States)-1", "Voter impersonation (United States)-9" ] ], [ [ "Elijah Cummings-24" ] ], [ "operation" ] ] ] }, { "qid": "a1493f4a007aa2d7ca24", "term": "Super Mario", "description": "platform video game series from Nintendo's Mario franchise", "question": "Does Super Mario mainly focus on a man in green?", "answer": false, "facts": [ "Super Mario follows the adventures of a plumber named Mario.", "Mario wears a red shirt and plumber's overalls." ], "decomposition": [ "Who is the main character of the game Super Mario?", "Does #1 wear green?" 
], "evidence": [ [ [ [ "Super Mario-1" ] ], [ [ "Mario-29" ] ] ], [ [ [ "Super Mario-2" ] ], [ [ "Mario-6" ], "operation" ] ], [ [ [ "Super Mario-1" ] ], [ [ "Mario-6" ] ] ] ] }, { "qid": "ccb63564094c6ff09a14", "term": "Julia Roberts", "description": "American actress and producer", "question": "Does Julia Roberts lose the prolific acting contest in her family?", "answer": true, "facts": [ "As of May 2020, Julia Roberts has acted in 64 projects.", "Julia Roberts has a brother in acting, Eric Roberts, and a niece in acting, Emma Roberts.", "As of May 2020, Eric Roberts has acted in 577 projects." ], "decomposition": [ "Who is Julia Roberts brother?", "Is #1 an actor?", "How many projects has #2 appeared in?", "How many projects has Julia Roberts acted in?", "Is #3 larger than #4?" ], "evidence": [ [ [ [ "Julia Roberts-4" ] ], [ [ "Eric Roberts-1" ] ], [ [ "Eric Roberts-2" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Eric Roberts-1" ] ], [ [ "Eric Roberts-1" ] ], [ [ "Eric Roberts filmography-4" ], "no_evidence" ], [ [ "Julia Roberts-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Eric Roberts-3" ] ], [ [ "Eric Roberts-1" ] ], [ [ "Eric Roberts-2" ] ], [ [ "Julia Roberts filmography-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "ee5be0cf3e643eb6b699", "term": "Olive oil", "description": "liquid fat extracted by pressing olives", "question": "Can olive oil kill rabies?", "answer": false, "facts": [ "Olive oil is a fat used in cooking.", "Olive oil is made up of palmitic acid which is a weak acid found in plants and animals.", "Rabies is a disease from an infected animal bite.", "Rabies is treated by a shot containing immunoglobuin, a protein that is found in plasma cells.", "Plasma cells are found in the bone marrow of humans." ], "decomposition": [ "What is used to treat rabies?", "What is olive oil made of?", "Are any of #2 present in #1?" ], "evidence": [ [ [ [ "Rabies-31" ] ], [ [ "Olive oil-1" ] ], [ "operation" ] ], [ [ [ "Rabies-30", "Rabies-33" ] ], [ [ "Olive oil-1" ] ], [ "operation" ] ], [ [ [ "Rabies-30" ] ], [ [ "Olive oil-3" ] ], [ "operation" ] ] ] }, { "qid": "3fc25e1cccc76bb79a68", "term": "Ludacris", "description": "American rapper and actor", "question": "Does Ludacris perform classical music?", "answer": false, "facts": [ "Ludacris is a rap artist.", "Rap and hip hop music are not related to classical music." ], "decomposition": [ "Which kind of music does Ludacris perform?", "Is #1 the same as classical music?" ], "evidence": [ [ [ [ "Ludacris-6" ] ], [ "operation" ] ], [ [ [ "Ludacris-6" ] ], [ "operation" ] ], [ [ [ "Ludacris-1" ] ], [ "operation" ] ] ] }, { "qid": "960c1546ad5913f5c302", "term": "U2", "description": "Four-member Irish rock band, from Dublin", "question": "Did U2 play a concert at the Polo Grounds?", "answer": false, "facts": [ "U2 is an Irish rock band that formed in 1976.", "The Polo Grounds was a sports stadium that was demolished in 1964." ], "decomposition": [ "When was U2 (Irish rock band) formed?", "When was the Polo Grounds demolished?", "Is #1 before #2?" 
], "evidence": [ [ [ [ "U2-1" ] ], [ [ "Polo Grounds-4" ] ], [ "operation" ] ], [ [ [ "U2-1" ] ], [ [ "Polo Grounds-1" ] ], [ "operation" ] ], [ [ [ "U2-1" ] ], [ [ "Polo Grounds-32" ] ], [ "operation" ] ] ] }, { "qid": "5099d89884624a70fff7", "term": "Benito Mussolini", "description": "Fascist leader of Italy", "question": "Did Benito Mussolini wear bigger shoes than Hafþór Björnsson?", "answer": false, "facts": [ "Benito Mussolini was 5' 6​1⁄2\" tall.", "Hafþór Björnsson is 6 feet 9 inches tall.", "Shoe size increases proportionally as height increases." ], "decomposition": [ "How tall was Benito Mussolini?", "How tall is Hafþór Björnsson?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "Benito Mussolini-8" ], "no_evidence" ], [ [ "Hafþór Júlíus Björnsson-5" ] ], [ "operation" ] ], [ [ [ "Benito Mussolini-1" ], "no_evidence" ], [ [ "Hafþór Júlíus Björnsson-19" ] ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "Hafþór Júlíus Björnsson-5" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7a91f2a0a759f5d6581f", "term": "Carl Friedrich Gauss", "description": "German mathematician and physicist", "question": "Did Gauss have a normal brain structure?", "answer": false, "facts": [ "When Gauss died in 1855, his brain was preserved for study.", "Dr. Rudolf Wagner, who studied the brain, found the mass to be slightly above average, and found highly developed convolutions on the brain." ], "decomposition": [ "What was the outcome of studies carried out on Gauss' brain after his death?", "Did #1 indicate that his brain was the same as the average human brain?" ], "evidence": [ [ [ [ "Carl Friedrich Gauss-13" ] ], [ "operation" ] ], [ [ [ "Carl Friedrich Gauss-13" ] ], [ [ "Carl Friedrich Gauss-13" ] ] ], [ [ [ "Carl Friedrich Gauss-13" ] ], [ "operation" ] ] ] }, { "qid": "53115231af1cbf1ad33a", "term": "Sudoku", "description": "Logic-based number-placement puzzle", "question": "Could an infant solve a sudoku puzzle?", "answer": false, "facts": [ "Solving a sudoku puzzle requires the use of logic and a basic understanding of numbers.", "Infants are too young to understand the numerical system involved in sudoku." ], "decomposition": [ "What is the skill set of an infant?", "What skills are required for sudoku?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Infant-2" ], "no_evidence" ], [ [ "Sudoku-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Sudoku-11" ] ], [ [ "Logic puzzle-1" ] ], [ [ "Sudoku-1" ] ] ], [ [ [ "Infant cognitive development-12" ] ], [ [ "Sudoku code-6" ] ], [ [ "Infant cognitive development-12", "Sudoku code-6" ], "operation" ] ] ] }, { "qid": "280da2485d6504022d34", "term": "Eid al-Fitr", "description": "Islamic holiday that marks the end of Ramadan", "question": "Is Eid al-Fitr holiday inappropriate to watch entire US Office?", "answer": true, "facts": [ "Eid al-Fitr is an Islamic holiday dedicated to prayer.", "Eid al_fitr lasts from 1 to 3 days depending on the country.", "The entire US Office tv show would take 4 days, three hours, and 30 minutes to watch." ], "decomposition": [ "How long does Eid al-Fitr last?", "What is the run time of the Office?", "Is #2 longer than #1?" 
], "evidence": [ [ [ [ "Eid al-Fitr-4" ] ], [ [ "The Office (American TV series)-2" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Eid al-Fitr-4" ] ], [ [ "Finale (The Office)-1", "The Office (American TV series)-8" ] ], [ "operation" ] ], [ [ [ "Eid al-Fitr-1" ] ], [ [ "The Office-1" ] ], [ "operation" ] ] ] }, { "qid": "2ee33d6353893e4bd69c", "term": "Japan Airlines", "description": "airline headquartered in Tokyo, Japan", "question": "Are any of the destinations of Japan Airlines former Axis Powers?", "answer": true, "facts": [ "Japan Airlines flies all over the world to places such as Germany, Ireland, and Australia.", "The Axis Powers were the countries that fought against the Allies during World War II.", "Axis Powers included countries such as Germany, Italy, and Japan." ], "decomposition": [ "Which countries does Japan Airlines fly to?", "Which counties were part of the Axis powers?", "Are there any similarities or overlap between #1 and #2?" ], "evidence": [ [ [ [ "Japan Airlines-2" ], "no_evidence" ], [ [ "Axis powers-24" ] ], [ "no_evidence" ] ], [ [ [ "Japan Airlines-2", "Japan Airlines-84" ], "no_evidence" ], [ [ "Axis powers-2" ] ], [ "operation" ] ], [ [ [ "Japan Airlines-63", "Japan Airlines-84" ], "no_evidence" ], [ [ "Axis powers-1", "Axis powers-225" ] ], [ "operation" ] ] ] }, { "qid": "bd9497b8ba9a2085a775", "term": "WWE Raw", "description": "WWE television program", "question": "Would a viewer of Monday Night Football be able to catch WWE Raw during commercial breaks?", "answer": true, "facts": [ "Monday Night Football begins at 8pm EST on Monday nights during the NFL season", "WWE Raw airs on Monday nights between 8pm and 11pm EST" ], "decomposition": [ "When does Monday Night Football air?", "When does WWE Raw air?", "Is there and overlap between #1 and #2?" ], "evidence": [ [ [ [ "Monday Night Football-68" ] ], [ [ "WWE Raw-1" ] ], [ "operation" ] ], [ [ [ "Monday Night Football-7" ], "operation" ], [ [ "WWE Raw-59" ], "operation" ], [ "operation" ] ], [ [ [ "Monday Night Football-68" ] ], [ [ "WWE Raw-1" ] ], [ "operation" ] ] ] }, { "qid": "4543a65a3af216dc6acf", "term": "Voyager 2", "description": "Space probe and the second-farthest man-made object from Earth", "question": "Would Jon Brower Minnoch break a chair before Voyager 2 launch mass?", "answer": false, "facts": [ "Jon Brower Minnoch was the heaviest human being ever recorded.", "At his peak weight, Jon Brower Minnoch weighed almost 1.400 lb.", "The launch mass of Voyager 2 was 1,820 lb." ], "decomposition": [ "What was Jon Brower Minnoch's heaviest weight?", "What was the Voyager 2 launch mass?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Jon Brower Minnoch-1" ] ], [ [ "Voyager 2-9" ] ], [ "operation" ] ], [ [ [ "Jon Brower Minnoch-1" ] ], [ [ "Voyager 2-9" ] ], [ "operation" ] ], [ [ [ "Jon Brower Minnoch-5" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "e3253bcbb5125e2d29d9", "term": "Learning disability", "description": "Range of neurodevelopmental conditions", "question": "Do placozoa get learning disabilities?", "answer": false, "facts": [ "Learning disabilities are neurodevelopmental conditions afflicting a portion of the human population", "Neurodevelopmental conditions affect the nervous system", "Placozoa are multicellular microscopic organisms which do not have a nervous system" ], "decomposition": [ "What bodily system do learning disabilities affect?", "Do placozoa possess #1?" 
], "evidence": [ [ [ [ "Learning disability-1" ] ], [ [ "Placozoa-7" ], "operation" ] ], [ [ [ "Learning disability-1", "Learning disability-3" ], "no_evidence" ], [ [ "Placozoa-1" ], "operation" ] ], [ [ [ "Learning disability-5" ] ], [ [ "Placozoa-7" ], "operation" ] ] ] }, { "qid": "fab08cce89ad00ba8339", "term": "Christmas carol", "description": "Song or hymn or carol on the theme of Christmas", "question": "Are multiple Christmas Carol's named after Saints?", "answer": true, "facts": [ "The Christmas Carol Good King Wenceslas is based on the Bohemian king Wenceslaus I.", "Wenceslaus I was named a Saint in the Catholic Church and has a feast day of September 28.", "Jolly Old Saint Nicholas is a Christmas Carol named after an early Christian bishop who became a Saint." ], "decomposition": [ "Who was Christmas carol 'Good King Wenceslas' about?", "Who was Christmas carol 'Jolly Old Saint Nicholas' named for?", "Are #1 and #2 Saints?" ], "evidence": [ [ [ [ "Good King Wenceslas-3" ] ], [ [ "Jolly Old Saint Nicholas-1" ] ], [ [ "Good King Wenceslas-3", "Jolly Old Saint Nicholas-1" ] ] ], [ [ [ "Good King Wenceslas-1" ] ], [ [ "Saint Nicholas-1" ] ], [ [ "Saint Nicholas-1", "Wenceslaus I, Duke of Bohemia-2" ] ] ], [ [ [ "Good King Wenceslas-1" ] ], [ [ "Saint Nicholas-1" ] ], [ "operation" ] ] ] }, { "qid": "9594919b1927f406a344", "term": "Bobby Jindal", "description": "American politician", "question": "Would Bobby Jindal's high school mascot eat kibble?", "answer": true, "facts": [ "Bobby Jindal attended Baton Rouge Magnet High School.", "Baton Rouge Magnet High School's mascot is the bulldog.", "Kibble is another name for the dry form of dog/pet food." ], "decomposition": [ "Which school did Bobby Jindal attend?", "What is #1's mascot?", "What does kibble refer to?", "Would a #2 eat #3?" ], "evidence": [ [ [ [ "Bobby Jindal-7" ] ], [ "no_evidence" ], [ [ "Kibbles 'n Bits-1" ] ], [ "operation" ] ], [ [ [ "Bobby Jindal-7" ] ], [ [ "Baton Rouge Magnet High School-1" ], "no_evidence" ], [ [ "Dog food-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Bobby Jindal-7" ] ], [ "no_evidence" ], [ [ "Dog food-16" ] ], [ [ "Dog food-16" ], "no_evidence" ] ] ] }, { "qid": "94357c05cd9525c7b35a", "term": "New Testament", "description": "Second division of the Christian biblical canon", "question": "Are thetan levels found in the New Testament?", "answer": false, "facts": [ "The New Testament is a collection of texts related to Christianity.", "Thetan levels are a term used in the body of religious beliefs and practices known as Scientology." ], "decomposition": [ "Which religion are thetan levels related to?", "The New Testament is a part of which Holy Book?", "Which religion is #2 associated with?", "Is #1 the same as #3?" ], "evidence": [ [ [ [ "Thetan-1" ] ], [ [ "Bible-2" ] ], [ [ "Christian biblical canons-1" ] ], [ [ "Scientology beliefs and practices-35" ], "operation" ] ], [ [ [ "Thetan-6" ] ], [ [ "Historicity of the Bible-37" ] ], [ [ "New Testament-137" ] ], [ [ "New Testament-137", "Scientology-40" ], "operation" ] ], [ [ [ "Thetan-1" ] ], [ [ "New Testament-1" ] ], [ [ "New Testament-1" ] ], [ "operation" ] ] ] }, { "qid": "73a229ef0fd252aaeb39", "term": "Honey badger", "description": "species of mammal", "question": "Would a honey badger's dentures be different from a wolverine's?", "answer": true, "facts": [ "Dentures are false teeth that resemble the wearer's natural teeth", "Honey badgers and wolverines are physically very similar, but they can be differentiated by their dentition." 
], "decomposition": [ "What subfamily does the honey badger belong to?", "What subfamily does the wolverine belong to?", "What helps distinguish #1 from #2?", "Does #3 include dental shape?" ], "evidence": [ [ [ [ "Honey badger-2" ] ], [ [ "Wolverine-1" ] ], [ [ "Honey badger-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Honey badger-2" ] ], [ [ "Wolverine-1" ] ], [ [ "Honey badger-15" ] ], [ [ "Wolverine-9" ], "operation" ] ], [ [ [ "Honey badger-6" ] ], [ [ "Wolverine-1" ], "no_evidence" ], [ [ "Honey badger-15", "Wolverine-14" ], "no_evidence" ], [ [ "Honey badger-15" ], "no_evidence", "operation" ] ] ] }, { "qid": "cea676f4afc9e1051166", "term": "United States Secretary of State", "description": "U.S. cabinet member and head of the U.S. State Department", "question": "Does the United States Secretary of State answer the phones for the White House?", "answer": false, "facts": [ "The role of United States Secretary of State carries out the President's foreign policy.", "The White House has multiple phone lines managed by multiple people." ], "decomposition": [ "What are the duties of the US Secretary of State?", "Are answering phones part of #1?" ], "evidence": [ [ [ [ "United States Secretary of State-4" ] ], [ [ "United States Secretary of State-4" ] ] ], [ [ [ "United States Secretary of State-4" ] ], [ "operation" ] ], [ [ [ "United States Secretary of State-4" ] ], [ [ "United States Secretary of State-4" ] ] ] ] }, { "qid": "c22eb5bb5ca50900abc4", "term": "Green", "description": "Additive primary color visible between blue and yellow", "question": "Did the color green help Theodor Geisel become famous?", "answer": true, "facts": [ "Green is a color made by mixing blue and yellow", "Theodor Geisel is the American writer known as Dr. Seuss", "One of Dr. Seuss's most famous books and lines is Green Eggs and Ham" ], "decomposition": [ "What was Theodor Geisel's pen name?", "Which books authored by #1 made him famous?", "Are any of #2 particularly related to the color green?" ], "evidence": [ [ [ [ "Geisel Award-2" ] ], [ [ "Green Eggs and Ham-5" ] ], [ [ "Green Eggs and Ham-5" ] ] ], [ [ [ "Dr. Seuss-1" ] ], [ [ "Dr. Seuss-3" ] ], [ [ "Green Eggs and Ham-3" ], "operation" ] ], [ [ [ "Dr. Seuss-1" ] ], [ [ "Dr. Seuss-3" ] ], [ "operation" ] ] ] }, { "qid": "3b70f1178fdb45ad2a24", "term": "Ariana Grande", "description": "American singer, songwriter, and actress", "question": "Does Ariana Grande's signature style combine comfort items and high fashion?", "answer": true, "facts": [ "Ariana Grande's signature style is a long, over-sized pullover sweater with thigh high heels.", "Oversized pullovers are considered lounge wear, for relaxing at home in. ", "High heels are associated with high style. " ], "decomposition": [ "What is Ariana Grande's signature top?", "What is Ariana Grande's signature shoewear?", "What type of clothing is #1 considered?", "Is #3 considered a comfort and item and is #2 considered a high style item?" 
], "evidence": [ [ [ [ "Ariana Grande-34" ], "no_evidence" ], [ [ "Ariana Grande-34" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Ariana Grande-34" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Ariana Grande-34" ] ], [ [ "Ariana Grande-34" ] ], [ [ "Crop top-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "2eba35467e49d7e9041d", "term": "Richard III of England", "description": "15th-century King of England", "question": "Did Richard III's father have greater longevity than him?", "answer": true, "facts": [ "Richard III lived until the age of 32.", "Richard, Duke of York, Richard III's father, lived until the age of 49." ], "decomposition": [ "How many years did Richard III live to be?", "Who was Richard III's father?", "How old did #2 live to be?", "Is #3 greater than #1?" ], "evidence": [ [ [ [ "Richard III of England-1" ] ], [ [ "Richard of York, 3rd Duke of York-1", "Richard of York, 3rd Duke of York-2" ] ], [ [ "Richard of York, 3rd Duke of York-1" ] ], [ "operation" ] ], [ [ [ "Richard III of England-1" ] ], [ [ "Richard III of England-5" ] ], [ [ "Richard of York, 3rd Duke of York-1" ] ], [ "operation" ] ], [ [ [ "Richard III (disambiguation)-1" ] ], [ [ "Richard III of England-5" ] ], [ [ "Richard of York, 3rd Duke of York-3", "Sandal Castle-9" ] ], [ "operation" ] ] ] }, { "qid": "613f8af5655a31bda1a5", "term": "Bluetooth", "description": "Short distance wireless technology standard", "question": "Does a dentist treat Bluetooth problems?", "answer": false, "facts": [ "A dentist is a surgeon who specializes in dentistry, the diagnosis, prevention, and treatment of diseases and conditions of the oral cavity.", "Technological problems are typically handled by IT professionals.", "Bluetooth is not a physical entity." ], "decomposition": [ "What type of professional would handle bluetooth problems?", "Are dentists trained in #1?" ], "evidence": [ [ [ [ "Bluetooth-1", "Computer repair technician-1" ], "no_evidence" ], [ [ "Dentist-1" ], "operation" ] ], [ [ [ "Technical support-3" ] ], [ [ "Dentist-1" ] ] ], [ [ [ "Bluetooth-1" ], "no_evidence" ], [ [ "Dentist-1" ] ] ] ] }, { "qid": "7e05efd9ff2438d9ed68", "term": "Walt Disney", "description": "American entrepreneur, animator, voice actor and film producer", "question": "Was Walt Disney ever interviewed by Anderson Cooper?", "answer": false, "facts": [ "Walt Disney died on Dec 15, 1966", "Anderson Cooper was born on Jun 03, 1967" ], "decomposition": [ "When did Walt Disney pass away?", "When was Anderson Cooper born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Walt Disney-1" ] ], [ [ "Anderson Cooper-1" ] ], [ "operation" ] ], [ [ [ "Walt Disney-36" ] ], [ [ "Anderson Cooper-1" ] ], [ "operation" ] ], [ [ [ "Walt Disney-1" ] ], [ [ "Anderson Cooper-1" ] ], [ "operation" ] ] ] }, { "qid": "5858ad50cf60b252c7e7", "term": "Frost", "description": "coating or deposit of ice that may form in humid air in cold conditions, usually overnight", "question": "Is it common to see frost during some college commencements?", "answer": true, "facts": [ "College commencement ceremonies often happen during the months of December, May, and sometimes June. ", "Frost isn't uncommon to see during the month of December, as it is the winter." ], "decomposition": [ "What seasons can you expect see frost?", "What months do college commencements occur?", "Do any of #2 occur during #1?" 
], "evidence": [ [ [ [ "Frost-1" ] ], [ [ "Graduation-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Frost-30" ], "no_evidence" ], [ [ "Commencement speech-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Frost-25" ], "no_evidence" ], [ [ "Commencement at Central Connecticut State University-18" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ab1f7f17560c5b0bc203", "term": "Eddie Murphy", "description": "American stand-up comedian and actor", "question": "Could Eddie Murphy's children hypothetically fill a basketball court by themselves?", "answer": true, "facts": [ "Eddie Murphy has ten children.", "Basketball is played with two teams, each having five players on the court at one time." ], "decomposition": [ "How many children does Eddie Murphy have?", "How many players are on a basketball team?", "How many teams are on the basketball court at the same time?", "How much is #2 multiplied by #3?", "Is #1 greater than or equal to #4?" ], "evidence": [ [ [ [ "Eddie Murphy-40", "Eddie Murphy-41", "Eddie Murphy-43" ] ], [ [ "Basketball-1" ] ], [ [ "Basketball-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Eddie Murphy-40" ], "no_evidence" ], [ [ "Basketball-3" ] ], [ [ "Basketball-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Eddie Murphy-41" ], "no_evidence" ], [ [ "Basketball-3" ] ], [ [ "Basketball-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "213294e45ef7c7b93a81", "term": "Snow leopard", "description": "species of mammal", "question": "Can you find a snow leopard in the Yucatan?", "answer": false, "facts": [ "Snow leopards are native to mountain ranges in Central and South Asia", "The Yucatan is a peninsula in Mexico", "Mexico is located in North America" ], "decomposition": [ "On what continent is the Yucatan peninsula?", "On what continent are snow leopards found?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Mexico-1", "Yucatán Peninsula-1" ] ], [ [ "Snow leopard-1" ] ], [ "operation" ] ], [ [ [ "Yucatán Peninsula-1" ] ], [ [ "Snow leopard-1" ] ], [ "operation" ] ], [ [ [ "Yucatan woodpecker-5" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "0cc643196b24a6d37b46", "term": "United Nations Conference on Trade and Development", "description": "organization", "question": "Could Edward Snowden have visited the headquarters of United Nations Conference on Trade and Development?", "answer": true, "facts": [ "The headquarters of the United Nations Conference on Trade and Development is in Geneva, Switzerland.", "Edward Snowden was stationed in Geneva in 2007 with the task of representing the US at the UN." ], "decomposition": [ "What city and country is the United Nations Conference on Trade and Development located in?", "In 2007, what was Edward Snowden's tasked with?", "Was Edward Snowden stationed in #1 in 2007 to accomplish #2? 
" ], "evidence": [ [ [ [ "United Nations Conference on Trade and Development-7" ] ], [ [ "Edward Snowden-13" ] ], [ "operation" ] ], [ [ [ "Palace of Nations-1" ] ], [ [ "Edward Snowden-13" ] ], [ "operation" ] ], [ [ [ "United Nations Conference on Trade and Development-3" ] ], [ [ "Edward Snowden-13" ] ], [ "operation" ] ] ] }, { "qid": "4db6d375c79e86c2818e", "term": "Sony", "description": "Japanese multinational conglomerate corporation", "question": "Did Sony definitively win the video game war against Sega?", "answer": true, "facts": [ "Sony is the maker of the Playstation which has sold over 108 million PS4 units by March 2020.", "Sega's last console, the Sega Dreamcast, was discontinued in 2001.", "Sony Playstation competed with Sega's Dreamcast and Saturn systems in the 1990s.", "Sega now makes games for its former competitor, Sony, including Team Sonic Racing in 2019.", "At the height of the console wars, Sega Saturn sold 9.5 million units while Sony Playstation sold 102 million units." ], "decomposition": [ "How many console did Sega Saturn sell?", "How many console did Sony Playstation?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Sega Saturn-25" ], "no_evidence" ], [ [ "PlayStation-2" ] ], [ "operation" ] ], [ [ [ "Sega Saturn-3" ] ], [ [ "PlayStation-81" ] ], [ [ "PlayStation-81" ] ] ], [ [ [ "Sega Saturn-3" ] ], [ [ "PlayStation (console)-2" ] ], [ "operation" ] ] ] }, { "qid": "1a8fb1401bdae961beea", "term": "Winter", "description": "one of the Earth's four temperate seasons, occurring between autumn and spring", "question": "Is winter associated with hot temperatures?", "answer": false, "facts": [ "Winter is the season that occurs when a hemisphere is tilted away from the sun during Earth's orbit.", "During this season, that hemisphere gets less sunshine and is further from the sun than the other hemisphere.", "As a result, temperatures in that hemisphere are much colder during that season." ], "decomposition": [ "What is earth's primary source of heat energy?", "Which parts of the earth experience winters?", "What is the relative orientation of #2 with respect to #1 during winters?", "Will #3 result in hot temperatures in #2?" ], "evidence": [ [ [ [ "Earth-15" ], "no_evidence" ], [ [ "Winter-1" ] ], [ [ "Winter-4" ] ], [ "operation" ] ], [ [ [ "Sun-1" ] ], [ [ "Winter-1" ] ], [ [ "Winter-1" ] ], [ "operation" ] ], [ [ [ "The Sun-1" ] ], [ [ "Winter-8" ] ], [ [ "Winter-8" ] ], [ "operation" ] ] ] }, { "qid": "75bcf0203ee31aeeddd5", "term": "United States Capitol", "description": "seat of the United States Congress", "question": "Is the United States Capitol located near the White House?", "answer": true, "facts": [ "The Capitol building is located at one end of the National Mall in downtown Washington DC.", "The White House is located next to the Washington Monument a short way down from the Mall." ], "decomposition": [ "What city is the United States Capitol located in?", "What city is the White House located in?", "Is #1 the same as #2?" 
], "evidence": [ [ [ [ "United States Capitol-1" ] ], [ [ "White House-1" ] ], [ "operation" ] ], [ [ [ "United States-1" ] ], [ [ "White House-1" ] ], [ "operation" ] ], [ [ [ "United States Capitol-1" ] ], [ [ "White House-1" ] ], [ "operation" ] ] ] }, { "qid": "0688bd3291c81ffcfea1", "term": "Radioactive waste", "description": "wastes that contain nuclear material", "question": "Does the United States Navy create radioactive waste?", "answer": true, "facts": [ "Radioactive waste is created by nuclear material processing", "The United States Navy uses many nuclear submarines" ], "decomposition": [ "Radioactive waste is a byproduct of what process?", "Does the US Navy engage in any of the activities in #1?" ], "evidence": [ [ [ [ "Radioactive waste-1" ] ], [ [ "Nuclear submarine-4" ] ] ], [ [ [ "Radioactive waste-1" ] ], [ [ "United States Navy Nuclear Propulsion-1" ], "operation" ] ], [ [ [ "Radioactive waste-1" ] ], [ [ "United States Navy-5" ], "no_evidence", "operation" ] ] ] }, { "qid": "1f187be8cef09e713156", "term": "Black fly", "description": "family of insects", "question": "Was Black fly upstaged by another insect in Jeff Goldblum's 1986 film?", "answer": true, "facts": [ "Jeff Goldnlum starred in the 1986 movie The Fly.", "The fly used in the movie The Fly was a common Housefly.", "The Black fly is most closely related to Chironomidae since they both feed on mammals." ], "decomposition": [ "Which fly was used in the 1986 movie The Fly?", "is #1 a black fly?" ], "evidence": [ [ [ "no_evidence" ], [ "operation" ] ], [ [ [ "The Fly (1986 film)-4" ] ], [ [ "Black fly-1", "Housefly-1" ] ] ], [ [ [ "The Fly (1986 film)-4" ] ], [ "operation" ] ] ] }, { "qid": "dab785cc72fbb866c986", "term": "Alan Rickman", "description": "British actor", "question": "Did Alan Rickman have an improperly functioning organ?", "answer": true, "facts": [ "Alan Rickman died of pancreatic cancer on 14 January 2016 at age 69.", "Pancreatic cancer arises when cells in the pancreas, a glandular organ behind the stomach, begin to multiply out of control and form a mass." ], "decomposition": [ "What medical conditions did Alan Rickman have?", "Does any of the conditions in #1 involve an organ?" ], "evidence": [ [ [ [ "Alan Rickman-25" ] ], [ [ "Pancreatic cancer-1" ] ] ], [ [ [ "Alan Rickman-25" ] ], [ [ "Pancreatic cancer-1" ] ] ], [ [ [ "Alan Rickman-25" ] ], [ "operation" ] ] ] }, { "qid": "34ef40fff88e669f154f", "term": "4", "description": "Natural number", "question": "Would four shoes be insufficient for a set of octuplets?", "answer": true, "facts": [ "There are eight children in a set of octuplets.", "The typical child is born with two feet.", "Normally each foot needs to be covered with a single shoe." ], "decomposition": [ "How many children are in a set of octuplets?", "How many shoes does a person wear?", "What is #1 multiplied by #2?", "Is #3 greater than 4?" 
], "evidence": [ [ [ [ "Multiple birth-6" ] ], [ [ "Shoe-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Multiple birth-19" ] ], [ [ "Shoe-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Suleman octuplets-1" ] ], [ [ "Shoe-1" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "d77b8a72c7c3126f35b4", "term": "Judge", "description": "official who presides over court proceedings", "question": "Are banana trees used by judges for maintaining order?", "answer": false, "facts": [ "A banana tree has seeds that only have one embryonic leaf and is called a monocot.", "Judges use gavels to maintain order in court.", "Gavels are made of hardwood.", "Hardwood comes from dicot trees.", "Oak, maple, and sycamore are dicot trees." ], "decomposition": [ "Which instrument do judges use to maintain order in courts?", "What kind of material are #1 made from?", "Which group of trees is #2 obtained from?", "Do banana trees belong to the same group as #3?" ], "evidence": [ [ [ [ "Gavel-8" ] ], [ [ "Gavel-1" ] ], [ [ "Hardwood-1" ] ], [ [ "Banana-13" ] ] ], [ [ [ "Gavel-2" ] ], [ [ "Gavel-1" ] ], [ [ "Hardwood-1" ] ], [ [ "Monocotyledon-2" ], "operation" ] ], [ [ [ "Gavel-1" ] ], [ [ "Gavel-1" ] ], [ [ "Banana-5" ] ], [ "operation" ] ] ] }, { "qid": "ba1242c0a924787b6e03", "term": "Logging", "description": "the cutting, skidding, on-site processing, and loading of trees or logs onto transport vehicles", "question": "Would it be hard to get toilet paper if there were no loggers?", "answer": true, "facts": [ "Logging produces products such as pulp.", "Pulp is used to make paper products such as toilet paper." ], "decomposition": [ "What material is used to make paper products such as toilet paper?", "Is logging an important step in producing #1?" ], "evidence": [ [ [ [ "Pulp (paper)-1" ] ], [ [ "Logging-1" ] ] ], [ [ [ "Paper-1" ] ], [ [ "Logging-1" ] ] ], [ [ [ "Toilet paper-37" ] ], [ [ "Logging-11" ], "operation" ] ] ] }, { "qid": "ddfbe25915ba85ef4716", "term": "Bohai Sea", "description": "The innermost gulf of the Yellow Sea and Korea Bay on the coast of Northeastern and North China", "question": "Could Rhode Island sink into the Bohai Sea?", "answer": true, "facts": [ "The Bohai Sea is 30,000 square miles", "Rhode Island is 1,214 square miles" ], "decomposition": [ "How many square miles is the Bohai Sea?", "How many square miles is Rhode Island?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Bohai Sea-1" ] ], [ [ "Rhode Island-29" ] ], [ [ "Rhode Island-29" ], "operation" ] ], [ [ [ "Bohai Sea-1" ] ], [ [ "Rhode Island-29" ] ], [ "operation" ] ], [ [ [ "Bohai Sea-1" ] ], [ [ "Rhode Island-29" ] ], [ "operation" ] ] ] }, { "qid": "888f196932de7a192d61", "term": "Sloth", "description": "tree dwelling animal noted for slowness", "question": "Will a sloth explode if it's not upside down?", "answer": false, "facts": [ "sloth can climb trees in various positions.", "sloth can crawl along the ground on their stomachs. " ], "decomposition": [ "What are some common positions that a sloth can stay in?", "Is all of #1 upside down in orientation?" 
], "evidence": [ [ [ [ "Sloth-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Sloth-1" ] ], [ [ "Sloth-1" ] ] ], [ [ [ "Sloth-1", "Sloth-2" ] ], [ "operation" ] ] ] }, { "qid": "4a99bfb0539856dbf1a4", "term": "Newcastle, New South Wales", "description": "City in New South Wales, Australia", "question": "Was the MLB World Series held in Newcastle, New South Wales?", "answer": false, "facts": [ "The MLB World Series is held annually in a stadium belonging to one of its teams", "MLB teams are located in the United States and Canada", "New South Wales is a state in Australia" ], "decomposition": [ "In which countries are MLB World Series held?", "Is Australia one of #1?" ], "evidence": [ [ [ [ "MLB International-1" ] ], [ "operation" ] ], [ [ [ "World Series-1" ] ], [ "operation" ] ], [ [ [ "World Series-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0bc39f6d0566896a2139", "term": "Nancy Pelosi", "description": "52nd speaker of the United States House of Representatives", "question": "Would Nancy Pelosi have hypothetically been on same side as Gerald Ford?", "answer": false, "facts": [ "Gerald Ford was a Republican and was president from 1974-1977.", "Nancy Pelosi is the Democratic Speaker of the House.", "Domestically, Ford was consistently conservative, and led the fight against Johnson's Great Society. ", "Programs of the Great Society included Medicare, and Medicaid.", "Nancy Pelosi has consistently voted for Medicare and Medicaid." ], "decomposition": [ "What was Gerald Ford's political affiliation?", "What is Nancy Pelosi's political affiliation?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Gerald Ford-119" ] ], [ [ "Nancy Pelosi-12" ] ], [ "operation" ] ], [ [ [ "Gerald Ford-4" ] ], [ [ "Nancy Pelosi-12" ] ], [ "operation" ] ], [ [ [ "Gerald Ford-3" ] ], [ [ "Nancy Pelosi-2" ] ], [ "operation" ] ] ] }, { "qid": "0aad1627ee2984c8c147", "term": "1980 United States presidential election", "description": "49th quadrennial presidential election in the United States", "question": "Was the 1980 presidential election won by a member of the Grand Old Party?", "answer": true, "facts": [ "The Republican party is nicknamed the Grand Old Party.", "The 1980 election was won by Ronald Reagan.", "Reagan was a Republican." ], "decomposition": [ "Which political party is also known as the Grand Old Party?", "Who won the 1980 presidential election?", "What political party did #2 belong to?", "Is #3 the same as #1?" ], "evidence": [ [ [ [ "Republican Party (United States)-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "Ronald Reagan-3" ] ], [ "operation" ] ], [ [ [ "Republican Party (United States)-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "Ronald Reagan-3" ] ], [ "operation" ] ], [ [ [ "Republican Party (United States)-1" ] ], [ [ "1980 United States presidential election-1" ] ], [ [ "Ronald Reagan-3" ] ], [ "operation" ] ] ] }, { "qid": "da0b209dff1ccc80f46a", "term": "Glenn Beck", "description": "American talk radio and television host", "question": "Is Glenn Beck known for his mild temper?", "answer": false, "facts": [ "Glenn Beck has gone viral for screaming at callers on his program.", "Glenn Beck has walked off of professional interviews when he doesn't like the questions." ], "decomposition": [ "What kind of temperament has Glenn Beck shown in public on notable ocassions?", "Did all of #1 indicate mild temper?" 
], "evidence": [ [ [ [ "Glenn Beck-22" ] ], [ "operation" ] ], [ [ [ "Glenn Beck Program-19", "Glenn Beck-89" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Glenn Beck-22" ] ], [ "operation" ] ] ] }, { "qid": "f8de728cbf65a3f0f1d6", "term": "Kaffir lime", "description": "A citrus fruit native to tropical Southeast Asia and southern China", "question": "Would a kaffir lime be a good ingredient for making a candle?", "answer": true, "facts": [ "Kaffir limes are citrus fruits originating in tropical climates.", "The Kaffir lime leaves and rind emit an intense citrus fragrance when crushed up.", "Yankee Candle, one of the largest candle companies, sells several popular varieties of citrus candles.", "Sage and Citrus is one of the highest rated scents that Yankee Candle sells." ], "decomposition": [ "Which fragrance do Kaffir lime leaves emit when crushed?", "What are the scents of some popular varieties of candles that Yankee Candle sells?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Kaffir lime-2" ] ], [ [ "Yankee Candle-14" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Kaffir lime-8" ], "no_evidence" ], [ [ "Yankee Candle-14" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Kaffir lime-2" ] ], [ [ "Yankee Candle-14" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "590e5edef4ccd793b6df", "term": "Moon Jae-in", "description": "President of South Korea", "question": "Did Moon Jae-in's residence exist when the World Trade Center was completed?", "answer": false, "facts": [ "The President of South Korea lives in the Blue House", "The Blue House finished construction in early 1991", "The World Trade Center complex was completed in 1987" ], "decomposition": [ "Where does Moon Jae-in live?", "When was the construction of #1 finished?", "When was the construction of the World Trade Center completed?", "Is #2 before #3?" ], "evidence": [ [ [ [ "Moon Jae-in-28" ] ], [ "no_evidence" ], [ [ "Construction of the World Trade Center-4" ] ], [ "operation" ] ], [ [ [ "Blue House-1" ] ], [ [ "Blue House-8" ] ], [ [ "World Trade Center (1973–2001)-1" ] ], [ "operation" ] ], [ [ [ "Blue House-1", "Moon Jae-in-1" ] ], [ [ "Blue House-8" ] ], [ [ "World Trade Center (1973–2001)-1" ] ], [ "operation" ] ] ] }, { "qid": "08bd7c4d339a2838f173", "term": "Pulitzer Prize", "description": "U.S. award for achievements in newspaper and online journalism, literature, and musical composition", "question": "Is it impossible for Cheb Mami to win a Pulitzer Prize for musical composition?", "answer": true, "facts": [ "The history Pulitzer Prize can be won by any citizen, all other Pulitzer Prize winners must be a US Citizen.", "Cheb Mami is an Algerian singer.", "Cheb Mami is a citizen of Algeria." ], "decomposition": [ "The Pulitzer Prize for musical composition is exclusive to the citizens of which country?", "Which country is Cheb Mami from?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "Pulitzer Prize for Music-1" ] ], [ [ "Cheb Mami-1" ] ], [ "operation" ] ], [ [ [ "Pulitzer Prize for Music-1" ] ], [ [ "Cheb Mami-2" ] ], [ "operation" ] ], [ [ [ "Pulitzer Prize for Music-2" ] ], [ [ "Cheb Mami-2" ] ], [ "operation" ] ] ] }, { "qid": "55bcf8e81e775bac5a4e", "term": "DARPA", "description": "Agency of the U.S. Department of Defense responsible for the development of new technologies", "question": "Did DARPA influence Albert Einstein? 
", "answer": false, "facts": [ "DARPA is an agency in the US focused on defense and new technologies.", "DARPA was founded in 1958 under Dwight D Eisenhower.", "Albert Einstein was a famous physicist who died in 1955." ], "decomposition": [ "When was DARPA formed?", "When did Albert Einstein die?", "Is #1 before #2?" ], "evidence": [ [ [ [ "DARPA-2" ] ], [ [ "Albert Einstein-1" ] ], [ "operation" ] ], [ [ [ "DARPA-2" ] ], [ [ "Albert Einstein-1" ] ], [ "operation" ] ], [ [ [ "DARPA-2" ] ], [ [ "Albert Einstein-1" ] ], [ "operation" ] ] ] }, { "qid": "11e20bbf1f44625b8349", "term": "Family Guy", "description": "American animated sitcom", "question": "Does the art from Family Guy look a lot like the art in American Dad?", "answer": true, "facts": [ "Family Guy and American Dad are both Fox Animated Sitcoms animated by Seth MacFarlane.", "Family Guy and American Dad characters all share common facial features and movement styles." ], "decomposition": [ "Who is the animator for Family Guy?", "Who is the animator for American Dad?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Seth MacFarlane-14" ] ], [ [ "Seth MacFarlane-21" ] ], [ "operation" ] ], [ [ [ "Family Guy-1" ] ], [ [ "American Dad!-1" ] ], [ "operation" ] ], [ [ [ "Family Guy-2" ] ], [ [ "American Dad!-14" ] ], [ "operation" ] ] ] }, { "qid": "47ba45019129dd07cb55", "term": "Stephen King", "description": "American author", "question": "Could Stephen King join the NASA Astronaut Corps?", "answer": false, "facts": [ "NASA Astronaut Corps candidates must have a master's degree from an accredited institution in engineering, biological science, physical science or mathematics.", "Stephen King studied at the University of Maine, graduating in 1970 with a Bachelor of Arts in English." ], "decomposition": [ "What degrees are acceptable to meet the minimum requirement for admittance to the NASA Astronaut Corps?", "What degrees does Stephen King hold?", "Is #2 also in #1?" ], "evidence": [ [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Stephen King-6" ] ], [ "operation" ] ], [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Stephen King-6" ] ], [ "operation" ] ], [ [ [ "NASA Astronaut Corps-10" ] ], [ [ "Stephen King-6" ] ], [ "operation" ] ] ] }, { "qid": "faa475c30066e8b522f1", "term": "Alexander Graham Bell", "description": "scientist and inventor known for his work on the telephone", "question": "Would Alexander Graham Bell hypothetically support Nazi eugenics?", "answer": true, "facts": [ "Eugenics was the idea of selective breeding or sterilization to rid the human populace of certain traits.", "Nazis used eugenics to justify mass sterilization and mass murder.", "Alexander Graham Bell Alexander Graham Bell advocated against the use of sign language and hoped to eradicate deafness through selective breeding." ], "decomposition": [ "What did the Nazi's use to justify mass sterilization and mass murder?", "What is the definition of #1?", "What did Alexander Graham Bell advocate against the use of?", "Did Alexander Graham Bell use #2 to get rid of #3?" 
], "evidence": [ [ [ [ "Nazi eugenics-3" ] ], [ [ "Eugenics-1" ] ], [ [ "History of eugenics-21" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Eugenics-4" ] ], [ [ "Eugenics-1" ] ], [ [ "History of eugenics-21" ] ], [ "operation" ] ], [ [ [ "Nazism and race-3", "Nazism-59", "Nazism-60" ] ], [ [ "Racial hierarchy-11" ] ], [ [ "History of eugenics-21" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "81cb69ab13c3195331c4", "term": "Black swan", "description": "species of bird", "question": "Can black swan's formation type help spell longest word in Dictionary?", "answer": true, "facts": [ "Black swan's fly in a \"V\" formation.", "The longest word in the dictionary is pneumonoultramicroscopicsilicovolcanoconiosis." ], "decomposition": [ "What letter does the formation of black swans in flight resemble?", "What is the longest word in English language?", "Can #1 be found in #2?" ], "evidence": [ [ [ [ "Black swan-6" ] ], [ [ "Longest word in English-4" ] ], [ "operation" ] ], [ [ [ "Black swan-6" ] ], [ [ "Pneumonoultramicroscopicsilicovolcanoconiosis-1" ] ], [ "operation" ] ], [ [ [ "Swan-6" ], "no_evidence" ], [ [ "Pneumonoultramicroscopicsilicovolcanoconiosis-1" ] ], [ "operation" ] ] ] }, { "qid": "636b557a0eb102ce04b5", "term": "Glutamic acid", "description": "amino acid", "question": "Do you find glutamic acid in a severed finger?", "answer": true, "facts": [ "Glutamic acid is an amino acid and neurotransmitter", "As a neurotransmitter, glutamic acid is the most abundant in the vertebrate nervous system", "A severed finger contains parts of a vertebrate's nervous system" ], "decomposition": [ "What kind of transmitter is glutamic acid?", "In which bodily system is #1 the most abundant?", "Does a severed finger contain #2?" ], "evidence": [ [ [ [ "Glutamic acid-1" ] ], [ [ "Glutamic acid-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Glutamic acid-1" ] ], [ [ "Glutamic acid-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Glutamic acid-1" ] ], [ [ "Neurotransmitter-16" ], "no_evidence" ], [ [ "Dendrite-3" ], "no_evidence", "operation" ] ] ] }, { "qid": "d723358c3932de9ce765", "term": "German Shepherd", "description": "Dog breed", "question": "Do German Shepherds worry about the Abitur?", "answer": false, "facts": [ "The Abitur is a qualification granted by university-preparatory schools in Germany, Lithuania, and Estonia.", "The Abitur is conferred on students who pass their final exams at the end of their secondary education.", "Students that attend university-preparatory schools are humans.", "German Shepherds are not humans." ], "decomposition": [ "The Abitur qualification is conferred after which achievement?", "What kind of animal is a German Shepherd?", "Are #2 capable of completing #1 which was meant for humans?" ], "evidence": [ [ [ [ "Abitur-1" ] ], [ [ "German Shepherd-1" ] ], [ "operation" ] ], [ [ [ "Abitur-28" ] ], [ [ "German Shepherd-25" ] ], [ "operation" ] ], [ [ [ "Abitur-4" ] ], [ [ "German Shepherd-11" ] ], [ "operation" ] ] ] }, { "qid": "e705505f353721ac0b59", "term": "PlayStation 4", "description": "Sony's eighth-generation home video game console", "question": "Did Tom Bosley enjoy video games on the PlayStation 4?", "answer": false, "facts": [ "The PlayStation 4 was launched in 2013.", "Tom Bosley died in 2010." ], "decomposition": [ "What year did Tom Bosley die?", "What year was the PlayStation 4 Launched?", "Is #2 before #1?" 
], "evidence": [ [ [ [ "Tom Bosley-13" ] ], [ [ "PlayStation 4-1" ] ], [ "operation" ] ], [ [ [ "Tom Bosley-13" ] ], [ [ "PlayStation 4-1" ] ], [ "operation" ] ], [ [ [ "Tom Bosley-1" ] ], [ [ "PlayStation 4-1" ] ], [ "operation" ] ] ] }, { "qid": "bc429593abdbd062e8d2", "term": "Toyota Hilux", "description": "Series of light commercial vehicles produced by the Japanese car-manufacturer Toyota.", "question": "Can a 2019 Toyota Hilux hypothetically support weight of thirty Big John Studd clones?", "answer": false, "facts": [ "The 2019 Toyota Hilux has a maximum carry load of 3500kg or, around 7,700 pounds.", "Big John Studd was a professional wrestler that weighed 364 pounds." ], "decomposition": [ "What is the maximum carry load weight of a Toyota Hilux?", "How much did Big John Studd weigh?", "What is #2 multiplied by 30?", "Is #1 greater than or equal to #3?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Toyota Hilux-1" ], "no_evidence" ], [ [ "Big John Studd-14" ], "no_evidence" ], [ "operation" ], [ "operation" ] ], [ [ [ "Toyota Hilux-1" ], "no_evidence" ], [ [ "Big John Studd-1", "NWA Mid-Atlantic Heavyweight Championship-4" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "01c3faf4915a44133f60", "term": "Iggy Pop", "description": "American rock singer-songwriter, musician, and actor", "question": "Was Iggy Pop named after his father?", "answer": true, "facts": [ "Iggy Pop's birth name was James Newell Osterberg Jr.", "The father of Iggy Pop was James Newell Osterberg Sr." ], "decomposition": [ "What is Iggy Pop's real name?", "What is Iggy Pop's father's name?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Iggy Pop-1" ] ], [ [ "Iggy Pop-5" ] ], [ "operation" ] ], [ [ [ "Iggy Pop-1" ] ], [ [ "Iggy Pop-5" ] ], [ "operation" ] ], [ [ [ "Iggy Pop-1" ] ], [ [ "Iggy Pop-5" ] ], [ "operation" ] ] ] }, { "qid": "f745cd36dda73188704f", "term": "Potato", "description": "plant species producing the tuber used as a staple food", "question": "Are potatoes native to the European continent?", "answer": false, "facts": [ "Potatoes originated in South America and spread throughout the Americas by indigenous tribes.", "European explorers discovered potatoes and brought them back to share at home in Europe." ], "decomposition": [ "Where did potatoes originate?", "Is #1 located in Europe?" ], "evidence": [ [ [ [ "Potato-13" ] ], [ [ "Potato-13" ] ] ], [ [ [ "Potato-2" ] ], [ "operation" ] ], [ [ [ "Potato-2" ] ], [ [ "Peru-1" ] ] ] ] }, { "qid": "3a373a74e76c72176e39", "term": "Börek", "description": "Stuffed phyllo pastry", "question": "Would Şerafeddin Sabuncuoğlu have eaten Börek?", "answer": true, "facts": [ "Börek originated in Ottoman cuisine", "Şerafeddin Sabuncuoğlu was an Ottoman scientist" ], "decomposition": [ "Where did Borek originate from?", "Was Serafeddin Sabuncuoglu from #1?" 
], "evidence": [ [ [ [ "Börek-1" ] ], [ [ "Ottoman Empire-1", "Sabuncuoğlu Şerafeddin-1" ], "operation" ] ], [ [ [ "Börek-3" ] ], [ [ "Sabuncuoğlu Şerafeddin-2" ] ] ], [ [ [ "Börek-1" ] ], [ [ "Amasya-1", "Sabuncuoğlu Şerafeddin-2" ] ] ] ] }, { "qid": "7efac97ddb31c18cf77a", "term": "Zucchini", "description": "Edible summer squash, typically green in color", "question": "Can the original name of the zucchini be typed on the top row of a QWERTY keyboard?", "answer": false, "facts": [ "The original name for the zucchini in Mexican language or Nahuatl is ayokonetl.", "The top row of a QWERTY keyboard contains the keys q, w, e, r, t, y, u, i , o, and p." ], "decomposition": [ "What is the original name of the zucchini?", "What keys are on the top row of a QWERTY keyboard?", "Is every letter in #1 present in #2?" ], "evidence": [ [ [ [ "Zucchini-7" ] ], [ [ "QWERTY-9" ] ], [ [ "QWERTY-9", "Zucchini-7" ], "no_evidence" ] ], [ [ [ "Zucchini-4" ] ], [ [ "QWERTY-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Zucchini-1" ] ], [ [ "QWERTY-1" ] ], [ "operation" ] ] ] }, { "qid": "6bed2a566da292a36593", "term": "Rice pudding", "description": "Dish made from rice mixed with water or milk", "question": "Would Cyndi Lauper use milk substitute in her rice pudding?", "answer": true, "facts": [ "Cyndi Lauper wrote a song about lactose intolerance.", "Lactose intolerance leads to gastrointestinal discomfort upon eating dairy." ], "decomposition": [ "What conditions lead people to using milk substitutes?", "Does Cyndi Lauper suffer from any conditions listed in #1?" ], "evidence": [ [ [ [ "Lactose intolerance-1", "Veganism-1" ] ], [ [ "Cyndi Lauper-1" ], "no_evidence", "operation" ] ], [ [ [ "Milk allergy-9" ] ], [ [ "Cyndi Lauper-76" ], "no_evidence" ] ], [ [ [ "Milk substitute-17" ] ], [ "no_evidence" ] ] ] }, { "qid": "66dd7cd84b014a6d1f60", "term": "Eggplant", "description": "plant species Solanum melongena", "question": "Is eggplant deadly to most atopic individuals? ", "answer": false, "facts": [ "Atopic individuals have a genetic tendency to develop allergic reactions", "Eggplant allergies are usually not life-threatening " ], "decomposition": [ "What kind of reactions do atopic people have a tendency of getting?", "Are #1 caused by eggplant usually deadly in nature?" ], "evidence": [ [ [ [ "Atopy-4" ] ], [ [ "Eggplant-53" ], "operation" ] ], [ [ [ "Atopy-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Atopy-1", "Atopy-4", "Atopy-5" ] ], [ [ "Atopy-6" ], "no_evidence", "operation" ] ] ] }, { "qid": "a905e30e6cfc76377348", "term": "Bitcoin", "description": "decentralized cryptocurrency", "question": "Could a single bitcoin ever cover cost of a Volkswagen Jetta?", "answer": true, "facts": [ "The all time high price of bitcoin was $19,783 in 2017.", "The suggested retail price of a 2020 Volkswagen Jetta is $18,895." ], "decomposition": [ "What is the highest price for a bitcoin?", "What is the cheapest price of a Jetta?", "Is #1 greater than #2?" 
], "evidence": [ [ [ [ "Bitcoin-22" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bitcoin-22" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Economics of bitcoin-16" ] ], [ [ "Volkswagen Jetta-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "df5d1bdd7a56f0090f60", "term": "Drew Carey", "description": "American actor, comedian, game show host, libertarian and photographer", "question": "Has Drew Carey outshined Doug Davidson's tenure?", "answer": true, "facts": [ "Drew Carey has been the host of the Price is Right for over 13 years.", "Doug Davidson hosted the Price is Right from 1994-1995." ], "decomposition": [ "How long has Drew Carey hosted the Price is Right?", "How long did Doug Davidson host the Price is Right?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Drew Carey-14" ] ], [ [ "Doug Davidson-2" ] ], [ "operation" ] ], [ [ [ "The Price Is Right-1" ] ], [ [ "The New Price Is Right (1994 game show)-1" ] ], [ "operation" ] ], [ [ [ "Drew Carey-1" ] ], [ [ "Doug Davidson-2" ] ], [ "operation" ] ] ] }, { "qid": "d92862b94dffc52378e4", "term": "Taco Bell", "description": "American fast-food chain", "question": "Can you purchase a dish with injera at Taco Bell?", "answer": false, "facts": [ "Taco Bell serves a variety of Mexican and Tex-Mex foods that include tacos, burritos, quesadillas, and nachos.", "Injera is a sour fermented flatbread with a slightly spongy texture, traditionally made out of teff flour.", "Injera is part of Ethiopian cuisine." ], "decomposition": [ "What kind of food is Taco Bell known to serve?", "Which country is #1 most associated with?", "Which country is Injera native to?", "Is #2 the same as #3?" ], "evidence": [ [ [ [ "Taco Bell-1" ] ], [ [ "Taco Bell-1" ] ], [ [ "Injera-1" ] ], [ "operation" ] ], [ [ [ "Taco Bell-1" ] ], [ [ "Taco Bell-1" ] ], [ [ "Injera-1" ] ], [ "operation" ] ], [ [ [ "Taco Bell-1" ] ], [ [ "Mexican cuisine-6", "Tex-Mex-1" ] ], [ [ "Pancake-7" ] ], [ "operation" ] ] ] }, { "qid": "913bb87c30feb6484679", "term": "Gettysburg Battlefield", "description": "site of the Battle of Gettysburg during the American Civil War", "question": "Would a Superbowl Football Game be crowded on the Gettysburg Battlefield?", "answer": false, "facts": [ "Football fields used in the Super Bowl are 100 yards long. ", "The Gettysburg Battlefield is over 5 miles long.", "There are 1760 yards in a mile." ], "decomposition": [ "How long is the football field superbowl?", "How long is the Gettysburg Battlefield?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Comparison of American football and rugby league-6" ] ], [ [ "Gettysburg Battlefield-2" ] ], [ "operation" ] ], [ [ [ "Football pitch-4" ] ], [ [ "Gettysburg Battlefield-2" ] ], [ "operation" ] ], [ [ [ "American football-11" ] ], [ [ "Gettysburg Battlefield-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "b4c61fdd96959a52f996", "term": "High Speed 1", "description": "high-speed railway between London and the Channel Tunnel", "question": "Would the tunnels at CERN fit onto the High Speed 1 rails?", "answer": true, "facts": [ "High Speed 1 (HS1), legally the Channel Tunnel Rail Link (CTRL), is a 67-mile (108 km) high-speed railway.", "The CERN collider is contained in a circular tunnel, with a circumference of 26.7 kilometres (16.6 mi).", "The circumference of a circle is the length of the enclosing boundary." ], "decomposition": [ "How long are the tunnels at the CERN collider?", "How long is the High Speed 1 railway?", "Is #1 less than or equal to #2?" 
], "evidence": [ [ [ [ "Large Electron–Positron Collider-2" ] ], [ [ "High Speed 1-1" ] ], [ "operation" ] ], [ [ [ "CERN-18" ] ], [ [ "High Speed 1-1" ] ], [ "operation" ] ], [ [ [ "CERN-18" ] ], [ [ "High Speed 1-1" ] ], [ "operation" ] ] ] }, { "qid": "29551a9093ea19c1e7c3", "term": "2000", "description": "Year", "question": "Would 1996 leap year baby technically be 1 year old in 2000?", "answer": true, "facts": [ "A leap year happens once every 4 years and has 29 days in February.", "The years 1996, 2000, 2004, 2008, 2012, 2016, and 2020 are the last 7 leap years that have happened.", "1996 to 2000 is one leap year." ], "decomposition": [ "How many years apart are consecutive leap years?", "What is 2000 minus 1996?", "Is #2 divided by #1 equal to one?" ], "evidence": [ [ [ [ "Leap year-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Leap year-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Leap year-16" ] ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "82bde29394d6aba258a1", "term": "Preventive healthcare", "description": "Prevent and minimize the occurrence of diseases", "question": "Can preventive healthcare reduce STI transmission?", "answer": true, "facts": [ "Preventive healthcare includes screenings for STI/STD's. ", "Increases in testing for STI's allow for citizens to protect themselves from infection and contain outbreaks." ], "decomposition": [ "What are the effects of preventive measures on STI transmission?", "Does #1 involve a reduction in their spread?" ], "evidence": [ [ [ [ "Sexually transmitted infection-22" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Preventive healthcare-18" ] ], [ [ "Pre-exposure prophylaxis-8" ], "operation" ] ], [ [ [ "Condom-85" ] ], [ [ "Condom-85" ] ] ] ] }, { "qid": "e3f5b70bb16f336cc7fc", "term": "Snowdon", "description": "highest mountain in Wales", "question": "Would Snowdon mountain be a piece of cake for Tenzing Norgay?", "answer": true, "facts": [ "Tenzing Norgay was a mountaineer that climbed Mount Everest in 1953.", "Snowdon Mountain has a peak of 3,560 feet.", "Mount Everest has a peak of over 29,000 feet." ], "decomposition": [ "How high is Snowdon Mountain?", "What was the highest peak ever climbed by Tenzing Norgay", "How high is #2?", "Is #3 greater than #1?" ], "evidence": [ [ [ [ "Snowdon-1" ] ], [ [ "Tenzing Norgay-1" ] ], [ [ "Mount Everest-2" ] ], [ "operation" ] ], [ [ [ "Snowdon-1" ] ], [ [ "Mount Everest-1", "Tenzing Norgay-1" ] ], [ [ "Mount Everest-2" ] ], [ "operation" ] ], [ [ [ "Snowdon-1" ], "no_evidence" ], [ [ "Tenzing Norgay-1" ] ], [ [ "Mount Everest-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "5165ea01d214c4e74188", "term": "Reconstruction era", "description": "Era of military occupation in the Southern United States after the American Civil War (1865–1877)", "question": "Can a Reconstruction era coin buy DJI Mavic Pro Drone?", "answer": true, "facts": [ "The DJI Mavic Pro Drone retails for around $1,000 dollars.", "THE Reconstruction Era took place from 1865-1877.", "Mint condition 1870 Seated Liberty Silver Dollar's can sell for between $2,283 to $4,933." ], "decomposition": [ "How much does a DJI Mavic Pro Drone retail for?", "During what years did the Reconstruction era occur?", "Of the US coins minted during the years in #2, are any of them now worth at least as much as #1?" 
], "evidence": [ [ [ [ "DJI-26" ], "no_evidence" ], [ [ "Reconstruction era-2" ] ], [ [ "Three-cent silver-28", "Two-cent piece (United States)-21" ], "operation" ] ], [ [ [ "DJI-26" ], "no_evidence" ], [ [ "Reconstruction era-2" ] ], [ [ "Three-cent piece-4" ], "no_evidence", "operation" ] ], [ [ [ "Mavic (UAV)-2" ], "no_evidence" ], [ [ "Reconstruction era-2" ] ], [ [ "Economic history of the United States-201" ], "no_evidence" ] ], [ [ [ "Mavic (UAV)-17" ], "no_evidence" ], [ [ "Reconstruction era-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "ea0fd9617596c799c33b", "term": "Sun bear", "description": "bear found in tropical forest habitats of Southeast Asia", "question": "Can an American black bear swallow a sun bear whole?", "answer": false, "facts": [ "Sun bears grow to only about half the size of an American black bear.", "The total length of adult bear skulls was found to average 262 to 317 mm (10.3 to 12.5 in).", "Black bears cannot open their mouths to half their body length." ], "decomposition": [ "How big is the skull of an adult american black bear?", "What is the size of an adult sun bear?", "Is #2 smaller than #1?" ], "evidence": [ [ [ [ "American black bear-19" ] ], [ [ "Sun bear-1" ] ], [ "operation" ] ], [ [ [ "American black bear-19" ] ], [ [ "Sun bear-1" ] ], [ "operation" ] ], [ [ [ "American black bear-22" ], "no_evidence" ], [ [ "Sun bear-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "1ac4c639f88bcaeccfbb", "term": "Scrabble", "description": "board game with words", "question": "Does monster name in West African Folklore that witches send into villages set Scrabble record?", "answer": false, "facts": [ "An obia is a monster in West African folklore described as being a massive animal that witches send into villages to kidnap young girls and wear their skin for a coat.", "Obia generates 6 points in Scrabble.", "Oxyphenbutazone is said to be the highest scoring scrabble word worth 1,458 points." ], "decomposition": [ "What is the name of the monster in West African Folklore that witches send into villages?", "What is the highest scoring word in Scrabble?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Obia (folklore)-2" ] ], [ [ "Scrabble-76" ] ], [ "operation" ] ], [ [ [ "Obia (folklore)-2" ] ], [ [ "Scrabble-76" ] ], [ "operation" ] ], [ [ [ "Obia (folklore)-2" ] ], [ [ "Scrabble-76" ] ], [ "operation" ] ] ] }, { "qid": "7460108c3996aca011d5", "term": "September", "description": "ninth month in the Julian and Gregorian calendars", "question": "Does Home Depot sell item in late September zodiac sign symbol?", "answer": true, "facts": [ "The zodiac sign for late September is Libra.", "The Libra zodiac sign is represented by scales. ", "Home Depot sells a number of scales including Pelouze and Ozeri brands." ], "decomposition": [ "What is the zodiac sign that represents late September?", "What is symbol of #1?", "What kinds of goods (and services) does The Home Depot deal in?", "Is #2 likely to be included in #3?" 
], "evidence": [ [ [ [ "Libra (astrology)-1" ] ], [ [ "Weighing scale-1" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ], [ [ [ "Libra (astrology)-1" ] ], [ [ "Libra (astrology)-2" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ], [ [ [ "Libra-1" ] ], [ [ "Libra (astrology)-2" ] ], [ [ "The Home Depot-1" ] ], [ "operation" ] ] ] }, { "qid": "a97758e32a875a48e83a", "term": "Beauty and the Beast (1991 film)", "description": "1991 American animated musical fantasy romance film", "question": "Do inanimate objects come alive in Beauty and the Beast?", "answer": true, "facts": [ "Beauty and the Beast features a castle full of items that move and speak on their own. ", "An inanimate object is one that is not alive in any way.", "Main characters of Beauty and the Beast include a talking teacup and a sassy duster." ], "decomposition": [ "Who are the main characters in Beauty and the Beast?", "What type of entities are the characters listed in #1?", "Are any of the types listed in #2 usually inanimate objects?" ], "evidence": [ [ [ [ "Beauty and the Beast (2017 film)-6" ] ], [ [ "Beauty and the Beast (2017 film)-6" ] ], [ [ "Beauty and the Beast (2017 film)-6" ] ] ], [ [ [ "Beauty and the Beast (1991 film)-6", "Beauty and the Beast (1991 film)-7" ], "no_evidence" ], [ [ "Beauty and the Beast (1991 film)-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Beauty and the Beast (1991 film)-7" ] ], [ [ "Beauty and the Beast (1991 film)-10" ] ], [ "operation" ] ] ] }, { "qid": "3cb1925dc00aecc7ef83", "term": "Bandy", "description": "ballgame on ice played using skates and sticks", "question": "Would Bandy be likely to become popular in Texas?", "answer": false, "facts": [ "The American Bandy Association governs major Bandy play in the United States.", "There are no teams from Texas registered with the American Bandy Association.", "Sports involving ice and snow tend to be more popular in cold climates.", "Texas has an extremely hot climate." ], "decomposition": [ "What kind of climate favors long term playing of Bandy?", "Which US States have Bandy teams?", "Is Texas included in #2 or have #1 climate?" ], "evidence": [ [ [ [ "Sport in Russia-15" ] ], [ [ "Bandy in the United States-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Bandy-1" ] ], [ [ "Bandy-101" ] ], [ [ "Texas-30" ], "operation" ] ], [ [ [ "Bandy-1" ] ], [ [ "Bandy in the United States-1" ] ], [ "operation" ] ] ] }, { "qid": "bfb90783262ec0da7d31", "term": "Watergate scandal", "description": "Political scandal that occurred in the United States in the 1970s", "question": "Did the Watergate scandal help the Republican party?", "answer": false, "facts": [ "Watergate resulted in President Nixon's resignation.", "President Nixon was a Republican.", "Nixon's resignation resulted in major Democratic gains in Congress during the next election." ], "decomposition": [ "Which central figure resigned as a result of the Watergate scandal?", "Was #1 a member of the Republican party?", "Did the scandal affect the Republican party negatively in following elections?", "Is #2 or #3 negative?" 
], "evidence": [ [ [ [ "Richard Nixon-4" ] ], [ [ "Richard Nixon-2" ] ], [ [ "Watergate scandal-79" ] ], [ "operation" ] ], [ [ [ "Watergate scandal-66" ] ], [ [ "Richard Nixon-122" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Watergate scandal-2" ] ], [ [ "Richard Nixon-1" ] ], [ [ "Watergate scandal-79" ] ], [ "operation" ] ] ] }, { "qid": "150492a2590b8447707b", "term": "Sesame", "description": "species of plant", "question": "Can a sesame seed grow in the human body?", "answer": false, "facts": [ "Seeds need water, oxygen, and light to grow.", "The human digestive system releases powerful acid that dissolves food." ], "decomposition": [ "What does a seed need in order to germinate?", "Can all of #1 be found inside the human body?" ], "evidence": [ [ [ [ "Sesame-12" ], "no_evidence" ], [ [ "Gastric acid-2" ], "no_evidence", "operation" ] ], [ [ [ "Germination-4" ] ], [ [ "Human body-6" ] ] ], [ [ [ "Seed-52" ] ], [ [ "Body water-3" ], "no_evidence" ] ] ] }, { "qid": "18689be402e947929bfe", "term": "Elizabeth II", "description": "Queen of the United Kingdom and the other Commonwealth realms", "question": "Did Elizabeth II frequently visit Queen Victoria?", "answer": false, "facts": [ "Queen Victoria died in 1901.", "Elizabeth II was born in 1926." ], "decomposition": [ "When did Queen Victoria die?", "When was Queen Elizabeth II born?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Queen Victoria-1" ] ], [ [ "Elizabeth II-1" ] ], [ "operation" ] ], [ [ [ "Queen Victoria-1" ] ], [ [ "Elizabeth II-1" ] ], [ "operation" ] ], [ [ [ "Queen Victoria-53" ] ], [ [ "Elizabeth II-5" ] ], [ "operation" ] ] ] }, { "qid": "3ca0a58b1697a63521b6", "term": "Rick and Morty", "description": "Animated sitcom", "question": "Can you watch Rick and Morty in Mariana Trench?", "answer": true, "facts": [ "Rick and Morty is available in blu-ray format.", "You can play blu-ray on a laptop computer ", "It is possible to go to Mariana Trench inside a deep-diving submersible vehicle with a laptop." ], "decomposition": [ "What portable media format is Rick and Morty available in?", "What electronics do deep-diving submersibles have?", "Can any of #1 be played on any of #2?" ], "evidence": [ [ [ [ "Rick and Morty-28" ] ], [ [ "Deep-submergence vehicle-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Rick and Morty-27" ] ], [ [ "DVD player-1", "Submersible-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Rick and Morty-28" ] ], [ [ "Deep diving-11" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "8141eefa48407dfb759b", "term": "Oprah Winfrey", "description": "American businesswoman, talk show host, actress, producer, and philanthropist", "question": "Does Oprah Winfrey have a degree from an Ivy League university?", "answer": true, "facts": [ "Oprah Winfrey has received honorary doctorates from Duke and Harvard Universities", "Harvard University is a member of the Ivy League" ], "decomposition": [ "What schools does Oprah Winfrey have honorary degrees from?", "Is #1 Ivy league?" 
], "evidence": [ [ [ [ "Oprah Winfrey-4" ] ], [ [ "Harvard University-1" ], "operation" ] ], [ [ [ "Oprah Winfrey-4" ] ], [ [ "Ivy League-1" ], "operation" ] ], [ [ [ "Oprah Winfrey-4" ] ], [ [ "Outline of Harvard University-2" ] ] ] ] }, { "qid": "a40dafa5d3114a101432", "term": "Darth Vader", "description": "fictional character in the Star Wars franchise", "question": "Can Darth Vader hypothetically outdunk Bill Walton without using The Force?", "answer": false, "facts": [ "The Force allows a Jedi to move objects with their mind.", "Darth Vader is 6'2\" tall.", "Former basketball player Bill Walton is a towering 6'11\" tall.", "The NBA basketball rim is 10 feet high." ], "decomposition": [ "What characteristic determines someone's ability to dunk?", "What is Darth Vader's measurement of #1?", "What is Bill Walton's measurement of #1?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Human height-1" ] ], [ [ "Darth Vader-8" ], "no_evidence" ], [ [ "Bill Walton-5" ] ], [ "operation" ] ], [ [ [ "Slam dunk-5" ] ], [ [ "Darth Vader-3", "David Prowse-2" ], "no_evidence" ], [ [ "Bill Walton-5" ] ], [ "operation" ] ], [ [ [ "Slam dunk-5" ] ], [ [ "Darth Vader-8" ], "no_evidence" ], [ [ "Bill Walton-5" ] ], [ "operation" ] ] ] }, { "qid": "746b563db21efc4cfa92", "term": "Led Zeppelin", "description": "English rock band", "question": "Did the lead singer of Led Zepplin ever perform with Ernest Chataway?", "answer": true, "facts": [ "Robert Plant is the lead singer of Led Zepplin", "Robert Plant was in the band The Honeydrippers", "Ernest Chataway was in the band The Honeydrippers" ], "decomposition": [ "Who was the lead singer of Led Zepplin?", "Who are the members of the Honeydrippers?", "Is Ernest Chataway also part of #2?", "Is #1 in #2?", "Is #3 and #4 both yes?" ], "evidence": [ [ [ [ "Led Zeppelin-1" ] ], [ [ "The Honeydrippers-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Robert Plant-1" ] ], [ [ "The Honeydrippers-1" ] ], [ [ "The Honeydrippers-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "The Honeydrippers-1" ] ], [ [ "The Honeydrippers-1" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ] ] }, { "qid": "563394228ae6952dd267", "term": "Coca", "description": "group of plant varieties cultivated for coca production", "question": "Would someone with a nosebleed benefit from Coca?", "answer": true, "facts": [ "Coca constricts blood vessels.", "As a result, it serves to stop bleeding. ", "Someone with a nosebleed would want the bleeding to stop." ], "decomposition": [ "What does Coca do to blood vessels?", "What happens to blood when #1 occurs?", "Would someone with a nose want #2 to occur?" ], "evidence": [ [ [ [ "Coca-30" ] ], [ [ "Blood vessel-16" ] ], [ "operation" ] ], [ [ [ "Coca-30" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Coca-30" ] ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "48cac3b98391f6da285f", "term": "Wednesday", "description": "Day of the week", "question": "Will Communion be denied to Wednesday name origin followers?", "answer": true, "facts": [ "Communion is the body and blood of Christ given out during mass.", "Communion is only given to believers baptized in the Christian Church.", "Wednesday comes from Old English Wodnesdaeg referring to Woden, also called Odin.", "Odin was the pagan god of Norse mythology.", "Vikings, believers in Norse mythology, clashed with Christians in Wessex and Northumbria for hundreds of years." 
], "decomposition": [ "Which deity is related to the origin of the name 'Wednesday'?", "Who are the worshipers of #1?", "Which group of people are allowed to take the Communion?", "Are #2 included in #3?" ], "evidence": [ [ [ [ "Odin-2" ] ], [ [ "Odin-2" ], "no_evidence" ], [ [ "Eucharist-1" ] ], [ "operation" ] ], [ [ [ "Wednesday-1" ] ], [ [ "Odin-2" ] ], [ [ "First Communion-1" ] ], [ "operation" ] ], [ [ [ "Wednesday-1" ] ], [ [ "Anglo-Saxon paganism-1" ] ], [ [ "Eucharist-1" ] ], [ "operation" ] ] ] }, { "qid": "18553c3fc528f6a38e5f", "term": "Kingdom of Hungary", "description": "former Central European monarchy (1000–1946)", "question": "Were Walkman's used in the Kingdom of Hungary?", "answer": false, "facts": [ "The Kingdom of Hungary ended in 1946. ", "The Walkman was invented in 1979." ], "decomposition": [ "When did the Kingdom of Hungary come to an end?", "When was Walkman invented?", "Is #2 before #1?" ], "evidence": [ [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Walkman-1" ] ], [ "operation" ] ], [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Walkman-5" ] ], [ "operation" ] ], [ [ [ "Kingdom of Hungary-1" ] ], [ [ "Walkman-2" ] ], [ "operation" ] ] ] }, { "qid": "b8976035f0804cbd356e", "term": "Philippine–American War", "description": "Armed conflict between the First Philippine Republic and the United States", "question": "Would a veteran of the Phillippine-American War come home craving SPAM?", "answer": false, "facts": [ "War veterans are often used to the rations they eat during war and crave similar items at home.", "The Philippine-American war took place before World War II, in 1899.", "Soldiers in World War II were given SPAM in their rations. ", "SPAM was released in 1937." ], "decomposition": [ "The Philippine-American war took place in what year?", "What year was SPAM invented in?", "Is #1 after #2?" ], "evidence": [ [ [ [ "Philippine–American War-1" ] ], [ [ "Spam (food)-3" ] ], [ "operation" ] ], [ [ [ "Philippine–American War-1" ] ], [ [ "Spam (food)-1" ] ], [ "operation" ] ], [ [ [ "Philippine–American War-1" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "198d416b99a4f41cb56d", "term": "Moulin Rouge", "description": "cabaret in Paris, France", "question": "Could Moulin Rouge have been hypothetically used as Spain's Spanish American War triage center?", "answer": true, "facts": [ "The Moulin Rouge cabaret in France had a capacity of 850 people.", "Spain had 700-800 injured during Spanish American War." ], "decomposition": [ "How many people can be seated in Moulin Rouge?", "How many Spaniards were injured during the Spanish American War?", "Ia #1 greater than #2?" ], "evidence": [ [ [ [ "Moulin Rouge-2" ], "no_evidence" ], [ [ "Spanish–American War-55" ] ], [ "operation" ] ], [ [ [ "Moulin Rouge-2" ], "no_evidence" ], [ [ "Spanish–American War-57" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Moulin Rouge! 
(musical)-3" ], "no_evidence" ], [ [ "Spanish–American War-55" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "5b223afd5634a367d859", "term": "Metallica", "description": "American heavy metal band", "question": "Did Metallica band members cutting their hair hurt their sales?", "answer": true, "facts": [ "Metallica famously cut their hair in 1996 which caused a huge divide in their fanbase.", "Metallica's best selling album, The Black Album, was released in 1991 and has sold over 20 million copies.", "Since 1996, Metallica have released 5 studio albums.", "Metalica's 5 studio albums since 1996 have sold around a combined 14 million copies" ], "decomposition": [ "When did Metallica band members cut their hair?", "How many copies of their best selling album has been sold?", "How many copies of their last five albums have been sold altogether?", "Is #1 after the release date of #2 and before those of #3, and #2 greater than #3?" ], "evidence": [ [ [ [ "Metallica-25" ] ], [ [ "Metallica (album)-22" ] ], [ [ "Death Magnetic-44", "Death Magnetic-45", "Hardwired... to Self-Destruct-15", "Load (album)-2", "Reload (Metallica album)-6", "St. Anger-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Metallica-25" ] ], [ [ "Metallica discography-1" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Load (album)-1", "Load (album)-13" ], "no_evidence" ], [ [ "Metallica (album)-3" ] ], [ [ "Death Magnetic-45", "Hardwired... to Self-Destruct-2", "Load (album)-2", "Reload (Metallica album)-6", "St. Anger-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Metallica-25" ] ], [ [ "Metallica (album)-22" ] ], [ [ "Death Magnetic-47", "Hardwired... to Self-Destruct-15", "Lulu (Lou Reed and Metallica album)-16", "Reload (Metallica album)-1", "St. Anger-3" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "35f6213b1bddcf156472", "term": "B", "description": "letter in the Latin alphabet", "question": "Does the letter B's place in alphabet exceed number of 2008 total lunar eclipses?", "answer": true, "facts": [ "The letter B is the second letter in the Latin Alphabet.", "There was one total lunar eclipse in 2008." ], "decomposition": [ "What number represents the position of letter B in the English alphabet?", "How many total lunar eclipses occurred in 2008?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "B-1" ] ], [ [ "August 2008 lunar eclipse-1" ] ], [ "operation" ] ], [ [ [ "B-1" ] ], [ [ "February 2008 lunar eclipse-9" ], "no_evidence" ], [ "operation" ] ], [ [ [ "B-1" ] ], [ [ "August 2008 lunar eclipse-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "6f1f8afdf44cf088997d", "term": "Cinco de Mayo", "description": "Annual celebration held on May 5", "question": "Would Emmanuel Macron celebrate Cinco de Mayo?", "answer": false, "facts": [ "Cinco de Mayo is observed to commemorate the Mexican Army's victory over the French Empire at the Battle of Puebla, on May 5, 1862.", "Emmanuel Macron is the current president of France.", "Emmanuel Macron was born in France and his ancestry traces back to France.", "People do not typically celebrate events in which their country was defeated." ], "decomposition": [ "Which countries usually celebrate the Cinco de Mayo?", "Which country is Emmanuel Macron from?", "Is #2 included in any of #1?" 
], "evidence": [ [ [ [ "Cinco de Mayo-17" ] ], [ [ "Amiens-1", "Emmanuel Macron-5" ] ], [ "operation" ] ], [ [ [ "Battle of Puebla-11" ] ], [ [ "Emmanuel Macron-1" ] ], [ "operation" ] ], [ [ [ "Cinco de Mayo-12" ], "no_evidence" ], [ [ "Emmanuel Macron-63" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "63360dd6c8c5bdc31bbe", "term": "Gray whale", "description": "species of mammal", "question": "Would a baby gray whale fit in a tractor-trailer?", "answer": true, "facts": [ "Gray whales measure 4.9 m (16 ft) in length for newborns.", "A semi-trailer is 48 feet long." ], "decomposition": [ "How large is a baby gray whale?", "How large is a tractor-trailer?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Gray whale-8" ] ], [ [ "Trailer (vehicle)-4" ] ], [ [ "Trailer (vehicle)-4" ], "operation" ] ], [ [ [ "Gray whale-21" ] ], [ [ "Semi-trailer truck-25" ] ], [ "operation" ] ], [ [ [ "Gray whale-21" ] ], [ [ "Semi-trailer truck-18" ] ], [ "operation" ] ] ] }, { "qid": "3799fbe219d3a3eb0482", "term": "Twenty-third Amendment to the United States Constitution", "description": "Grants residents of Washington, D.C. the right to vote in U.S. presidential elections", "question": "Was Harry Truman's presidency unaffected by the twenty-third Amendment to the US Constitution?", "answer": true, "facts": [ "The 23rd Amendment to the US Constitution was passed in 1961.", "Harry Truman was the President from 1945-1953." ], "decomposition": [ "When was Harry Truman the president of the United States?", "When was the 23rd Amendment passed?", "Is #2 after #1?" ], "evidence": [ [ [ [ "Harry S. Truman-1" ] ], [ [ "Twenty-third Amendment to the United States Constitution-1" ] ], [ "operation" ] ], [ [ [ "Harry S. Truman-99" ], "no_evidence" ], [ [ "Twenty-third Amendment to the United States Constitution-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Harry S. Truman-1" ] ], [ [ "Twenty-third Amendment to the United States Constitution-1" ] ], [ "operation" ] ] ] }, { "qid": "ac55fc84d16659a1f0df", "term": "Joker (character)", "description": "Fictional character in the DC Universe", "question": "Could Bart Simpson have owned comics with The Joker?", "answer": true, "facts": [ "The first appearance of the Joker was in 1940.", "Bart Simpson first appeared as a child in \"The Simpsons\" in 1987." ], "decomposition": [ "When was the first appearance of the Joker?", "When did Bart Simpson first appear?", "Did #2 come after #1?" ], "evidence": [ [ [ [ "Joker (character)-1" ] ], [ [ "Bart Simpson-1" ] ], [ "operation" ] ], [ [ [ "Joker (character)-1" ] ], [ [ "Bart Simpson-1" ] ], [ "operation" ] ], [ [ [ "Joker (character)-59" ] ], [ [ "Bart Simpson-13" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a2b0a30501a4fb9ae204", "term": "Do it yourself", "description": "building, modifying, or repairing something without the aid of experts or professionals", "question": "Do Do It Yourself channels online always show realistic projects?", "answer": false, "facts": [ "The Youtube channel '5 Minute Crafts' specializes in DIY projects for all ages.", "\"5 Minute Crafts\" has come under fire for posting videos that were fraudulent or dangerous in nature. " ], "decomposition": [ "What are some popular Do It Yourself media?", "Of #1, which are YouTube channels?", "Are all of #2 regarded as realistic projects?" 
], "evidence": [ [ [ [ "Do it yourself-13" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Do it yourself-13", "Do it yourself-6" ] ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Do it yourself-10", "Do it yourself-11", "Do it yourself-12", "Do it yourself-6" ], "no_evidence" ], [ "operation" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "930b8d6638dcc19fce17", "term": "Mercury (planet)", "description": "Smallest and closest planet to the Sun in the Solar System", "question": "Would only warm weather attire be a good idea on Mercury?", "answer": false, "facts": [ "Warm weather attire would not protect your body in cold temperatures.", "Mercury can reach temperatures of −280 °F at night." ], "decomposition": [ "What are the best temperatures to wear warm weather attire?", "What is the average temperature of Mercury at night?", "Is there any overlap between #1 and #2?" ], "evidence": [ [ [ [ "Clothing-2" ] ], [ [ "Mercury (planet)-4" ] ], [ [ "Clothing-2" ] ] ], [ [ [ "Winter-18" ], "no_evidence" ], [ [ "Mercury (planet)-4" ] ], [ "operation" ] ], [ [ [ "Highest temperature recorded on Earth-4" ], "no_evidence" ], [ [ "Mercury (planet)-4" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "fc60a290ab2e9a467f1c", "term": "Spinach", "description": "species of plant", "question": "For bone growth, is kale more beneficial than spinach?", "answer": true, "facts": [ "Calcium is an important nutrient for bone health.", "Kale has more calcium per serving than spinach." ], "decomposition": [ "What nutrient is critical for bone growth?", "How much #1 does kale contain?", "How much #1 does spinach contain?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Bone growth factor-3" ] ], [ [ "Kale-11" ] ], [ [ "Spinach-7" ] ], [ "operation" ] ], [ [ [ "Calcium-36" ] ], [ [ "Kale-11" ] ], [ [ "Spinach-7" ] ], [ "operation" ] ], [ [ [ "Calcium-3" ] ], [ [ "Kale-11" ], "no_evidence" ], [ [ "Spinach-7" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "0357ef6f6b33d7c077d3", "term": "Caracal", "description": "Small wild cat", "question": "Can you measure a Caracal with a protractor?", "answer": false, "facts": [ "A caracal is a small wild cat", "Cats and other animals are measured in terms of properties like length, width, and weight", "Protractors measure angles" ], "decomposition": [ "What are protractors used to measure?", "What is a caracal?", "Are #1 and #2 similar (or the same)?" ], "evidence": [ [ [ [ "Protractor-1" ] ], [ [ "Caracal-1" ] ], [ "operation" ] ], [ [ [ "Protractor-1" ] ], [ [ "Caracal-1" ] ], [ "operation" ] ], [ [ [ "Protractor-4" ] ], [ [ "Caracal-1" ] ], [ "operation" ] ] ] }, { "qid": "c92281b901ba7765c2e2", "term": "French toast", "description": "bread soaked in beaten eggs and then fried", "question": "Can a goat be used for one of the ingredients in French toast?", "answer": true, "facts": [ "French toast is made from bread, eggs, milk, and cinnamon.", "Goats are able to produce milk, similar to cows.", "Goats milk is used in a variety of cheeses and milks sold in super markets." ], "decomposition": [ "What common dairy product can be obtained from goats?", "What are the typical ingredients of French toast?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Goat-46" ] ], [ [ "French toast-1" ] ], [ "operation" ] ], [ [ [ "Goat-1" ] ], [ [ "French toast-2" ] ], [ "operation" ] ], [ [ [ "Goat-46" ] ], [ [ "French toast-1" ] ], [ "operation" ] ] ] }, { "qid": "4007fbbc40908a309322", "term": "Dr. 
Seuss", "description": "American children's writer and illustrator", "question": "Did Dr. Seuss make himself famous?", "answer": false, "facts": [ "Dr. Seuss's wife was Helen Palmer.", "Helen Palmer suggested that Dr. Seuss become an artist rather than a professor.", "Helen Palmer inspired much of Dr. Seuss's work." ], "decomposition": [ "Who was Dr. Seuss' wife?", "Did #1 not serve as inspiration and give key suggestions to Dr. Seuss?" ], "evidence": [ [ [ [ "Dr. Seuss-7" ] ], [ [ "Dr. Seuss-7" ], "no_evidence", "operation" ] ], [ [ [ "Helen Palmer (author)-1" ] ], [ [ "Helen Palmer (author)-5" ] ] ], [ [ [ "Helen Palmer (author)-1" ] ], [ [ "Helen Palmer (author)-3" ], "operation" ] ] ] }, { "qid": "94a6b752d22abe932964", "term": "Saga", "description": "stories mostly ancient Nordic mythology and history of Germanic tribes. Written in the Old Norse language, mainly in Iceland", "question": "Was song of Roland protagonist friendly with group that had sagas?", "answer": false, "facts": [ "The sagas were Nordic mythological stories that were celebrated by the Vikings.", "The Song of Roland was an epic poem about the nephew of Charlemagne.", "Charlemagne was King of the Franks.", "The Franks fought many battles against Vikings including the Siege of Paris in 845 AD." ], "decomposition": [ "Sagas are found in which culture?", "What culture is the hero of Song of Roland from?", "Did #1 and #2 get along well?" ], "evidence": [ [ [ [ "Saga-2" ] ], [ [ "The Song of Roland-1" ] ], [ [ "Viking raids in the Rhineland-7" ], "operation" ] ], [ [ [ "Chivalric sagas-9" ], "no_evidence" ], [ [ "The Song of Roland-27" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Saga-2" ] ], [ [ "The Song of Roland-1" ] ], [ [ "Vikings-1" ], "operation" ] ] ] }, { "qid": "122724772ba385a713a6", "term": "Johns Hopkins University", "description": "Private research university in Baltimore, Maryland", "question": "Could the endowment of Johns Hopkins University pay off the MBTA debt?", "answer": false, "facts": [ "Johns Hopkins University had an endowment of $6.28 billion in 2019.", "The MBTA is in debt for approximately $9 billion." ], "decomposition": [ "How much was Johns Hopkins University endowment in 2019?", "How much is the MBTA debt?", "Is #1 greater than #2?" ], "evidence": [ [ [ "no_evidence" ], [ [ "Massachusetts Bay Transportation Authority-91" ] ], [ "operation" ] ], [ [ [ "Johns Hopkins University-11" ], "no_evidence" ], [ [ "Massachusetts Bay Transportation Authority-90", "Massachusetts Bay Transportation Authority-91" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "59f7a8f6031615bfd3f7", "term": "Winter", "description": "one of the Earth's four temperate seasons, occurring between autumn and spring", "question": "Is Christmas celebrated during winter?", "answer": true, "facts": [ "Winter begins around December 20.", "Christmas is celebrated on December 25, a few days later." ], "decomposition": [ "When is Christmas celebrated?", "When does winter begin in the US?", "When does winter end in the US?", "Is #1 between #2 and #3?" 
], "evidence": [ [ [ [ "Christmas-1" ] ], [ [ "Winter-8" ] ], [ [ "Winter-8" ] ], [ "operation" ] ], [ [ [ "Christmas-1" ] ], [ [ "Winter-9" ] ], [ [ "Winter-9" ] ], [ "operation" ] ], [ [ [ "Christmas-28" ] ], [ [ "Winter-9" ], "no_evidence" ], [ [ "Winter-9" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "d2e19ba0ffb3418fc545", "term": "Sun bear", "description": "bear found in tropical forest habitats of Southeast Asia", "question": "Do sun bears stay active during winter?", "answer": true, "facts": [ "The sun bear is a species in the family Ursidae occurring in the tropical forests of Southeast Asia.", " Sun bears do not seem to hibernate.", "Hibernation is a seasonal heterothermy characterized by low body-temperature, slow breathing and heart-rate, and low metabolic rate. It most commonly occurs during winter months." ], "decomposition": [ "What characterizes the state of hibernation that some animals go into during winter?", "Are sun bears known to not exhibit the behavior described by #1?" ], "evidence": [ [ [ [ "Hibernation-3" ] ], [ [ "Sun bear-2" ], "operation" ] ], [ [ [ "Hibernation-1" ] ], [ [ "Sun bear-2" ], "operation" ] ], [ [ [ "Hibernation-1" ] ], [ [ "Sun bear-11" ] ] ] ] }, { "qid": "0251c3c8ca6b689efe0b", "term": "Caracal", "description": "Small wild cat", "question": "Could a student at the University of Houston see a caracal on campus?", "answer": false, "facts": [ "The caracal is native to Africa, the Middle East, Central Asia, and India.", "The University of Houston is located in the United States.", "The United States is located in North America." ], "decomposition": [ "What areas is the caracal native to?", "In what area is the University of Houston located?", "Is there an area present in both #1 and #2?" ], "evidence": [ [ [ [ "Caracal-1" ] ], [ [ "University of Houston-1" ] ], [ "operation" ] ], [ [ [ "Caracal-20" ] ], [ [ "University of Houston-11" ] ], [ [ "Houston-2", "Kyzylkum Desert-1" ], "operation" ] ], [ [ [ "Caracal-1" ] ], [ [ "University of Houston-1" ] ], [ "operation" ] ] ] }, { "qid": "dac61f77d13b59a9631e", "term": "Euro", "description": "European currency", "question": "Would someone pay for a coffee in NYC with Euros?", "answer": false, "facts": [ "New York City is located within the United States.", "The currency used in the United States is the United States dollar, not the Euro." ], "decomposition": [ "In what country is New York City?", "What is the currency for #1?", "Is #2 the Euro?" ], "evidence": [ [ [ [ "New York City-1" ] ], [ [ "United States dollar-1" ] ], [ "operation" ] ], [ [ [ "New York City-1" ] ], [ [ "United States dollar-1" ] ], [ "operation" ] ], [ [ [ "New York City-1" ] ], [ [ "United States dollar-1" ] ], [ "operation" ] ] ] }, { "qid": "130613016c3647a2f44b", "term": "Honey bee", "description": "Eusocial flying insect of genus Apis, producing surplus honey", "question": "Can a single honey bee sting multiple humans?", "answer": false, "facts": [ "When a honey bee stings a human, the stinger becomes stuck in the skin and detaches from the bee.", "This usually results in the bee's death.", "Even if it survives, it no longer has a stinger to attack another person with." ], "decomposition": [ "What happens to a bee's stinger when it stings a human?", "What happens to a bee when #1 occurs?", "Can #2 sting another person?" 
], "evidence": [ [ [ [ "Bee sting-6" ] ], [ [ "Bee sting-6" ] ], [ [ "Bee sting-6" ], "operation" ] ], [ [ [ "Honey bee-61" ] ], [ [ "Honey bee-61" ] ], [ "operation" ] ], [ [ [ "Bee sting-8" ] ], [ [ "Bee sting-8" ] ], [ [ "Bee sting-8" ] ] ] ] }, { "qid": "3e0cf93865c58f15a841", "term": "666 (number)", "description": "Natural number", "question": "Would the number 666 appear in a church?", "answer": false, "facts": [ "A church is a place of worship in Christianity.", "Jesus Christ is worshiped by adherents of Christianity.", "666 is a symbolic representation of the Antichrist.", "An Antichrist is someone that opposes Jesus Christ." ], "decomposition": [ "What does the number 666 represent to Christians?", "Would Christians want to be associated with #1?" ], "evidence": [ [ [ [ "Number of the Beast-1" ] ], [ "operation" ] ], [ [ [ "Number of the Beast-1" ] ], [ "operation" ] ], [ [ [ "666 (number)-8" ] ], [ [ "Satan-1" ] ] ] ] }, { "qid": "2325fcbd3c9d829d366e", "term": "Cape Town", "description": "Legislative capital of South Africa", "question": "Is Cape Town south of the Equator?", "answer": true, "facts": [ "Cape Town is an important city in South Africa.", "South Africa is located entirely south of the Equator." ], "decomposition": [ "What country is Cape Town located in?", "Is #1 located south of the equator?" ], "evidence": [ [ [ [ "Cape Town-74" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Cape Town-3" ] ], [ [ "Equator-4" ] ] ], [ [ [ "Cape Town-1" ] ], [ [ "South Africa-2" ] ] ] ] }, { "qid": "5d4ae0455b8641ff0c03", "term": "Silicon", "description": "Chemical element with atomic number 14", "question": "Will silicon wedding rings outsell bromine wedding rings?", "answer": true, "facts": [ "Wedding rings are typically made of precious shiny stones such as diamonds.", "Silicon is a solid rock like element at room temperature that has a natural lustre.", "Bromine is a liquid at room temperature that is toxic to the touch." ], "decomposition": [ "What state of matter is necessary for something to be worn as a ring at room temperature?", "What state of matter is bromine at room temperature?", "Is #2 the same as #1?", "If #3 is no, then a bromide wedding ring does not exist.", "What state of matter is silicon at room temperature? (see comment for #4)" ], "evidence": [ [ [ [ "Ring (jewellery)-1", "Solid-1" ] ], [ [ "Bromine-1" ] ], [ "operation" ], [ "operation" ], [ [ "Silicon-1" ] ] ], [ [ "no_evidence" ], [ [ "Bromine-1" ] ], [ "operation" ], [ "no_evidence" ], [ [ "Silicon-1" ] ] ], [ [ [ "Solid-1" ] ], [ [ "Bromine-1" ] ], [ "operation" ], [ "operation" ], [ [ "Silicon-1" ] ] ] ] }, { "qid": "de4808ca911479cdba34", "term": "Ontology", "description": "study of the nature of being, becoming, existence or reality, as well as the basic categories of being and their relations", "question": "Does ontology require a scalpel?", "answer": false, "facts": [ "A scalpel is used during surgery.", "Ontology is a philosophical domain, not a medical one." ], "decomposition": [ "What are the areas of focus of ontology?", "Where does a scalpel find application?", "Is #2 included in #1?" 
], "evidence": [ [ [ [ "Ontology-1" ] ], [ [ "Scalpel-1" ] ], [ "operation" ] ], [ [ [ "Ontology-1" ] ], [ [ "Scalpel-1" ] ], [ "operation" ] ], [ [ [ "Ontology-1" ] ], [ [ "Scalpel-1" ] ], [ "operation" ] ] ] }, { "qid": "28ae8f739494f6b0d307", "term": "Godzilla", "description": "Giant monster or kaiju", "question": "Is Godzilla's image likely grounds for a lawsuit in 2050?", "answer": false, "facts": [ "The copyright for Godzilla is owned by Toho Company Limited.", "The first Godzilla film was released by Toho in 1954.", "Works that are significantly old enter the public domain and can be used without copyright permission.", "Godzilla will enter the public domain in the year 2049." ], "decomposition": [ "When can a copyrighted item be used without permission?", "In what year will Godzilla as a creative piece of work attain #1 status?", "Is #2 after 2050?" ], "evidence": [ [ [ [ "Copyright term-2" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Copyright-4" ] ], [ [ "Godzilla-1", "Tomoyuki Tanaka-1" ] ], [ "operation" ] ], [ [ [ "Public domain-16" ] ], [ [ "Godzilla-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "033f7214d9f1aeddb04e", "term": "Hyphen", "description": "Punctuation mark used to join words", "question": "Is Olivia Newton-John hyphenated celebrity name with most letters?", "answer": false, "facts": [ "Olivia Newton-John has sixteen letters in her name.", "Actress Catherine Zeta-Jones has 18 letters in her name.", "Actor Joseph Gordon-Levitt has 18 letters in his name." ], "decomposition": [ "How many letters are in the name Olivia Newton-John?", "How many letters are in the name Catherine Zeta-Jones?", "How many letters are in the name Joseph Gordon-Levitt?", "Is #1 greater than both #2 and #3?" ], "evidence": [ [ [ "operation" ], [ "operation" ], [ "operation" ], [ [ "Letter (alphabet)-3" ], "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Olivia Newton-John-46" ], "no_evidence" ], [ [ "Catherine Zeta-Jones-8" ], "no_evidence" ], [ [ "Joseph Gordon-Levitt-5" ], "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "a5775fcc073499d35da1", "term": "Amazonas (Brazilian state)", "description": "State of Brazil", "question": "Does walking across Amazonas put a person's life at risk?", "answer": true, "facts": [ "Amazonas is mostly tropical jungle.", "Tropical jungles contain dangerous creatures.", "Dangerous creatures put people's lives at risk." ], "decomposition": [ "What is the Amazons made up of?", "Does #1 contain anything dangerous?", "Does #2 put people's life at risk?" ], "evidence": [ [ [ [ "Amazon River-1" ] ], [ [ "Amazon River-75" ], "no_evidence" ], [ [ "Piranha-16" ], "operation" ] ], [ [ [ "Amazon River-19" ] ], [ [ "Amazon River-76" ] ], [ [ "Shark attack-1" ] ] ], [ [ [ "Amazon basin-2" ] ], [ [ "Amazon basin-10" ] ], [ "operation" ] ] ] }, { "qid": "d439d1a70c443691145d", "term": "Daytona 500", "description": "Auto race held in Daytona, Florida, United States", "question": "Will electric car struggle to finish Daytona 500?", "answer": true, "facts": [ "The Daytona 500 is a 2.5 mile long race.", "The Daytona 500 requires 200 laps to complete.", "The best electric car engines last around 390 miles." ], "decomposition": [ "How long (in miles) is the Daytona 500 race?", "What is the maximum electric range (in miles) of the world's best selling electric car?", "Is #2 less than #1?" 
], "evidence": [ [ [ [ "Daytona 500-1" ] ], [ [ "Tesla Model 3-1" ] ], [ "operation" ] ], [ [ [ "Daytona 500-7" ] ], [ [ "Electric car-20" ] ], [ "operation" ] ], [ [ [ "Daytona 500-7" ] ], [ [ "Electric car-3" ] ], [ "operation" ] ] ] }, { "qid": "42b892d5dd158c0fd26a", "term": "Swastika", "description": "a geometrical figure and an ancient religious icon in the cultures of Eurasia and 20th-century symbol of Nazism", "question": "Are swastikas used in the most common religion in India?", "answer": true, "facts": [ "The swastika is a religious symbol that is used in Hinduism, Buddhism, and Jainism.", "Almost 80% of people in India practice Hinduism." ], "decomposition": [ "Which religions use the swastika as a symbol?", "What is the most common religion in India?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Swastika-1" ] ], [ [ "Hinduism in India-1" ] ], [ "operation" ] ], [ [ [ "Swastika-1" ] ], [ [ "Hinduism-1" ] ], [ "operation" ] ], [ [ [ "Swastika-1" ] ], [ [ "Religion in India-1" ] ], [ "operation" ] ] ] }, { "qid": "30b5a9505949caa3c4cd", "term": "President of India", "description": "Ceremonial head of state of India", "question": "Is it more expensive to run for President of India than to buy a new iPhone 11?", "answer": false, "facts": [ "Candidates for the presidency of India must pay a deposit of Rs 15,000", "A brand new iPhone 11 costs Rs 67,300" ], "decomposition": [ "How much must a candidate pay to run for president in India?", "How much does a new iPhone 11 cost?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "President of India-63" ], "no_evidence" ], [ [ "IPhone-10" ], "no_evidence" ], [ "operation" ] ], [ [ [ "President of India-2" ], "no_evidence" ], [ [ "IPhone 11-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "President of India-57" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "67c5305550f1612a2f49", "term": "Rahul Dravid", "description": "Indian cricketer", "question": "Is it hard for Rahul Dravid to order food at a restaurant in Aurangabad?", "answer": false, "facts": [ "Aurangabad is located in Maharashtra.", "Marathi is an Indo-Aryan language spoken predominantly by around 100 million Marathi people of Maharashtra, India.", "Rahul Dravid is fluent in Marathi." ], "decomposition": [ "What languages can Rahul Dravid speak fluently?", "In which state is Aurangabad located?", "What is the official language of #2?", "Is #1 exclusive of #3?" ], "evidence": [ [ [ [ "Rahul Dravid-9" ] ], [ [ "Aurangabad-1" ] ], [ [ "Maharashtra-2" ] ], [ "operation" ] ], [ [ [ "Rahul Dravid-9" ] ], [ [ "Aurangabad-1" ] ], [ [ "Marathi language-1" ] ], [ "operation" ] ], [ [ [ "Rahul Dravid-9" ] ], [ [ "Aurangabad district, Maharashtra-1" ] ], [ [ "Maharashtra-2" ] ], [ "operation" ] ] ] }, { "qid": "02f5a6e6eedc57dfa22d", "term": "Quantum mechanics", "description": "Branch of physics that acts as an abstract framework formulating all the laws of nature", "question": "Did Terry Pratchett write about quantum mechanics?", "answer": true, "facts": [ "“What're quantum mechanics?\" \"I don't know. People who repair quantums, I suppose.”", "\"Granny Weatherwax wouldn’t know what a pattern of quantum inevitability was if she found it eating her dinner. If you mentioned the words ‘paradigm of space-time’ to her she’d just say ‘What?’ But that didn’t mean she was ignorant. 
It just meant that she didn’t have truck with words, especially gibberish.\"" ], "decomposition": [ "What was Terry Pratchett's occupation?", "Which subject deals with the study of quantum mechanics?", "Is #2 a part of #1's job?" ], "evidence": [ [ [ [ "Terry Pratchett-1" ] ], [ [ "Quantum mechanics-1" ] ], [ "no_evidence" ] ], [ [ [ "Terry Pratchett-1" ] ], [ [ "Quantum mechanics-1" ] ], [ "operation" ] ], [ [ [ "The Science of Discworld-1" ] ], [ [ "Quantum mechanics-1" ] ], [ "operation" ] ] ] }, { "qid": "20814715f495fc0e3dca", "term": "Reza Shah", "description": "Shah of Iran, Founder of the Imperial state of iran", "question": "Did number of Imams Reza Shah believed in exceed number of Jesus's disciples?", "answer": false, "facts": [ "Reza Shah, the founder of the Imperial state of Iran, was a Twelver Shia Muslim.", "Twelver Shia Muslims believe that there are 12 Imams.", "Jesus is typically represented as having 12 disciples." ], "decomposition": [ "What was the religion signature of Reza Shah?", "How many Imams do adherents of #1 believe in?", "How many disciples did Jesus have?", "Is #2 greater than #3?" ], "evidence": [ [ [ [ "Reza Shah-19" ] ], [ [ "Imam-2" ] ], [ [ "Disciple whom Jesus loved-4" ] ], [ "operation" ] ], [ [ [ "Reza Shah-26" ], "no_evidence" ], [ [ "Twelver-1" ] ], [ [ "Apostles-1" ] ], [ "operation" ] ], [ [ [ "Reza Shah-26" ], "no_evidence" ], [ [ "Twelver-1" ] ], [ [ "Apostles-1" ] ], [ "operation" ] ] ] }, { "qid": "ee3000c7feb6bfb4ef2f", "term": "Final Fantasy VI", "description": "1994 video game", "question": "Does Final Fantasy VI require electricity to play?", "answer": true, "facts": [ "Final Fantasy VI is a video game.", "Video games are played using a video game console and television.", "Video game consoles and televisions require electricity in order to function." ], "decomposition": [ "Which device(s) would be needed to play the video game Final Fantasy VI?", "Do any of #1 run on electricity?" ], "evidence": [ [ [ [ "Final Fantasy VI-2" ] ], [ [ "Super Nintendo Entertainment System-22" ] ] ], [ [ [ "Final Fantasy VI-1", "Super Nintendo Entertainment System-1" ] ], [ "operation" ] ], [ [ [ "Final Fantasy VI-1" ] ], [ [ "Video game console-1" ], "operation" ] ] ] }, { "qid": "5b941af0d08dc0ea0485", "term": "Newbie", "description": "slang term for a novice or newcomer", "question": "Would Dale Earnhardt Jr. be considered a newbie?", "answer": false, "facts": [ "Dale Earnhardt Jr. is a Nascar racer with 19 years of experience.", "The average Nascar career length is 3.7 years." ], "decomposition": [ "What is a newbie?", "How many years has Dale Earnhardt Jr been a racer?", "Are #1 and #2 similar?" 
], "evidence": [ [ [ [ "Newbie-1" ] ], [ [ "Dale Earnhardt Jr.-3", "Dale Earnhardt Jr.-5" ] ], [ "operation" ] ], [ [ [ "Newbie-1" ] ], [ [ "Dale Earnhardt Jr.-1", "Dale Earnhardt Jr.-5" ] ], [ "operation" ] ], [ [ [ "Newbie-1" ] ], [ [ "Dale Earnhardt-11" ] ], [ "operation" ] ] ] }, { "qid": "50ee714386bc6323ae11", "term": "Pelvis", "description": "lower part of the trunk of the human body between the abdomen and the thighs (sometimes also called pelvic region of the trunk", "question": "Is cycling a high-risk activity for pelvis fractures?", "answer": false, "facts": [ "Cycling is a low-impact activity ", "Stress fractures in a pelvic bone often develop as a result of repetitive, high-impact activity that puts stress on the pelvis, such as long-distance running or ballet" ], "decomposition": [ "What type of activity can result in stress fractures?", "Would cycling be considered #1?" ], "evidence": [ [ [ [ "Stress fracture-6" ] ], [ [ "Stationary bicycle-7" ], "no_evidence", "operation" ] ], [ [ [ "Stress fracture-1" ] ], [ "operation" ] ], [ [ [ "Pelvic fracture-20" ] ], [ "operation" ] ] ] }, { "qid": "1647d6f49307d7678d18", "term": "Anorexia nervosa", "description": "Eating disorder characterized by refusal to maintain a healthy body weight, and fear of gaining weight due to a distorted self image", "question": "Would a person with Anorexia nervosa be more likely to break a bone than a regular person?", "answer": true, "facts": [ "People with Anorexia Nervosa restrict food and as a result lack essential nutrients.", "Many people with Anorexia Nervosa, are at high risk for osteoporosis(and to a lesser extent bulimia nervosa) will have low bone density and consequently reduced bone strength.", "People with Anorexia Nervosa, are at high risk for osteoporosis. " ], "decomposition": [ "What kind of eating behavior do people with anorexia nervosa exhibit?", "Does #1 lead to reduced bone strength?" ], "evidence": [ [ [ [ "Anorexia nervosa-1" ] ], [ [ "Osteoporosis-1" ], "operation" ] ], [ [ [ "Anorexia nervosa-1" ] ], [ [ "Anorexia nervosa-54" ] ] ], [ [ [ "Anorexia nervosa-4" ] ], [ [ "Malnutrition-3" ] ] ] ] }, { "qid": "44875993b96eb08c70b4", "term": "Jennifer Lawrence", "description": "American actress", "question": "Is Jennifer Lawrence's middle name similar to the name of a Scorsese collaborator?", "answer": true, "facts": [ "Jennifer Lawrence's middle name is Shrader.", "Paul Schrader is a screenwriter and director.", "Paul Schrader wrote the screenplay for Taxi Driver, Raging Bull, The Last Temptation of Christ, and Bringing Out the Dead.", "Martin Scorsese directed Taxi Driver, Raging Bull, The Last Temptation of Christ, and Bringing Out the Dead." ], "decomposition": [ "What is Jennifer Lawrence's middle name?", "Who has collaborated with Scorsese?", "Does #2 include someone with #1 in their name?" ], "evidence": [ [ [ [ "Jennifer Lawrence-1" ] ], [ [ "Paul Schrader-1" ] ], [ "operation" ] ], [ [ [ "Jennifer Lawrence-1" ] ], [ [ "Paul Schrader-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Jennifer Lawrence-1" ] ], [ [ "Paul Schrader-1" ] ], [ "operation" ] ] ] }, { "qid": "7e3bd1602e5d32ab1e59", "term": "Ten-pin bowling", "description": "sport", "question": "Can a ten-pin bowling pin be a deadly weapon?", "answer": true, "facts": [ "A regulation ten-pin bowling pin weighs around four pounds.", "The average rolling pin used in cooking weighs slighty over four pounds.", "A 2015 case covered in the BBC involved a rolling pin as the murder weapon." 
], "decomposition": [ "How much does a ten-pin bowling pin weigh?", "What kind of pin has been used as a murder weapon?", "How much does #2 weigh?", "Is #3 roughly the same as #1?" ], "evidence": [ [ [ [ "Bowling pin-2" ] ], [ [ "Sammy White's Brighton Bowl-3" ] ], [ [ "Candlepin bowling-11" ] ], [ "operation" ] ], [ [ [ "Bowling pin-2" ] ], [ [ "Firing pin-1" ] ], [ [ "Lock time-4" ], "no_evidence" ], [ [ "Bowling pin-2" ] ] ], [ [ [ "Bowling pin-2" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Bowling pin-2" ] ], [ [ "Rolling pin-3" ], "no_evidence" ], [ [ "Rolling pin-2" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "990bc1e418fff10f8d51", "term": "Parachuting", "description": "action sport of exiting an aircraft and returning to Earth using a parachute", "question": "Can parachuting amateurs ignore hurricane force winds bulletins?", "answer": false, "facts": [ "A hurricane force wind warning is issued by the National Weather Service for winds above 74 mph ", "Solo student parachuters are prohibited from jumping in winds exceeding 14 mph" ], "decomposition": [ "What's the minimum wind speed above which the National Weather Service issues hurricane force wind warnings?", "What's the maximum wind speed in which a solo student parachuter can jump?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Hurricane force wind warning-1" ] ], [ [ "Parachuting-21" ] ], [ "operation" ] ], [ [ [ "Saffir–Simpson scale-2" ] ], [ [ "Parachuting-21" ] ], [ "operation" ] ], [ [ [ "Hurricane force wind warning-1" ] ], [ [ "Parachute-36" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "a21a4ba185d355de3e26", "term": "Molière", "description": "17th-century French playwright and actor", "question": "Was Moliere Queen Margot's ill fated lover?", "answer": false, "facts": [ "Queen Margot is a character in Alexande Dumas's La Reine Margot.", "Queen Margot keeps the head of her executed lover.", "Joseph Boniface de La Môle, nicknamed La Mole, was executed as a conspirator against Queen Margot's kingdom.", "Queen Margot is set during the St. Bartholomew's Day Massacre which occurred in 1572.", "Moliere was born in 1622." ], "decomposition": [ "In what work by Alexande Dumas does Queen Margot appear?", "When was #1 written?", "In what year was Moliere born?", "Is #3 before #2?" ], "evidence": [ [ [ [ "La Reine Margot (novel)-1" ] ], [ [ "La Reine Margot (novel)-1" ] ], [ [ "Molière-1" ] ], [ "operation" ] ], [ [ [ "La Reine Margot (novel)-1", "La Reine Margot (novel)-3" ] ], [ [ "La Reine Margot (novel)-1", "La Reine Margot (novel)-3" ] ], [ [ "Molière-1" ] ], [ "operation" ] ], [ [ [ "La Reine Margot (novel)-1", "La Reine Margot (novel)-5" ] ], [ [ "La Reine Margot (novel)-1" ] ], [ [ "Molière-1" ] ], [ "operation" ] ] ] }, { "qid": "f8cc260e3863b0be4b81", "term": "Hypothermia", "description": "A human body core temperature below 35.0°C", "question": "Would you be more likely to die of hypothermia in New York than Florida?", "answer": true, "facts": [ "Central New York Winters are between 12-30 degrees Fahrenheit.", "Florida winters are between 65 and 77 degrees Fahrenheit." ], "decomposition": [ "What is the typical temperature range of the coldest time of the year in New York?", "What is the typical temperature range of the coldest time of the year in Florida?", "Is #1 lower than #2?" 
], "evidence": [ [ [ [ "New York City-62" ], "no_evidence" ], [ [ "Climate of Florida-7", "Climate of Florida-8" ], "no_evidence" ], [ "operation" ] ], [ [ [ "New York (state)-43" ] ], [ [ "Florida-45" ] ], [ "operation" ] ], [ [ [ "New York (state)-43" ] ], [ [ "Geography of Florida-5" ] ], [ "operation" ] ] ] }, { "qid": "3d08da50b6599ea99f94", "term": "Nintendo", "description": "Japanese multinational consumer electronics company", "question": "Did original Nintendo have games in same format as Playstation 3?", "answer": false, "facts": [ "Nintendo was originally released in 1983 and used games that were in a cartridge format.", "Sony Playstation 3 was released in 2006 and had games in a CD format." ], "decomposition": [ "What format were Nintendo games originally released in?", "What format were PlayStation 3 games released in?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Video game console-11" ] ], [ [ "PlayStation 3-23" ] ], [ "operation" ] ], [ [ [ "Nintendo Entertainment System-3" ] ], [ [ "PlayStation 3-2" ] ], [ "operation" ] ], [ [ [ "Nintendo Entertainment System-27" ] ], [ [ "PlayStation 3-2" ] ], [ "operation" ] ] ] }, { "qid": "a5c1b9607a17b329cdf1", "term": "Television", "description": "Telecommunication medium for transmitting and receiving moving images", "question": "Did the Democratic Party's nominee for President of the U.S. in 1908 watch TV?", "answer": false, "facts": [ "William Jennings Bryan was the Democratic Party's nominee for President of the U.S. in 1908", "William Jennings Bryan died Jul 26, 1925", "Television was invented in 1927" ], "decomposition": [ "Who was the Democratic Party's nominee for President of the U.S. in 1908?", "When did #1 die?", "When was the television invented?", "Is #3 before #2?" ], "evidence": [ [ [ [ "William Jennings Bryan 1908 presidential campaign-1" ] ], [ [ "William Jennings Bryan-56" ] ], [ [ "History of television-14" ] ], [ "operation" ] ], [ [ [ "1908 United States presidential election-1" ] ], [ [ "William Jennings Bryan-1" ] ], [ [ "Television-10" ] ], [ "operation" ] ], [ [ [ "William Jennings Bryan-30" ], "operation" ], [ "no_evidence" ], [ [ "Television Electronic Disc-2" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "5f1b1891c28dd8ef8bcf", "term": "Cuisine of Hawaii", "description": "Cuisine of Hawaii", "question": "Does the cuisine of Hawaii embrace foods considered gross in the continental US?", "answer": true, "facts": [ "SPAM has a reputation for being an unpleasant dish in the continental US.", "SPAM is so popular in Hawaii that it has made it onto McDonalds menus. " ], "decomposition": [ "Does SPAM have a reputation as an unpleasant dish in the continental US?", "Does SPAM have a reputation as a pleasant dish in Hawaii?", "Are #1 and #2 positive?" 
], "evidence": [ [ [ [ "Spam (food)-34" ] ], [ [ "Spam (food)-43" ] ], [ [ "Spam (food)-34", "Spam (food)-43" ] ] ], [ [ [ "Spam (food)-34", "Spam (food)-8" ] ], [ [ "Spam (food)-10" ] ], [ "operation" ] ], [ [ [ "Spam (food)-8" ] ], [ [ "Spam (food)-10" ] ], [ [ "Spam (food)-10" ] ] ] ] }, { "qid": "4eef7889f6dc2815f41f", "term": "2010 United Kingdom general election", "description": "election of members to the House of Commons in 2010", "question": "Did John Kerry run in the 2010 United Kingdom general election?", "answer": false, "facts": [ "John Kerry is an American citizen and politician", "Only citizens of the UK, Ireland or a Commonwealth nation are eligible to run in the United Kingdom general elections" ], "decomposition": [ "In order to run in the UK general election, a person must be a citizen of one of which countries? ", "John Kerry is a citizen of what country?", "Is #2 listed in #1?" ], "evidence": [ [ [ [ "Elections in the United Kingdom-7" ] ], [ [ "John Kerry-1" ] ], [ "operation" ] ], [ [ [ "Member of parliament-34" ] ], [ [ "John Kerry-2" ] ], [ "operation" ] ], [ [ [ "Citizenship-38" ], "no_evidence" ], [ [ "John Kerry-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "1111510e448112ed3c85", "term": "One Thousand and One Nights", "description": "Collection of Middle Eastern stories and folk tales compiled in Arabic during the Islamic Golden Age", "question": "Was The Canterbury Tales written before One Thousand and One Nights?", "answer": false, "facts": [ "One Thousand and One Nights was compiled during the Islamic Golden Age.", "The Islamic Golden Age lasted from 800 AD to 1258 AD.", "The Canterbury Tales was written in 1392." ], "decomposition": [ "When were the The Canterbury Tales written?", "When was One Thousand and One Nights written?", "Which years are included in #2?", "Is #1 before #3?" ], "evidence": [ [ [ [ "The Canterbury Tales-7" ] ], [ [ "One Thousand and One Nights-1" ] ], [ [ "One Thousand and One Nights-1" ] ], [ [ "The Canterbury Tales-7" ] ] ], [ [ [ "The Canterbury Tales-1" ] ], [ [ "One Thousand and One Nights-13" ], "no_evidence" ], [ [ "One Thousand and One Nights-33" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Canterbury Tales-1" ] ], [ [ "One Thousand and One Nights-1" ] ], [ [ "Islamic Golden Age-1" ] ], [ "operation" ] ] ] }, { "qid": "dc06eea2fe093bed09df", "term": "Fibonacci number", "description": "integer in the infinite Fibonacci sequence", "question": "Is the Fibonacci number sequence longer than every number discovered in Pi?", "answer": true, "facts": [ "The Fibonaacci number is a sequence of numbers that adds a number to the one before it and goes on forever.", "Pi is a sequence of numbers and 2.7 trillion digits were discovered in 2010." ], "decomposition": [ "How many numbers are in Pi?", "How many numbers are in the Fibonacci number sequence?", "Is #2 larger than #1?" 
], "evidence": [ [ [ [ "Pi-2" ], "no_evidence" ], [ [ "Fibonacci prime-3" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Piphilology-65" ] ], [ [ "Fibonacci-12" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Pi-2" ] ], [ [ "Random Fibonacci sequence-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "90e003dc429517ba3ebc", "term": "Lionel Richie", "description": "American singer-songwriter, musician, record producer and actor", "question": "Does Lionel Richie believe in holistic medicine?", "answer": true, "facts": [ "Lionel Richie suffered prolonged throat problems and had surgery four times in four years before being told by conventional doctors that he could lose his singing career. ", "Lionel Richie finally turned to a holistic doctor who said that the problem was simply acid reflux caused by foods he was eating before going to bed." ], "decomposition": [ "Which doctor diagnosed Lionel Richie satisfactorily after he had surgeries for a prolonged throat problem?", "Is #1 a holistic doctor?" ], "evidence": [ [ [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "0dad30a3a3c7c915e0c2", "term": "Judo", "description": "modern martial art, combat and Olympic sport", "question": "Would a Germaphobia be able to participate in Judo?", "answer": false, "facts": [ "Germaphobia is a term used by psychologists to describe a pathological fear of germs, bacteria, microbes, contamination and infection.", "If you’re training Judo, you will also find yourself getting into close proximity to the people you are training with, and rolling around on matted floors too. ", "Additionally, you will sweat and roll around on a shared mat when practicing judo." ], "decomposition": [ "What are some common symptoms of Germaphobia?", "What kind of contact and actions does judo training involve?", "Is avoidance of #2 not included in #1?" ], "evidence": [ [ [ [ "Mysophobia-1" ] ], [ [ "Judo-1" ] ], [ [ "Mysophobia-4" ], "operation" ] ], [ [ [ "Mysophobia-1" ] ], [ [ "Judo-1" ] ], [ "operation" ] ], [ [ [ "Mysophobia-1", "Mysophobia-4" ] ], [ [ "Judo-1" ] ], [ "operation" ] ] ] }, { "qid": "cb8c31219e06be31e9cc", "term": "Superman", "description": "Fictional superhero", "question": "Did villain that killed Superman murder Robin?", "answer": false, "facts": [ "Superman was killed by the villain Doomsday in Superman #75.", "Robin is killed by The Joker in the Batman comics." ], "decomposition": [ "Who was Superman killed by?", "Who was Robin killed by?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Doomsday (DC Comics)-2" ] ], [ [ "Joker (character)-28" ] ], [ "operation" ] ], [ [ [ "The Death of Superman-3" ] ], [ [ "Jason Todd-28" ] ], [ "operation" ] ], [ [ [ "The Death of Superman-3" ] ], [ [ "Jason Todd-2" ] ], [ "operation" ] ] ] }, { "qid": "2856480f93e5e9596f83", "term": "Ocelot", "description": "Small wild cat", "question": "Is an ocelot a good present for a kindergartener?", "answer": false, "facts": [ "An ocelot is a small wild cat native to South America, Mexico, and the southern US.", "Ocelots are carnivores that hunt other animals and are aggressive, and strong for their size.", "Kindergarteners are usually 5 years old and weigh around 39 pounds." ], "decomposition": [ "What is the behavior and social nature of an ocelot?", "Is #1 suitable for kindergartners considering their age?" 
], "evidence": [ [ [ [ "Ocelot-2" ] ], [ [ "Kindergarten-1" ], "operation" ] ], [ [ [ "Exotic felids as pets-9" ] ], [ [ "Kindergarten-1" ], "no_evidence", "operation" ] ], [ [ [ "Ocelot-25" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "3502d54b8241e79903a9", "term": "Missionary", "description": "member of a religious group sent into an area to do evangelism", "question": "Were the first missionaries required to attend mass on Sundays?", "answer": true, "facts": [ "The word \"mission\" originates from 1598 when the Jesuits sent members abroad.", "Jesuits are a Roman Catholic order of religious men", "The Roman Catholic religion requires members to attend mass on Sundays" ], "decomposition": [ "What religion were the first missionaries?", "Does #1 require mass attendance?" ], "evidence": [ [ [ [ "Missionary-4" ] ], [ [ "Mass (liturgy)-1", "Mass (liturgy)-13" ], "operation" ] ], [ [ [ "Missionary-4" ] ], [ [ "Eucharist in the Catholic Church-76" ], "no_evidence" ] ], [ [ [ "Guadalupe Missionaries-1" ] ], [ [ "Mass (liturgy)-13" ], "operation" ] ] ] }, { "qid": "519626e90b19ecb3886f", "term": "Donald Duck", "description": "Disney cartoon character", "question": "Will Donald Duck hypothetically purchase bell-bottoms for himself?", "answer": false, "facts": [ "Bell-bottoms were a style of pants that were popular in the 60s and 70s.", "Donald Duck is an animated Disney character that never wears pants." ], "decomposition": [ "What article of clothing are bell-bottoms?", "What articles of clothing does Donald Duck wear?", "Is #1 listed in #2?" ], "evidence": [ [ [ [ "Bell-bottoms-1" ] ], [ [ "Donald Duck-1", "Donald Duck-4" ] ], [ "operation" ] ], [ [ [ "Bell-bottoms-1" ] ], [ [ "Donald Duck-1" ] ], [ "operation" ] ], [ [ [ "Bell-bottoms-1" ] ], [ [ "Donald Duck-1" ] ], [ "operation" ] ] ] }, { "qid": "0ea87e725225e7ce5c51", "term": "Christina Aguilera", "description": "American singer, songwriter, actress, and television personality", "question": "Was Christina Aguilera born in the forgotten borough?", "answer": true, "facts": [ "Christina Maria Aguilera was born on December 18, 1980, in Staten Island, New York.", "Staten Island has sometimes been called \"the forgotten borough\" by inhabitants who feel neglected by the city government." ], "decomposition": [ "Where was Christina Aguilera born?", "What place is known as the forgotten borough?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Christina Aguilera-4" ] ], [ [ "Staten Island-2" ] ], [ "operation" ] ], [ [ [ "Christina Aguilera-4" ] ], [ [ "Staten Island-2" ] ], [ "operation" ] ], [ [ [ "Christina Aguilera-2" ] ], [ [ "Staten Island-2" ] ], [ "operation" ] ] ] }, { "qid": "29cbf28f323d10a53c4d", "term": "Heart failure", "description": "condition in which the heart is unable to provide sufficient pump action", "question": "Would ramen be bad for someone with heart failure?", "answer": true, "facts": [ "People with heart failure have to limit their sodium intake.", "Ramen is notorious for having incredibly high sodium levels. " ], "decomposition": [ "What is the recommended maximum daily sodium allowance for someone with heart failure?", "How much sodium is in a bowl of ramen?", "Is #2 greater than #1 divided by three?" 
], "evidence": [ [ [ [ "Reference Daily Intake-15" ], "no_evidence" ], [ [ "Ramen-11" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Reference Daily Intake-14" ], "no_evidence" ], [ [ "Instant noodle-23" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Sodium-35" ], "no_evidence" ], [ [ "Instant noodle-12" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "30a4396aa0743165d8d6", "term": "Ohio University", "description": "public university in Athens, Ohio, United States", "question": "Would the current president of Ohio University hypothetically wear a jockstrap?", "answer": true, "facts": [ "The current president of Ohio University is Duane Nellis.", "Duane Nellis is a man.", "A jockstrap is an undergarment for protecting the testes and penis during cycling, contact sports or other vigorous physical activity.", "The testes and penis are the sexual organs of men." ], "decomposition": [ "Which gender wears jockstrap?", "Who is the current President of Ohio University?", "Does #2 identify with the gender #1?" ], "evidence": [ [ [ [ "Jockstrap-1" ] ], [ [ "Duane Nellis-1" ] ], [ "operation" ] ], [ [ [ "Jockstrap-1" ] ], [ [ "Kristina M. Johnson-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Thong-27" ] ], [ [ "Duane Nellis-1" ] ], [ "operation" ] ] ] }, { "qid": "d912709b7341dd86ba39", "term": "Mile", "description": "Unit of length", "question": "Would an Olympic athlete be tired out after running a mile?", "answer": false, "facts": [ "The Olympic standard for men in running one mile is 4 minutes, 19 seconds. ", "The Olympic standard for women in running one mile is at least 4 minutes and 40 seconds. " ], "decomposition": [ "What is the Olympic standard time in running one mile for men?", "What is the Olympic standard time in running one mile for women?", "Is #1 or #2 a very long period of time?" ], "evidence": [ [ [ [ "Mile run-4" ], "no_evidence" ], [ [ "Mile run-4" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mile run world record progression-1" ], "no_evidence" ], [ [ "Mile run world record progression-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Mile run-4" ], "no_evidence" ], [ [ "Mile run-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "deea92d9c38f95fef4a8", "term": "Swallow", "description": "family of birds", "question": "Can an ostrich fit into the nest of a swallow?", "answer": false, "facts": [ "Swallows weigh less than an ounce.", "An ostrich can weigh over 200 pounds." ], "decomposition": [ "How much does a swallow weigh?", "How much does an ostrich weigh?", "Is #2 within 20% of #1?" ], "evidence": [ [ [ [ "Swallow-7" ] ], [ [ "Common ostrich-5" ] ], [ "operation" ] ], [ [ [ "Swallow-7" ] ], [ [ "Common ostrich-5" ] ], [ "operation" ] ], [ [ [ "Swallow-7" ] ], [ [ "Common ostrich-5" ] ], [ "operation" ] ] ] }, { "qid": "d9a89b17f569834014a1", "term": "Daytona 500", "description": "Auto race held in Daytona, Florida, United States", "question": "Did Dale Jr hug his dad after their last Daytona 500 together?", "answer": false, "facts": [ "Dale Jr. and his father Dale Sr. last raced together at the Daytona 500 in 2001.", "During the 2001 Daytona 500 Dale Sr. suffered a basilar skull fracture and died. " ], "decomposition": [ "Which race did Dale Jr and his father participate in last together?", "Which notable incident took place during #1?", "Was Dale Jr.'s father well enough to hug his son after #2?" 
], "evidence": [ [ [ [ "Dale Earnhardt Jr.-6" ] ], [ [ "Dale Earnhardt Jr.-6" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Dale Earnhardt-23" ] ], [ [ "Dale Earnhardt-23" ] ], [ "operation" ] ], [ [ [ "Dale Earnhardt Jr.-10" ] ], [ [ "Dale Earnhardt-23" ] ], [ "operation" ] ] ] }, { "qid": "d9b4f68792ef86249d18", "term": "Wednesday", "description": "Day of the week", "question": "Did Wednesday have something to do with Thor?", "answer": true, "facts": [ "Wednesday is the middle of the modern work week and comes from the name Wodan.", "The Germanic god Woden is also known as Wodanaz or Odin.", "Odin, in Norse mythology, was the father of Thor." ], "decomposition": [ "Which Germanic god is the name 'Wednesday' etymologically related to?", "Is #1 related to Thor?" ], "evidence": [ [ [ [ "Wednesday-5" ] ], [ [ "Odin-4" ] ] ], [ [ [ "Wednesday-1" ] ], [ [ "Thor-3" ], "operation" ] ], [ [ [ "Wednesday-5" ] ], [ [ "Odin-4" ] ] ] ] }, { "qid": "21ebbfdbb95747f3763f", "term": "Autumn", "description": "one of the Earth's four temperate seasons, occurring between summer and winter", "question": "Is Autumn a good time to collect bear pelts in US?", "answer": false, "facts": [ "Autumn runs from September to the end of December in the US.", "Bears go into hibernation from September through April and are scarcely seen." ], "decomposition": [ "What months does Autumn occur in the US?", "Where do bear pelts come from?", "What months can #2 be easily seen in the US?", "Do #1 and #3 overlap?" ], "evidence": [ [ [ [ "Autumn-3" ] ], [ [ "Bear hunting-17" ] ], [ [ "Brown bear-27" ] ], [ [ "Autumn-1", "Brown bear-27" ] ] ], [ [ [ "Autumn-3" ] ], [ [ "Bear hunting-23" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Autumn-1" ] ], [ [ "American black bear-1", "Grizzly bear-1" ] ], [ [ "Bear-39" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a1dabe439511af470303", "term": "Lil Wayne", "description": "American rapper, record executive and actor from Louisiana", "question": "Will AC/DC album sales buy more B-52 bombers than Lil Wayne's?", "answer": true, "facts": [ "The B-52 bomber plane cost 60 million dollars in 2018.", "AC/DC has sold over 200 million albums.", "Lil Wayne has sold 120 million records worldwide." ], "decomposition": [ "How much does one B-52 bomber cost?", "How much is AC/DC worth due to the sales of their albums?", "Lil Wayne has made how much from his album sales?", "Is #2 more than both #1 and #3?" ], "evidence": [ [ [ [ "Boeing B-52 Stratofortress-6" ], "no_evidence" ], [ [ "AC/DC-5" ], "no_evidence" ], [ [ "Lil Wayne-4" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ [ "AC/DC-5" ], "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Bomber B-5" ], "no_evidence" ], [ [ "AC/DC-2" ], "no_evidence" ], [ [ "Lil Wayne-3" ], "operation" ], [ "no_evidence" ] ] ] }, { "qid": "1ab9f6651469645c0573", "term": "Mount Sharp", "description": "mountain on Mars", "question": "Do mountain goats inhabit the summit of Mount Sharp?", "answer": false, "facts": [ "Mountain goats are animals", "Animals require oxygen in order to live", "Mount Sharp is located on Mars", "The atmosphere of Mars contains only trace amounts of oxygen" ], "decomposition": [ "Where is Mount Sharp located?", "Does #1 have enough atmospheric oxygen to support the life of animals, such as mountain goats?" 
], "evidence": [ [ [ [ "Mount Sharp-1" ] ], [ [ "Life on Mars-9" ] ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Mars-59" ], "operation" ] ], [ [ [ "Mount Sharp-1" ] ], [ [ "Life on Mars-1" ] ] ] ] }, { "qid": "e0bfafb25cde72664a3a", "term": "Larry King", "description": "American television and radio host", "question": "Could Larry King's marriages be counted on two feet?", "answer": true, "facts": [ "The typical person has 10 toes spread across their two feet.", "Larry King has been married 8 times.", "You can count each marriage on each toe." ], "decomposition": [ "How many times has Larry King been married?", "How many toes do most people have?", "Is #2 at least as much as #1?" ], "evidence": [ [ [ [ "Larry King-37" ] ], [ [ "Toe-2" ] ], [ "operation" ] ], [ [ [ "Larry King-43" ] ], [ [ "Toe-2" ] ], [ "operation" ] ], [ [ [ "Larry King-37" ] ], [ [ "Toe-2" ] ], [ "operation" ] ] ] }, { "qid": "41e98899dde8ec95e2d1", "term": "Gladiator", "description": "combatant who entertained audiences in the Roman Republic and Roman Empire", "question": "Were gladiators associated with the Coloseum?", "answer": true, "facts": [ "The Colosseum was a stadium in the ancient city of Rome, large by even today's standards.", "During the Roman era, it was primarily used to host large spectacles including gladiatorial combat, mock battles, and executions." ], "decomposition": [ "What kind of building was the Colosseum and which city did it exist in?", "In the era of #1, which kind of events were held in such buildings?", "Did any of #2 involve gladiators?" ], "evidence": [ [ [ [ "Colosseum-1" ] ], [ [ "Colosseum-2" ] ], [ [ "Colosseum-2" ], "operation" ] ], [ [ [ "Colosseum-1" ] ], [ [ "Colosseum-1" ] ], [ [ "Colosseum-2" ], "operation" ] ], [ [ [ "Colosseum-1" ] ], [ [ "Colosseum-2" ] ], [ [ "Inaugural games of the Flavian Amphitheatre-2" ], "operation" ] ] ] }, { "qid": "daf439d2f206dfc1c662", "term": "Pink (singer)", "description": "American singer, songwriter, and actress", "question": "Are there Pink music videos that are triggering for eating disorder patients?", "answer": true, "facts": [ "The video for 'Stupid Girls' features a scene where Pink and a woman share a toothbrush to induce vomiting in the bathroom.", "Images or discussion of purging activity can be triggering for people with Eating Disorders." ], "decomposition": [ "What are the depictions in Pink's music video 'Stupid Girls'?", "What are some situations that can be triggering for people with eating disorders?", "Are any of #2 included in #1?" ], "evidence": [ [ [ [ "Stupid Girls-12" ] ], [ [ "Eating disorder-7" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Stupid Girls-12", "Stupid Girls-13" ] ], [ [ "Eating disorder-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Stupid Girls-13" ] ], [ [ "Eating disorder-9" ] ], [ [ "Stupid Girls-13" ] ] ] ] }, { "qid": "4648058ffbc40905a042", "term": "Rupert Murdoch", "description": "Australian-born American media mogul", "question": "Does Rupert Murdoch's alma mater have more history than the USA?", "answer": true, "facts": [ "Rupert Murdoch's alma mater is Worcester College.", "Worcester College was founded in 1714.", "The first documented use of the term the United States of America was in a January 2, 1776 letter." ], "decomposition": [ "What is Rupert Murdoch's alma mater?", "When was #1 founded?", "When was the United States founded?", "Is #2 prior to #3?" 
], "evidence": [ [ [ [ "Rupert Murdoch-8" ] ], [ [ "Worcester College, Oxford-1" ] ], [ [ "United States Declaration of Independence-1" ] ], [ "operation" ] ], [ [ [ "Rupert Murdoch-8" ] ], [ [ "Worcester College, Oxford-1" ] ], [ [ "United States Declaration of Independence-1" ] ], [ "operation" ] ], [ [ [ "Rupert Murdoch-8" ] ], [ [ "Worcester College, Oxford-1" ] ], [ [ "United States-27" ] ], [ "operation" ] ] ] }, { "qid": "27fa0790fcdc646e80cc", "term": "Ubuntu", "description": "Linux distribution based on Debian", "question": "Do the Ubuntu people speak Ubuntu?", "answer": false, "facts": [ "Ubuntu is a of free and open-source software used on computers.", "The Ubuntu people are a tribe that lives in Africa.", "The Ubuntu people derive their language from Nguni Bantu.", "As of 2019 only 10% of households in Africa have a computer." ], "decomposition": [ "What is Ubuntu?", "Can people speak #1?" ], "evidence": [ [ [ [ "Ubuntu-1" ] ], [ [ "Linux distribution-1", "Operating system-1", "Spoken language-1" ] ] ], [ [ [ "Ubuntu-1" ] ], [ [ "Programming language-1" ] ] ], [ [ [ "Ubuntu-1" ] ], [ [ "Ubuntu-1" ] ] ] ] }, { "qid": "0e2f272f72871428cc90", "term": "Space Race", "description": "Competition between the USSR and the USA to explore space", "question": "Did Al Unser Jr. win the Space Race?", "answer": false, "facts": [ "Al Unser Jr. is a race car driver", "The Space Race was the competition between the Soviet Union and United States over space exploration" ], "decomposition": [ "What two entities were part of the Space Race?", "Is Al Unser Jr. either of #1?" ], "evidence": [ [ [ [ "Space Race-1" ] ], [ [ "Al Unser Jr.-1" ], "operation" ] ], [ [ [ "Space Race-1" ] ], [ [ "Al Unser Jr.-1" ], "operation" ] ], [ [ [ "Space Race-1" ] ], [ "operation" ] ] ] }, { "qid": "b10fd3838186167614f2", "term": "Miami", "description": "City in Florida, United States", "question": "Can native wolverines be found in Miami?", "answer": false, "facts": [ "Wolverines are native to northern boreal forests", "Miami is not a northern boreal habitat" ], "decomposition": [ "What is the native range of wolverines?", "What state is Miami located in?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Wolverine-16" ] ], [ [ "Miami-1" ] ], [ "operation" ] ], [ [ [ "Wolverine-2" ] ], [ [ "Miami-1" ] ], [ "operation" ] ], [ [ [ "Wolverine-2" ] ], [ [ "Miami-1" ] ], [ "operation" ] ] ] }, { "qid": "10353348b0d7077924be", "term": "Paparazzi", "description": "profession", "question": "Were paparazzi involved in the death of a member of the royal family?", "answer": true, "facts": [ "Diana Spencer was being pursued by paparazzi when her vehicle was involved in a fatal accident.", "Diana Spencer was known as 'Princess Diana' and was the Princess of Wales." ], "decomposition": [ "What were the circumstances surrounding the death of Diana Spencer?", "Is Diana Spencer a member of the royal family?", "Was paparazzi involved in #1?", "Are #2 and #3 positive?" 
], "evidence": [ [ [ [ "Diana, Princess of Wales-4" ] ], [ [ "Diana, Princess of Wales-3" ] ], [ [ "Death of Diana, Princess of Wales-2" ] ], [ "operation" ] ], [ [ [ "Death of Diana, Princess of Wales-1" ] ], [ [ "Diana, Princess of Wales-1", "Diana, Princess of Wales-26" ] ], [ [ "Death of Diana, Princess of Wales-2" ] ], [ "operation" ] ], [ [ [ "Diana, Princess of Wales-53" ] ], [ [ "Diana, Princess of Wales-1" ] ], [ [ "Diana, Princess of Wales-53" ] ], [ "operation" ] ] ] }, { "qid": "c426b03932f052ae91af", "term": "Moon Jae-in", "description": "President of South Korea", "question": "Was Moon Jae-in born outside of Khanbaliq?", "answer": true, "facts": [ "Khanbaliq was the winter capital of the Mongol Empire. ", "Khanbaliq was located at the center of what is now modern day Beijing, China.", "Moon Jae-In was born in Geoje, South Korea." ], "decomposition": [ "Where was Moon Jae-in born?", "What is the modern day location of Khanbaliq?", "Is #1 different from #2?" ], "evidence": [ [ [ [ "Moon Jae-in-5" ] ], [ [ "Khanbaliq-1" ] ], [ "operation" ] ], [ [ [ "Moon Jae-in-2" ] ], [ [ "Khanbaliq-1" ] ], [ "operation" ] ], [ [ [ "Moon Jae-in-5" ] ], [ [ "Khanbaliq-1" ] ], [ "operation" ] ] ] }, { "qid": "8a36239c2251abc536ce", "term": "Greyhound", "description": "Dog breed used in dog racing", "question": "Can a greyhound walk on two legs?", "answer": false, "facts": [ "Greyhounds are dogs.", "Dogs walk on four legs. " ], "decomposition": [ "What type of animal is a greyhound?", "Does #1 walk on two legs?" ], "evidence": [ [ [ [ "Greyhound-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Greyhound-1" ] ], [ "no_evidence" ] ], [ [ [ "Greyhound-1" ] ], [ [ "Bipedalism-22", "Quadrupedalism-1" ] ] ] ] }, { "qid": "1127f625b634b9371ee1", "term": "Sloth", "description": "tree dwelling animal noted for slowness", "question": "Do moths that live on sloths have family dinners?", "answer": false, "facts": [ "Algae grows on sloths", "Sloth moths feed on algae that grows on sloths", "Sloth moth caterpillars feed on sloth dung ", "Sloths defecate far from their ususl abode" ], "decomposition": [ "What do sloth moths enjoy eating from the body of sloths?", "Where is #1 found on the sloth?", "What do baby or caterpillar sloth moths enjoy eating from the body of sloths?", "Where is #3 found relative to the sloth?", "Is #2 found in the same location as #4?" ], "evidence": [ [ [ [ "Sloth moth-2" ], "no_evidence" ], [ [ "Sloth moth-3" ], "no_evidence" ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Sloth moth-4" ] ], [ [ "Sloth moth-1" ] ], [ [ "Sloth moth-2" ] ], [ [ "Arthropods associated with sloths-12" ] ], [ "operation" ] ], [ [ [ "Sloth moth-1", "Sloth moth-4" ] ], [ [ "Sloth-2" ] ], [ [ "Sloth moth-2" ], "no_evidence" ], [ "no_evidence", "operation" ], [ "operation" ] ] ] }, { "qid": "90538ec7aa1100a34829", "term": "New Year's Day", "description": "Holiday", "question": "Do Jehovah's Witnesses celebrate day before New Year's Day?", "answer": false, "facts": [ "The Day before New Year's Day is New Year's Eve.", "Jehovah's Witnesses do not celebrate holidays, citing in many cases that they have pagan origins.", "New Year's has origins in pagan Babylonia." ], "decomposition": [ "Which holidays do Jehovah's Witnesses refrain from celebrating or participating in?", "What is the day before New Year's Day known as?", "Is #2 included in #1?" 
], "evidence": [ [ [ [ "Jehovah's Witnesses practices-42", "Jehovah's Witnesses-3" ] ], [ [ "New Year's Eve-1" ] ], [ "operation" ] ], [ [ [ "Jehovah's Witnesses-3" ] ], [ [ "New Year's Eve-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Jehovah's Witnesses-41" ] ], [ [ "New Year's Eve-41" ] ], [ [ "Jehovah's Witnesses-41", "New Year's Eve-41" ], "operation" ] ] ] }, { "qid": "acbc6f68c37f4f9dc601", "term": "Caracal", "description": "Small wild cat", "question": "Would a caracal be defeated by Javier Sotomayor in a high jump competition?", "answer": false, "facts": [ "The caracal can leap higher than 12 feet in the air.", "Javier Sotomayor is the current men's high jump record holder with a jump of 2.45 m (8 ft 1⁄4 in)." ], "decomposition": [ "How high was Javier Sotomayor's highest jump?", "How high are caracals known to jump?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Javier Sotomayor-11" ] ], [ [ "Caracal-24" ] ], [ "operation" ] ], [ [ [ "Javier Sotomayor-1" ] ], [ [ "Caracal-2" ] ], [ "operation" ] ], [ [ [ "Javier Sotomayor-1" ] ], [ [ "Caracal-2" ] ], [ "operation" ] ] ] }, { "qid": "73520a07f183b7111236", "term": "Karaoke", "description": "form of entertainment involving singing to recorded music", "question": "Were karaoke and the turtle power tiller patented in the same country?", "answer": true, "facts": [ "Roberto L. del Rosario holds the patent for the karaoke system", "del Rosario is Filipino", "Magdalena Smith Villaruz patented the turtle power tiller", "Villaruz is Filipino " ], "decomposition": [ "Who is the patent holder of the karaoke system?", "Which country is #1 from?", "Who patented the turtle power tiller?", "Which country is #3 from?", "Is #2 the same as #4?" ], "evidence": [ [ [ [ "Karaoke-8" ] ], [ [ "Roberto del Rosario-1" ] ], [ [ "Magdalena Villaruz-2" ] ], [ [ "Magdalena Villaruz-1" ] ], [ "operation" ] ], [ [ [ "Karaoke-5" ], "no_evidence" ], [ [ "Daisuke Inoue-3" ] ], [ [ "Magdalena Villaruz-1" ] ], [ [ "Magdalena Villaruz-1" ] ], [ "operation" ] ], [ [ [ "Roberto del Rosario-2" ] ], [ [ "Philippines-1", "Roberto del Rosario-1" ] ], [ [ "Magdalena Villaruz-1" ] ], [ [ "Magdalena Villaruz-1" ] ], [ "operation" ] ] ] }, { "qid": "5951a77c6a4a0bbc7718", "term": "Guru", "description": "A \"teacher, guide, expert, or master\" in Sanskrit", "question": "Is Kim Kardashian a guru?", "answer": false, "facts": [ "A guru is a teacher or guide, particularly a spiritual one.", "Kim Kardashian is a socialite and a model." ], "decomposition": [ "What makes someone a guru?", "Does Kim Kardashian satisfy all the conditions of #1?" ], "evidence": [ [ [ [ "Guru-1" ] ], [ [ "Kim Kardashian-1", "Kim Kardashian-14", "Kim Kardashian-4" ] ] ], [ [ [ "Guru-1" ] ], [ "operation" ] ], [ [ [ "Guru-1" ] ], [ [ "Kim Kardashian-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "f58d326656e75b07d288", "term": "Conducting", "description": "Directing a musical performance by way of visible gestures", "question": "Do solo pianists require a conductor?", "answer": false, "facts": [ "Conductors direct a group of musicians by ensuring they all keep the same beat and place in the music.", "A solo pianist performing alone can keep their own pace." ], "decomposition": [ "What is the minimum number of music performers that would need a conductor?", "Is #1 less than or equal to the number in a solo performance?" 
], "evidence": [ [ [ [ "Conducting-32" ] ], [ [ "Conducting-32" ] ] ], [ [ [ "Conducting-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Conducting-1" ], "no_evidence" ], [ [ "Solo performance-1" ], "operation" ] ] ] }, { "qid": "8f936e225ee1fe225f66", "term": "Shaggy (musician)", "description": "Reggae singer and former U.S. Marine", "question": "Would Shaggy and Redenbacher popcorn founder both raise hand during first name roll call?", "answer": true, "facts": [ "Roll call is when teachers call the names of students and they raise their hand to show they are present.", "The founder of Redenbacher popcorn was Orville Redenbacher.", "Reggae musician Shaggy was born Orville Richard Burrell." ], "decomposition": [ "What is the first name of the person who founded Redenbacher popcorn?", "What is the first name of Reggae musician Shaggy?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Orville Redenbacher-1" ] ], [ [ "Shaggy (musician)-1" ] ], [ "operation" ] ], [ [ [ "Orville Redenbacher-1" ] ], [ [ "Shaggy (musician)-1" ] ], [ "operation" ] ], [ [ [ "Orville Redenbacher-1" ] ], [ [ "Shaggy (musician)-1" ] ], [ "operation" ] ] ] }, { "qid": "e4a0eeb26daef5087fe9", "term": "Moustache", "description": "Facial hair grown on the upper lip", "question": "Has a baby ever had a moustache?", "answer": true, "facts": [ "When babies are in the womb, many have hair known as lanugo.", "Lanugo is unpigmented, downy hair that is sometimes found on the body of fetuses and babies.", "Lanugo can grow anywhere on a baby's body but is usually shed before the baby is born and is reabsorbed by the mother." ], "decomposition": [ "What kind of hair does a baby have when it is in the womb?", "Can #1 grow anywhere on the body?" ], "evidence": [ [ [ [ "Lanugo-1" ] ], [ [ "Lanugo-1" ] ] ], [ [ [ "Lanugo-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Lanugo-1" ] ], [ [ "Lanugo-1" ], "no_evidence" ] ] ] }, { "qid": "5055ac2e84f806669a3f", "term": "Metallica", "description": "American heavy metal band", "question": "Did the original lead guitarist of Metallica fail after parting from the band?", "answer": false, "facts": [ "Metallica's original lead guitarist was Dave Mustaine.", "Dave Mustaine was fired from Metallica in 1983.", "Dave Mustaine formed the band Megadeth in 1983 and is the lead vocalist.", "Megadeth has sold over 38 million records worldwide." ], "decomposition": [ "Who was the original lead guitarist of Metallica?", "What band did #1 start after leaving Metallica?", "Is #2 an unsuccessful band?" ], "evidence": [ [ [ [ "Metallica-6" ] ], [ [ "Dave Mustaine-1" ] ], [ [ "Megadeth-4" ], "operation" ] ], [ [ [ "Metallica-1" ] ], [ [ "Megadeth-1" ] ], [ [ "Megadeth-88" ], "operation" ] ], [ [ [ "Dave Mustaine-1" ] ], [ [ "Dave Mustaine-1" ] ], [ [ "Megadeth-4" ] ] ] ] }, { "qid": "ea9b3204830068115fcb", "term": "John Muir", "description": "Scottish-born American naturalist and author", "question": "Would John Muir not likely have a vitamin D deficiency?", "answer": true, "facts": [ "John Muir frequently spent time exploring various places in nature.", "Spending time in nature increases your exposure to sunlight.", "Skin exposure to sunlight increases vitamin D levels in the body." ], "decomposition": [ "What is the most common cause of vitamin D deficiency?", "What was the nature of John Muir's life's work?", "Does #2 ensure that he does not experience #1?" 
], "evidence": [ [ [ [ "Vitamin D-17" ] ], [ [ "John Muir-2" ] ], [ [ "John Muir-2" ], "operation" ] ], [ [ [ "Vitamin D-13" ] ], [ [ "John Muir-52" ] ], [ "operation" ] ], [ [ [ "Vitamin D deficiency-1" ] ], [ [ "John Muir-1" ] ], [ "operation" ] ] ] }, { "qid": "5a2e62e3c0ded7a4bddd", "term": "Monogamy", "description": "Relationship form where each individual has only one partner during their lifetime or at any one time", "question": "Did Thomas Greenhill's parents violate the concept of monogamy?", "answer": false, "facts": [ "Thomas Greenhill was a surgeon born to William and Elizabeth Greenhill.", "William and Elizabeth Greenhill had 39 children.", "Monogamy is a committed relationship between two people where usually they remain together for life.", "Thomas Greenhill was the last of his parents 39 children and was born shortly after his father died." ], "decomposition": [ "Who was Thomas Greenhill's father?", "How many wives did #1 marry in his lifetime?", "Is #2 greater than one?" ], "evidence": [ [ [ [ "Thomas Greenhill (surgeon)-4" ] ], [ [ "Thomas Greenhill (surgeon)-5" ] ], [ "operation" ] ], [ [ [ "Thomas Greenhill (surgeon)-4" ] ], [ [ "Thomas Greenhill (surgeon)-7" ] ], [ "operation" ] ], [ [ [ "Thomas Greenhill (surgeon)-1" ] ], [ [ "William Greenhill-5" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "06361fd92f628fe402e6", "term": "World of Warcraft", "description": "video game by Blizzard Entertainment", "question": "Is World of Warcraft heavier than a loaf of bread?", "answer": false, "facts": [ "World of Warcraft is a piece of software.", "Software is digital.", "Digital items do not have weight. " ], "decomposition": [ "What does World of Warcraft refer to?", "Is #1 a tangible item that has weight?" ], "evidence": [ [ [ [ "World of Warcraft-2" ] ], [ "no_evidence", "operation" ] ], [ [ [ "World of Warcraft-2" ] ], [ [ "Software-1" ] ] ], [ [ [ "World of Warcraft-1" ] ], [ "operation" ] ] ] }, { "qid": "0c90fc4f1a55004ea832", "term": "Bull shark", "description": "Species of fish", "question": "Is the bull shark more bull than shark?", "answer": false, "facts": [ "The bull shark is a fish species that lives in warm shallow waters along coasts and rivers.", "Bull sharks feed on bony fish and other smaller sharks.", "A bull is an adult male mammal that lives on land.", "Bulls feed on plants located on land." ], "decomposition": [ "What is the main diet of bulls and where do they find their food?", "What is the main diet of sharks and where do they find their food?", "What is the main diet of bull sharks and where do they find their food?", "Is #3 more similar to #1 than to #2?" ], "evidence": [ [ [ [ "Cattle feeding-1" ], "no_evidence" ], [ [ "Fish jaw-31" ], "no_evidence" ], [ [ "Bull shark-23" ] ], [ "operation" ] ], [ [ [ "Cattle-43" ] ], [ [ "Shark-59" ] ], [ [ "Bull shark-21" ] ], [ "operation" ] ], [ [ [ "Bull-1", "Cattle-19" ] ], [ [ "Shark-59", "Shark-60", "Shark-62" ] ], [ [ "Bull shark-21" ] ], [ "operation" ] ] ] }, { "qid": "56b31f6fe1e5163d2382", "term": "Liberty Bell", "description": "bell that serves as a symbol of American independence and liberty", "question": "Will a Holstein cow and the Liberty Bell balance out a giant scale?", "answer": false, "facts": [ "The Liberty Bell weighs 2,080 pounds.", "A mature Holstein cow weighs around 1,500 pounds." ], "decomposition": [ "What is the average weight of a mature Holstein cow?", "What is the weight of the Liberty Bell?", "Is #1 closely the same as #2?" 
], "evidence": [ [ [ [ "Holstein Friesian cattle-7" ] ], [ [ "Liberty Bell-27" ] ], [ "operation" ] ], [ [ [ "Holstein Friesian cattle-7" ] ], [ [ "Liberty Bell-27" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Holstein Friesian cattle-7" ] ], [ [ "Liberty Bell-36" ] ], [ "operation" ] ] ] }, { "qid": "3b62e4e18ce9ec7015c0", "term": "Citrus", "description": "genus of fruit-bearing plants (source of fruit such as lemons and oranges)", "question": "Can citrus grow in Ulaanbaatar?", "answer": false, "facts": [ "Citrus can withstand short periods down to as cold as −10 °C (14 °F), but realistically temperatures not falling below −2 °C (28 °F) are required for successful cultivation.", "Ulaanbaatar has an average annual temperature of −0.4 °C or 31.3 °F." ], "decomposition": [ "What climates are suitable for growing citrus?", "What is the climate of Ulaanbaatar?", "Is #2 similar to #1?" ], "evidence": [ [ [ [ "Citrus-34" ] ], [ [ "Ulaanbaatar-39" ] ], [ [ "Citrus-34" ] ] ], [ [ [ "Citrus-26", "Citrus-31" ] ], [ [ "Ulaanbaatar-39" ] ], [ "operation" ] ], [ [ [ "Citrus-31" ] ], [ [ "Ulaanbaatar-40" ] ], [ "operation" ] ] ] }, { "qid": "50b58835d8ab6da72c32", "term": "Snow leopard", "description": "species of mammal", "question": "Can a snow leopard swim?", "answer": true, "facts": [ "except for giraffes and apes, all four legged mammals can swim", "a snow leopard is a mammal", "snow leopards have four legs" ], "decomposition": [ "Is a snow leopard a four legged mammal?" ], "evidence": [ [ [ [ "Cat-1", "Snow leopard-1" ], "no_evidence" ] ], [ [ "no_evidence", "operation" ] ], [ [ [ "Felidae-1", "Quadrupedalism-1", "Snow leopard-1" ] ] ] ] }, { "qid": "1c7e71e10fc88c8c8f2c", "term": "LinkedIn", "description": "Social networking website for people in professional occupations", "question": "Are LinkedIn and LeafedIn related companies?", "answer": false, "facts": [ "LinkedIn successfully sued LeafedIn for their choice of name.", "LeafedIn changed their company name to LeafedOut" ], "decomposition": [ "Who owns LinkedIn?", "Who owns LeafedIn?", "IS #1 the same as #2?" ], "evidence": [ [ [ [ "LinkedIn-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "LinkedIn-10" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "LinkedIn-1" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7bee5f4847890e925f3e", "term": "Dance", "description": "A performing art consisting of movement of the body", "question": "Is waltz less injurious than slam dance?", "answer": true, "facts": [ "The waltz is a rhythmic dance performed in triple time by a couple.", "A slam dance is a type of dance in which leaping dancers collide against each other." ], "decomposition": [ "What kinds of body movements are involved in waltz?", "What kinds of body movements are involved in slam dance?", "Is #1 less likely to cause injuries than #2?" ], "evidence": [ [ [ [ "Waltz-2" ], "no_evidence" ], [ [ "Moshing-1" ] ], [ [ "Moshing-4" ], "operation" ] ], [ [ [ "Waltz-5" ] ], [ [ "Moshing-1" ] ], [ [ "Moshing-4" ], "operation" ] ], [ [ [ "Ballroom dance-28", "Waltz-1" ] ], [ [ "Moshing-1" ] ], [ [ "Moshing-4" ], "operation" ] ] ] }, { "qid": "43f5550e1447823f8290", "term": "Squid", "description": "order of molluscs", "question": "Is one blast from double-barreled shotgun likely to kill all squid brains?", "answer": false, "facts": [ "A double-barreled shotgun fires two rounds in one single blast.", "Squids have three brains." 
], "decomposition": [ "How many rounds are fired in one blast from a double-barreled shotgun?", "How many brains do squid have?", "Is #1 greater than or equal to #2?" ], "evidence": [ [ [ [ "Double-barreled shotgun-1" ] ], [ [ "Squid-20" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Double-barreled shotgun-7" ], "no_evidence" ], [ [ "Brain-15" ] ], [ "operation" ] ], [ [ [ "Double-barreled shotgun-1" ] ], [ [ "Squid-20" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "6dbb550c50c49264c8c3", "term": "Viscosity", "description": "Resistance of a fluid to shear deformation", "question": "Is viscosity unimportant in making jello shots?", "answer": false, "facts": [ "Jello shots are a combination of alcohol and jello to create an edible intoxicant. ", "If the liquid for the Jello shots has too low a viscosity, it will not become a semi-solid. " ], "decomposition": [ "What are the ingredients used in making jello shots?", "Which properties of liquids among #1 are important for good results?", "Is viscosity not included in #2?" ], "evidence": [ [ [ [ "Jell-O-30" ] ], [ [ "Mixed drink-1" ], "no_evidence" ], [ [ "Viscosity-1" ], "operation" ] ], [ [ [ "Jell-O-30" ] ], [ [ "Jell-O-30" ], "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Jell-O-30" ] ], [ [ "Jell-O-29" ] ], [ "operation" ] ] ] }, { "qid": "420837c63ba49f76d2a7", "term": "Earth Day", "description": "Annual event on 22 April", "question": "Is Earth Day celebrated in summer?", "answer": false, "facts": [ "Earth Day is celebrated on April 22.", "Summer runs from about June 20 to September 20." ], "decomposition": [ "What is summer?", "What is the date of Earth day?", "Is #2 in #1?" ], "evidence": [ [ [ [ "Summer-2" ] ], [ [ "Earth Day-30" ] ], [ "operation" ] ], [ [ [ "Summer-5" ] ], [ [ "Earth Day-1" ] ], [ "operation" ] ], [ [ [ "Summer-2" ] ], [ [ "Earth Day-1" ] ], [ "operation" ] ] ] }, { "qid": "f6b966be1495ead0fe7f", "term": "Metroid", "description": "Video game series", "question": "Did Electronic Arts profit from Metroid sales?", "answer": false, "facts": [ "Metroid was created and published by Nintendo.", "Electronic Arts is a video game company that is a competitor to Nintendo.", "Companies cannot profit of the work owned by another company typically.", "Companies do not typically share profits with their competitors." ], "decomposition": [ "What company created and published Metroid?", "What is the relationship between #1 and Electronic Arts?", "Do two entities engaged in #2 directly benefit each other?" ], "evidence": [ [ [ [ "Metroid-10" ] ], [ [ "Electronic Arts-15" ] ], [ "operation" ] ], [ [ [ "Metroid-1" ] ], [ [ "Electronic Arts-15" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Metroid (video game)-8" ] ], [ [ "Electronic Arts-15" ] ], [ "operation" ] ] ] }, { "qid": "5ebb9c0cde0231021409", "term": "Smooth jazz", "description": "category of music", "question": "Are you likely to hear Rammstein playing in smooth jazz clubs?", "answer": false, "facts": [ "Smooth jazz is a combination of jazz with easy-listening pop music and lightweight R&B.", "Smooth jazz began in the United States in the 1970s.", "Rammstein is a German band that plays heavy metal music." ], "decomposition": [ "What kinds of music is played at a smooth jazz club?", "What kinds of music does Rammstein play?", "Is there an overlap between #1 and #2?" 
], "evidence": [ [ [ [ "Jazz club-2" ] ], [ [ "Rammstein-55" ] ], [ "operation" ] ], [ [ [ "Smooth jazz-2" ] ], [ [ "Rammstein-45" ] ], [ "operation" ] ], [ [ [ "Jazz club-2" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "6e16eb72a2aceb3dc9cf", "term": "New Testament", "description": "Second division of the Christian biblical canon", "question": "Would a kindergarten teacher make a lesson of the New Testament?", "answer": false, "facts": [ "The majority of Kindergarten teachers work in public schools.", "Public schools abide by a separation of church and state, and do not have any overall religion.", "Students of all religions are welcome to attend public school." ], "decomposition": [ "Where do the majority of kindergarten teachers work?", "Do students in #1 follow a paritcular religion?" ], "evidence": [ [ [ [ "Kindergarten-1" ] ], [ [ "Kindergarten Playway-3" ] ] ], [ [ [ "Kindergarten-89" ] ], [ [ "Education in the United States-2" ], "no_evidence" ] ], [ [ [ "Education in the United States-47" ] ], [ [ "School prayer in the United States-1" ], "operation" ] ] ] }, { "qid": "268310929871f5913f9f", "term": "Moon Jae-in", "description": "President of South Korea", "question": "Did Moon Jae-in earn the Abitur as a teenager?", "answer": false, "facts": [ "Moon Jae-in attended high school in South Korea.", "The Abitur is a qualification granted by university-preparatory schools in Germany, Lithuania, and Estonia." ], "decomposition": [ "Which countries' schools award Abitur to their students?", "Which country did Moon Jae-in school in as a teenager?", "Is #2 included in #1?" ], "evidence": [ [ [ [ "Abitur-1" ] ], [ [ "Moon Jae-in-7" ] ], [ "operation" ] ], [ [ [ "Abitur-1" ] ], [ [ "Moon Jae-in-7" ] ], [ "operation" ] ], [ [ [ "Abitur-1" ] ], [ [ "Kyungnam High School-2", "Moon Jae-in-7" ] ], [ "operation" ] ] ] }, { "qid": "8c36b27e75cee7f0ec0c", "term": "French toast", "description": "bread soaked in beaten eggs and then fried", "question": "Can French Toast hypothetically kill a Lannister?", "answer": true, "facts": [ "The Lannister's are a wealthy family in the Song of Ice and Fire book series.", "French Toast is made from bread dipped in an egg batter.", "Salmonella is a deadly bacteria that can be carried by spoiled eggs." ], "decomposition": [ "What are the ingredients in French toast?", "Which things in #1 can spoil?", "What diseases can be carried by spoiled #2?", "What species are susceptible to #3?", "Are the Lannisters members of a species listed in #4?" 
], "evidence": [ [ [ [ "French toast-1" ] ], [ [ "Egg as food-35" ] ], [ [ "Salmonella enterica-3" ] ], [ [ "Salmonella-18" ] ], [ [ "World of A Song of Ice and Fire-44" ], "operation" ] ], [ [ [ "French toast-1" ] ], [ [ "French toast-1" ] ], [ [ "Dairy-58" ], "no_evidence" ], [ [ "Raw milk-14" ], "no_evidence" ], [ [ "World of A Song of Ice and Fire-44" ], "no_evidence", "operation" ] ], [ [ [ "French toast-7" ] ], [ [ "Milk-118" ] ], [ [ "Foodborne illness-6" ] ], [ [ "Foodborne illness-31" ] ], [ [ "Game of Thrones-10" ], "operation" ] ] ] }, { "qid": "59adb0e59a4f961f1c6f", "term": "Agnosticism", "description": "view that the existence of any deity is unknown or unknowable", "question": "Can a believer in agnosticism become pope?", "answer": false, "facts": [ "The pope is the head of the Catholic Church.", "The pope is required to be a devout follower of Christ.", "Popes preach about the teachings of Christ and the belief in one god.", "Agnostics do not acknowledge the existence of god and instead state that no one knows if there is a god or not." ], "decomposition": [ "What do agnostics believe about the existence of God?", "Which religious group does a pope head?", "What are the beliefs held by #2 concerning God's existence?", "Is #1 in agreement with #3?" ], "evidence": [ [ [ [ "Agnosticism-1" ] ], [ [ "Pontifex maximus-41" ] ], [ [ "Catholic Church-3" ] ], [ "operation" ] ], [ [ [ "Agnosticism-1" ] ], [ [ "Pope-1" ] ], [ [ "Catholic Church-38" ] ], [ "operation" ] ], [ [ [ "Agnosticism-1" ] ], [ [ "Pope-37" ] ], [ [ "Pope-74" ] ], [ "operation" ] ] ] }, { "qid": "6b7feff09dde2f64fdd5", "term": "Ronda Rousey", "description": "American professional wrestler, actress, author, mixed martial artist and judoka", "question": "Will Ronda Rousey hypothetically defeat X-Men's Colossus in a fight?", "answer": false, "facts": [ "Ronda Rousey is a mixed martial artist and wrestler.", "Ronda Rousey relies on striking moves and submission tactics to dominate her opponents.", "X-Men's Colossus has the ability to change his appearance.", "Colossus's mutation allows him to create an organic steel layer, that acts as an impenetrable external shell." ], "decomposition": [ "What type of profession is Ronda Rousey in?", "What moves do #1 use to beat their opponents?", "What special ability does X-men have?", "Can someone with #2 easily beat someone with #3?" ], "evidence": [ [ [ [ "Ronda Rousey-1" ] ], [ [ "Ronda Rousey-43" ] ], [ [ "X-Men-2" ] ], [ "no_evidence" ] ], [ [ [ "Ronda Rousey-1" ] ], [ [ "Professional wrestling-1" ] ], [ [ "Colossus (comics)-55" ] ], [ "operation" ] ], [ [ [ "Ronda Rousey-1" ] ], [ [ "Grappling position-5" ], "no_evidence" ], [ [ "Colossus (comics)-2" ] ], [ "operation" ] ] ] }, { "qid": "910b0e2092a5074851b7", "term": "Brewing", "description": "production of beer", "question": "Can brewing occur in a prison environment?", "answer": true, "facts": [ "Pruno is a product made almost exclusively in prisons.", "Pruno is a fermented beverage that is made of fruit to produce alcohol." ], "decomposition": [ "What is Pruno?", "Where is #1 made?", "Is #2 same as prison?" 
], "evidence": [ [ [ [ "Pruno-1" ] ], [ [ "Pruno-1" ] ], [ "operation" ] ], [ [ [ "Pruno-1" ] ], [ [ "Pruno-1" ] ], [ [ "Pruno-1" ] ] ], [ [ [ "Pruno-1" ] ], [ [ "Pruno-1" ] ], [ "operation" ] ] ] }, { "qid": "1d4a642a03f37de1b5d0", "term": "Wheelchair", "description": "chair with wheels, used by people for whom walking is difficult or impossible due to illness, injury, or disability", "question": "Do American wheelchair users know what the ADA is?", "answer": true, "facts": [ "The ADA is the Americans with Disabilities Act.", "Non-ADA compliant businesses include those without wheelchair access points." ], "decomposition": [ "Which areas of interest are affected by the ADA?", "Is any of #1 of particular interest to wheelchair users in America?" ], "evidence": [ [ [ [ "Americans with Disabilities Act of 1990-1" ] ], [ [ "Disability-4" ], "operation" ] ], [ [ [ "American Association of People with Disabilities-1" ] ], [ [ "American Association of People with Disabilities-1" ] ] ], [ [ [ "Americans with Disabilities Act of 1990-6" ] ], [ [ "Americans with Disabilities Act of 1990-6" ] ] ] ] }, { "qid": "b18b7cbde476888d0059", "term": "Family of Barack Obama", "description": "List of members of the family of Barack Obama", "question": "Does Lupita Nyongo have citizenship in paternal Family of Barack Obama's origin country?", "answer": true, "facts": [ "Actress Lupita Nyongo has dual citizenship in Kenya and Mexico.", "Barack Obama's father was born in Nyang’oma Kogelo, Rachuonyo District, Kenya Colony.", "Barack Obama's father was a Kenyan of the Luo tribe." ], "decomposition": [ "In what nations does Lupita Nyongo have citizenship?", "Who is Barack Obama's father?", "In what nations does #2 have citizenship?", "Is at least one country in #1 also found in #3?" ], "evidence": [ [ [ [ "Lupita Nyong'o-7" ] ], [ [ "Barack Obama-6" ] ], [ [ "Barack Obama-6" ] ], [ "operation" ] ], [ [ [ "Lupita Nyong'o-7" ] ], [ [ "Barack Obama Sr.-1" ] ], [ [ "Barack Obama-6" ] ], [ "operation" ] ], [ [ [ "Lupita Nyong'o-7" ] ], [ [ "Barack Obama Sr.-1" ] ], [ [ "Barack Obama Sr.-3" ] ], [ "operation" ] ] ] }, { "qid": "85fc2b42b1fb20762db4", "term": "Amy Winehouse", "description": "English singer and songwriter", "question": "Did Amy Winehouse always perform live perfectly?", "answer": false, "facts": [ "Amy Winehouse was known for getting intoxicated before an during sets.", "Amy Winehouse forgot the lyrics to her songs during her last performance." ], "decomposition": [ "Is Amy Winehouse known for always being sober and coordinated on stage?" ], "evidence": [ [ [ [ "Amy Winehouse-16" ] ] ], [ [ [ "Amy Winehouse-16" ] ] ], [ [ [ "Amy Winehouse-16" ] ] ] ] }, { "qid": "2c0ad6834e39382c2e4f", "term": "Equator", "description": "Intersection of a sphere's surface with the plane perpendicular to the sphere's axis of rotation and midway between the poles", "question": "Is most coffee produced South of the Equator?", "answer": true, "facts": [ "The countries with the highest coffee production are in South America.", "Almost all of South America is in the Southern Hemisphere." ], "decomposition": [ "Which countries produce the most coffee?", "Which hemisphere are most of #1 located?", "Is #2 south of the equator?" 
], "evidence": [ [ [ [ "Coffee-45" ], "no_evidence" ], [ "no_evidence" ], [ [ "Brazil-43" ], "no_evidence", "operation" ] ], [ [ [ "Coffee-5" ], "no_evidence" ], [ [ "Southern Hemisphere-9" ] ], [ [ "Southern Hemisphere-1" ], "operation" ] ], [ [ [ "Coffee-5" ] ], [ [ "Southern Hemisphere-9" ] ], [ [ "Southern Hemisphere-9" ] ] ] ] }, { "qid": "69e6a75f195a68d1bf7e", "term": "New Mexico", "description": "U.S. state in the United States", "question": "Is the largest city in New Mexico also known as Yootó?", "answer": false, "facts": [ "Yootó stands for Bead Water Place.", "The area Santa Fe occupied was known by the Navajo people as Yootó.", "The largest city in New Mexico is Albuquerque." ], "decomposition": [ "What is the largest city in New Mexico?", "Is #1 known as Yootó?" ], "evidence": [ [ [ [ "Albuquerque, New Mexico-1" ] ], [ "operation" ] ], [ [ [ "Albuquerque, New Mexico-1" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Santa Fe, New Mexico-1" ] ] ] ] }, { "qid": "c681171e816f3117df0b", "term": "Tongue", "description": "mouth organ that tastes and facilitates speech", "question": "Is the tongue part of a creature's head?", "answer": true, "facts": [ "A creature's tongue is inside its mouth.", "A creature's mouth is part of its head." ], "decomposition": [ "In what body part is the tongue located?", "Is #1 located in the head?" ], "evidence": [ [ [ [ "Tongue-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Tongue-5" ] ], [ [ "Tongue-5" ] ] ], [ [ [ "Tongue-1" ] ], [ [ "Head-1" ], "operation" ] ] ] }, { "qid": "307c3ae5cff437eee937", "term": "Rush Limbaugh", "description": "American radio talk show host, commentator, author, and television personality", "question": "Does Coast to Coast AM have more longevity than the Rush Limbaugh show?", "answer": true, "facts": [ "As of 2020, The Rush Limbaugh Show has been on the airwaves since 1988.", "As of 2020, Coast to Coast AM has been on the airwaves since 1984." ], "decomposition": [ "When did the Rush Limbaugh show first air?", "When did Coast to Coast AM first air?", "Is #2 before #1?" ], "evidence": [ [ [ [ "The Rush Limbaugh Show-1" ] ], [ [ "Coast to Coast AM-2" ] ], [ "operation" ] ], [ [ [ "The Rush Limbaugh Show-1" ] ], [ [ "Coast to Coast AM-2" ] ], [ "operation" ] ], [ [ [ "Rush Limbaugh-1" ] ], [ [ "Coast to Coast AM-2" ] ], [ "operation" ] ] ] }, { "qid": "f39895e719acd479e7ba", "term": "E.T. the Extra-Terrestrial", "description": "1982 American science fiction film directed by Steven Spielberg", "question": "Is the E.T. the Extra-Terrestrial Atari Landfill story an urban legend?", "answer": true, "facts": [ "An urban legend is a humorous or horrifying story based on hearsay that is circulated as true.", "E.T. the Extra Terrestrial was panned as one of the worst video games ever made.", "A widespread story stated that thousands of copies of E.T. the Extra Terrestrial video game were buried in a landfill", "A former Atari manager stated that 728,000 Atari games were in fact buried in a landfill.", "The Atari landfill was dug up and nearly 900 games were recovered, but there was only one copy of E.T. included." ], "decomposition": [ "Was what the widespread landfill rumor concerning copies of E.T. the Extra Terrestial video game?", "When the landfill was dug up, were the claims in #1 found to be false?", "Considering #2, does the rumor fit the description of an urban legend?" 
], "evidence": [ [ [ [ "Atari video game burial-2" ] ], [ [ "Atari video game burial-17" ] ], [ [ "Legend-15" ] ] ], [ [ [ "Atari video game burial-1" ] ], [ [ "Atari video game burial-18" ] ], [ "operation" ] ], [ [ [ "E.T. the Extra-Terrestrial (video game)-3" ] ], [ "operation" ], [ [ "Urban legend-1" ], "operation" ] ] ] }, { "qid": "4ce7336b43da9f13fbd1", "term": "Argon", "description": "Chemical element with atomic number 18", "question": "Can you chew argon?", "answer": false, "facts": [ "Chewing is the act of breaking down solid objects with your teeth", "Under normal conditions, argon exists as a gas" ], "decomposition": [ "What kind of substance is argon?", "Do humans usually chew #1?" ], "evidence": [ [ [ [ "Argon-1" ] ], [ [ "Chewing-1" ], "operation" ] ], [ [ [ "Argon-1" ] ], [ [ "Chewing-1" ] ] ], [ [ [ "Argon-1" ] ], [ "operation" ] ] ] }, { "qid": "78f2f99d04b9acd8bada", "term": "Alice's Adventures in Wonderland", "description": "book by Lewis Carroll", "question": "Would a Jehovah's witness approve of Alice's Adventures in Wonderland?", "answer": false, "facts": [ "Jehovah's Witness is a religious group that strictly forbids tobacco and smoking.", "A prominent character in Alice's Adventures in Wonderland, the caterpillar, blows rings of smoke from a large pipe." ], "decomposition": [ "What are Jehovah's Witnesses?", "What items do #1's forbid?", "In Alice's Adventures in Wonderland, what is the caterpillar seen doing with a pipe?", "Is #2 different from #3?" ], "evidence": [ [ [ [ "Jehovah's Witnesses-1" ] ], [ [ "Religious views on smoking-6" ] ], [ [ "Caterpillar (Alice's Adventures in Wonderland)-6" ] ], [ "operation" ] ], [ [ [ "Jehovah's Witnesses-1" ] ], [ [ "Jehovah's Witnesses-36" ] ], [ [ "Alice's Adventures in Wonderland-13" ] ], [ "operation" ] ], [ [ [ "Jehovah's Witnesses-1" ] ], [ [ "Jehovah's Witnesses practices-27" ], "no_evidence" ], [ [ "Alice's Adventures in Wonderland-13" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "2a9cf64b15df1b841f25", "term": "Santa Claus", "description": "Folkloric figure, said to deliver gifts to children on Christmas Eve", "question": "Does Santa Claus hypothetically give Joffrey Baratheon presents?", "answer": false, "facts": [ "Santa Claus is a figure in folklore that gives good children presents and bad children coal.", "Joffrey Baratheon is a character in the Game of Thrones TV series.", "Joffrey is a young royal that has a man's tongue cut out.", "Joffrey shoots his crossbow into the flesh of innocent people for his amusement." ], "decomposition": [ "What kind of children would Santa Claus give presents to?", "What is Joffrey Baratheon's character like?", "Do #1 usually exhibit #2?" ], "evidence": [ [ [ [ "Santa Claus-1" ] ], [ [ "Joffrey Baratheon-2" ] ], [ "operation" ] ], [ [ [ "Santa Claus-1" ] ], [ [ "Joffrey Baratheon-2" ] ], [ "operation" ] ], [ [ [ "Santa Claus-3" ] ], [ [ "Joffrey Baratheon-6" ] ], [ "no_evidence" ] ] ] }, { "qid": "1e0f0676ee91a330262a", "term": "Lapidary", "description": "gemstone cutter", "question": "Does a lapidary work with items that are studied by geologists?", "answer": true, "facts": [ "Some of the things geologists study include gemstones, minerals, and stone", "Lapidarists work with stone, minerals and gemstones" ], "decomposition": [ "What are the materials a lapidary works with?", "What do geologists study?", "Is any of #1 derived from #2?" 
], "evidence": [ [ [ [ "Lapidary-1" ] ], [ [ "Geologist-9" ] ], [ "operation" ] ], [ [ [ "Lapidary-1" ] ], [ [ "Geology-1" ] ], [ "operation" ] ], [ [ [ "Lapidary-1" ] ], [ [ "Geology-1" ] ], [ "operation" ] ] ] }, { "qid": "89da908a20fe2c0cc7b1", "term": "The Great Gatsby", "description": "1925 novel by F. Scott Fitzgerald", "question": "Was The Great Gatsby inspired by the novel 1984?", "answer": false, "facts": [ "The Great Gatsby was published in 1925.", "The novel 1984 was published in 1949." ], "decomposition": [ "When was the Great Gatsby published?", "When was 1984 written?", "Is #2 before #1?" ], "evidence": [ [ [ [ "The Great Gatsby-3" ] ], [ [ "Nineteen Eighty-Four-1" ] ], [ "operation" ] ], [ [ [ "The Great Gatsby-1" ] ], [ [ "Nineteen Eighty-Four-1" ] ], [ "operation" ] ], [ [ [ "The Great Gatsby-1" ] ], [ [ "Nineteen Eighty-Four-1" ] ], [ "operation" ] ] ] }, { "qid": "3fe8e6070086a6cb0a8c", "term": "Tibia", "description": "larger of the two bones of the leg below the knee for vertebrates", "question": "Is the tibia required for floor exercises?", "answer": true, "facts": [ "The tibia is a bone in the lower leg", "Floor exercises are a program in gymnastics competitions", "Gymnastics requires use of arms and legs, as well as other parts of the body" ], "decomposition": [ "What sport are floor exercises part of? ", "What body parts does #1 require?", "What part of the body part is the tibia?", "Is #3 in #2?" ], "evidence": [ [ [ [ "Floor (gymnastics)-17" ] ], [ [ "Leg-6" ] ], [ [ "Tibia-1" ] ], [ "operation" ] ], [ [ [ "Floor (gymnastics)-1" ] ], [ [ "Gymnastics-1" ] ], [ [ "Tibia-1" ] ], [ "operation" ] ], [ [ [ "Floor (gymnastics)-1" ] ], [ [ "Floor (gymnastics)-2" ] ], [ [ "Tibia-1" ] ], [ "operation" ] ] ] }, { "qid": "463faa7aaa7d9e1e366b", "term": "Europa (moon)", "description": "The smallest of the four Galilean moons of Jupiter", "question": "Is Europa linked to Viennese waltzes?", "answer": true, "facts": [ "Europa is a moon of Jupiter", "Europa played an important role in Stanley Kubrick's film 2001: A Space Odyssey", "The soundtrack to 2001: A Space Odyssey prominently featured The Blue Danube", "The Blue Danube is a famous Viennese waltz composed by Johan Strauss II" ], "decomposition": [ "Which moon of Jupiter played an important role in the film '2001: A Space Odyssey'?", "Is #1 Europa?", "Which soundtrack was prominently featured in the movie?", "Is #3 a Viennese waltz?", "Are #2 and #4 positive?" ], "evidence": [ [ [ "no_evidence" ], [ "operation" ], [ [ "2001: A Space Odyssey (film)-2", "The Blue Danube-14" ] ], [ [ "The Blue Danube-1" ] ], [ "operation" ] ], [ [ [ "2001: A Space Odyssey (film)-7" ], "no_evidence" ], [ [ "Europa (moon)-1" ], "operation" ], [ [ "2001: A Space Odyssey (film)-2" ] ], [ [ "The Blue Danube-1" ] ], [ "operation" ] ], [ [ [ "2001: A Space Odyssey (film)-7" ], "no_evidence" ], [ "no_evidence", "operation" ], [ [ "2001: A Space Odyssey (film)-2" ] ], [ [ "Johann Strauss II-1", "The Blue Danube-1" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d667c4a43fea660f1a5f", "term": "Cholera", "description": "Bacterial infection of the small intestine", "question": "Is a platypus immune from cholera?", "answer": true, "facts": [ "Cholera is a bacteria that damages the small intestines in humans.", "The intestines are part of the stomach of humans.", "A platypus does not have a stomach." ], "decomposition": [ "What parts of the body does Cholera damage?", "Does a platypus not have #1?" 
], "evidence": [ [ [ [ "2016–2020 Yemen cholera outbreak-6" ] ], [ [ "Platypus-4" ], "no_evidence" ] ], [ [ [ "Vibrio cholerae-6" ] ], [ "no_evidence" ] ], [ [ [ "Diseases and epidemics of the 19th century-10" ] ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e40cbe4b89942e256377", "term": "Jerry Seinfeld", "description": "American comedian and actor", "question": "Did Jerry Seinfeld have reason to cheer in 1986?", "answer": true, "facts": [ "Jerry Seinfeld is a fan of the New York Mets baseball team", "The New York Mets won a World Series title in 1986" ], "decomposition": [ "Do fans cheer if their team wins?", "Is Jerry Seinfeld a NY Mets fan?", "Did the NY Mets win the World Series in 1986?", "Is #1, #2 and #3 \"yes\"?" ], "evidence": [ [ [ [ "Cheering-20" ] ], [ [ "Jerry Seinfeld-28" ] ], [ [ "1986 World Series-4" ] ], [ "operation" ] ], [ [ [ "Cheering-18" ], "no_evidence" ], [ [ "Jerry Seinfeld-28" ] ], [ [ "1986 World Series-1" ] ], [ "operation" ] ], [ [ [ "Cheering-1" ] ], [ [ "The Boyfriend (Seinfeld)-2" ] ], [ [ "1986 World Series-1" ] ], [ "operation" ] ] ] }, { "qid": "4148e19a53dfbb020484", "term": "Anchovy", "description": "Family of fishes", "question": "Are there bones in an anchovy pizza?", "answer": true, "facts": [ "Anchovies used on pizza are typically packed whole in oil or water. ", "Anchovies on pizza are not usually cut or filleted in any way." ], "decomposition": [ "Which fishes are used in anchovy pizza?", "Are #1 usually packed whole into the pizza?" ], "evidence": [ [ [ [ "Anchovies as food-3" ] ], [ [ "Anchovies as food-2" ] ] ], [ [ [ "Anchovies as food-2" ] ], [ [ "Anchovies as food-2" ], "no_evidence", "operation" ] ], [ [ [ "Anchovy-3" ] ], [ [ "Anchovy-3" ] ] ] ] }, { "qid": "691c8df42c886d6db9d4", "term": "John the Baptist", "description": "1st-century Jewish preacher and later Christian saint", "question": "Would John the Baptist be invited to a hypothetical cephalophore reunion in heaven?", "answer": false, "facts": [ "John the Baptist was a preacher that became a Catholic Saint.", "John the Baptist was beheaded by king Herod.", "A cephalophore is a Saint martyred by beheading, and is depicted in art as carrying their own head.", "Saint Denis was one of several beheaded saints that is said to have carried his own head and is depicted as such in art.", "John the Baptist did not carry his head, since it was on a plate owned by King Herod's stepdaughter." ], "decomposition": [ "What does one carry for one to be considered a cephalophore?", "Did John the Baptist carry #1?" ], "evidence": [ [ [ [ "Cephalophore-1" ] ], [ [ "Cephalophore-4" ], "operation" ] ], [ [ [ "Cephalophore-1" ] ], [ [ "Cephalophore-5" ], "operation" ] ], [ [ [ "Cephalophore-1" ] ], [ [ "John the Baptist-188" ], "no_evidence", "operation" ] ] ] }, { "qid": "6ea453908d54c4d7e3e7", "term": "Common carp", "description": "Species of fish", "question": "Are common carp sensitive to their environments?", "answer": false, "facts": [ "Common carp are a type of fish.", "Common carp are considered a destructive invasive species. ", "Common carp are tolerant of most conditions. ", "Common carp are able to survive frozen over ponds and low oxygenated waters. " ], "decomposition": [ "What kinds of environments are common carp native to?", "What kinds of environments have common carp expanded into?", "Do #1 and #2 include similar climate conditions?" 
], "evidence": [ [ [ [ "Common carp-1" ] ], [ [ "Common carp-10" ] ], [ "no_evidence" ] ], [ [ [ "Common carp-1" ] ], [ [ "Common carp-1" ] ], [ "operation" ] ], [ [ [ "Carp-1" ] ], [ [ "Common carp-16" ] ], [ "operation" ] ] ] }, { "qid": "9aed4083afb75e9f247c", "term": "Douglas fir", "description": "species of tree", "question": "Have Douglas fir been used to fight wars?", "answer": true, "facts": [ "Douglas fir are a type of tree.", "Douglas fir are used to make ships.", "The Minesweeper is a small warship made from Douglas fir.", "The Minesweeper was made of wood to reduce it's risk magnetic signature and likely hood of detonating mines." ], "decomposition": [ "What are the uses of Douglas Fir?", "Does #1 include vehicles of war?" ], "evidence": [ [ [ [ "Douglas fir-16", "Douglas fir-23" ], "no_evidence" ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "War-1" ], "no_evidence" ] ], [ [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e8cc7615cfaf45069eb5", "term": "Ludacris", "description": "American rapper and actor", "question": "Can you watch the Borgia's World of Wonders before Ludacris's Release Therapy finishes?", "answer": true, "facts": [ "World of Wonders is an episode of the Showtime TV series The Borgias, with a run time of 49 minutes.", "Ludacris's 2006 album Release Therapy has a run time of 62 minutes." ], "decomposition": [ "What is the run time of the Borgia's World of Wonders?", "What is the run time of Ludacris's Release Therapy?", "Is #1 shorter than #2?" ], "evidence": [ [ [ [ "The Borgias (2011 TV series)-14" ], "no_evidence" ], [ [ "Release Therapy-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "The Borgias (2011 TV series)-1", "The Borgias (2011 TV series)-4" ], "no_evidence" ], [ [ "Release Therapy-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ "no_evidence" ], [ "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "7d310e9ebc2025febdd6", "term": "Winter", "description": "one of the Earth's four temperate seasons, occurring between autumn and spring", "question": "Is winter solstice in Northern Hemisphere closer to July than in Southern Hemisphere? ", "answer": false, "facts": [ "The winter solstice in the Northern Hemisphere happens in December.", "The winter solstice in the Southern Hemisphere happens in June." ], "decomposition": [ "When does the winter solstice occur in the Northern Hemisphere?", "When does the winter solstice occur in the Southern Hemisphere?", "How many days are in between #1 and July?", "How many days are between #2 and July?", "Is #4 greater than #3?" 
], "evidence": [ [ [ [ "Winter solstice-2" ] ], [ [ "Winter solstice-2" ] ], [ "operation" ], [ "operation" ], [ "operation" ] ], [ [ [ "Winter solstice-2" ] ], [ [ "Winter solstice-2" ] ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ], [ "no_evidence", "operation" ] ], [ [ [ "Winter solstice-2" ] ], [ [ "Winter solstice-2" ] ], [ "no_evidence" ], [ "no_evidence" ], [ "operation" ] ] ] }, { "qid": "da76a093d3b1cc47c6f2", "term": "Traffic collision", "description": "occurs when a vehicle collides with another vehicle, pedestrian, animal, road debris, or other stationary obstruction, such as a tree, pole or building or drives off the road", "question": "Can a traffic collision make someone a millionaire?", "answer": true, "facts": [ "Traffic collisions sometimes result in extremely expensive physical damage.", "Physical damage is compensated by insurance companies in the form of monetary payment.", "Million dollar verdicts are sometimes awarded for traffic collisions that result in major damage. " ], "decomposition": [ "What can kind of damage can traffic collisions cause?", "If #1 occurs, how would insurance companies react?", "Can #2 sometimes occur in a million dollar verdict?" ], "evidence": [ [ [ [ "Personal injury-1" ], "no_evidence" ], [ [ "Personal injury-26" ], "no_evidence" ], [ [ "Pain and suffering-5" ], "no_evidence", "operation" ] ], [ [ [ "Traffic collision-1" ] ], [ [ "Vehicle insurance in the United States-1" ] ], [ [ "Traffic collision-84" ] ] ], [ [ [ "Traffic collision-1" ] ], [ [ "Vehicle insurance-1" ] ], [ "no_evidence" ] ] ] }, { "qid": "e439fdf801899e9baead", "term": "Deciduous", "description": "Trees or shrubs that lose their leaves seasonally", "question": "Are Christmas trees typically deciduous?", "answer": false, "facts": [ "Christmas trees are almost always pine trees.", "Christmas trees are green all year round. " ], "decomposition": [ "What types of trees are used as Christmas trees?", "Are any of #1 deciduous?" ], "evidence": [ [ [ [ "Christmas tree-1" ] ], [ [ "Deciduous-1" ], "operation" ] ], [ [ [ "Christmas tree-1" ] ], [ [ "Deciduous-1" ], "operation" ] ], [ [ [ "Christmas tree-1" ] ], [ [ "Evergreen-1" ] ] ] ] }, { "qid": "b69c53d2e6196bee9a92", "term": "Swan Lake", "description": "Ballet by Pyotr Ilyich Tchaikovsky", "question": "Can you drown in a Swan Lake performance?", "answer": false, "facts": [ "Drowning is defined as respiratory impairment as a result of being in or under a liquid.", "Swan Lake is not a body of water, but rather an example of performance dance." ], "decomposition": [ "What is a necessary condition for drowning?", "Does Swan Lake possess #1?" ], "evidence": [ [ [ [ "Drowning-1" ] ], [ [ "Swan Lake-1" ], "operation" ] ], [ [ [ "Drowning-1" ] ], [ [ "Ballet-1", "Swan Lake-1" ] ] ], [ [ [ "Drowning-2" ] ], [ "no_evidence" ] ] ] }, { "qid": "5d2e46bc031e1ccf88b4", "term": "Spaghetti", "description": "Type of pasta", "question": "Is it unusual to eat spaghetti without a fork?", "answer": true, "facts": [ "Spaghetti noodles are long and thin, they are difficult to scoop and must be twirled.", "Spaghetti is never served in a restaurant without a fork." ], "decomposition": [ "Is Spaghetti usually eaten using a fork?" 
], "evidence": [ [ [ [ "Spaghetti-1" ], "no_evidence" ] ], [ [ [ "Italian cuisine-17", "Spaghetti-1" ], "no_evidence", "operation" ] ], [ [ [ "Spaghetti-15" ], "no_evidence" ] ] ] }, { "qid": "ae7daa98394767229b6d", "term": "Narcissism", "description": "Personality trait of self love of a fake perfect self.", "question": "Is narcissism's origin a rare place to get modern words from?", "answer": false, "facts": [ "Narcissism comes from the ancient Greek story of Narcissus, who fell in love with his own reflection.", "Aphrodisiac comes from stories about the ancient Greek goddess Aphrodite.", "Europe comes from Europa, an ancient Greek princess.", "The word stygian relates to the river of Hades in Greek mythology.", "Hypnosis comes from Hypnos, the Greek god of sleep." ], "decomposition": [ "From what culture does the word \"narcissism\" come? ", "What percent of English words come from #1?", "Is #2 small enough to be considered \"rare\"?" ], "evidence": [ [ [ [ "Narcissism-5" ] ], [ [ "English words of Greek origin-34" ] ], [ "operation" ] ], [ [ [ "Narcissism-1" ] ], [ [ "English language-105", "English language-108", "English words of Greek origin-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Narcissism-5" ] ], [ [ "English words of Greek origin-4" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "94f996bf88b05741bb07", "term": "Kobe", "description": "Designated city in Kansai, Japan", "question": "Is Kobe's famous animal product used in a BLT?", "answer": false, "facts": [ "Kobe's famous animal product is Kobe beef.", "The animal product used in a BLT is bacon.", "Beef is derived from cows.", "Bacon is derived from pigs." ], "decomposition": [ "What animal product is Kobe, Japan most famous for?", "What animal product comes is used in a BLT?", "What animal does #1 come from?", "What animal does #2 come from?", "Is #3 the same as #4?" ], "evidence": [ [ [ [ "Kobe beef-3" ] ], [ [ "BLT-1" ] ], [ [ "Beef-57" ] ], [ [ "Bacon-39" ] ], [ "operation" ] ], [ [ [ "Kobe-3" ] ], [ [ "BLT-1" ] ], [ [ "Bacon-7", "Pork belly-1" ] ], [ [ "Beef-1", "Cattle-1" ] ], [ "operation" ] ], [ [ [ "Kobe beef-1" ] ], [ [ "BLT-1" ] ], [ [ "Japanese Black-1" ] ], [ [ "Bacon-1", "Pork-1" ] ], [ "operation" ] ] ] }, { "qid": "818a301e9962bdf987a9", "term": "Chinese Americans", "description": "Ethnic group", "question": "Do Chinese Americans face discrimination at a Federal level in the US?", "answer": true, "facts": [ "The President of the United States frequently referred to the COVID-19 pandemic as a 'Chinese Virus' and 'Kung Flu', encouraging the use of derogatory language towards Chinese Americans.", "The President of the United States has not called for the violence and hate towards Chinese Americans in response to COVID-19 to end." ], "decomposition": [ "Who is the head of the US Federal Government?", "Does #1 behave in a discriminatory way toward Chinese Americans?" 
], "evidence": [ [ [ [ "Federal government of the United States-17" ] ], [ [ "Anti-Chinese sentiment in the United States-28" ], "no_evidence" ] ], [ [ [ "Donald Trump-1" ] ], [ [ "Donald Trump-128" ], "no_evidence", "operation" ] ], [ [ [ "Donald Trump-1" ] ], [ [ "Donald Trump-128", "Donald Trump-154" ], "operation" ] ] ] }, { "qid": "45c1de8a7cfc9ad51786", "term": "Asteroid", "description": "Minor planet that is not a comet", "question": "Could largest asteroid crush a whole city?", "answer": true, "facts": [ "The largest asteroids are the size of miniature planets.", "Mercury is the smallest planet and has a radius of 1,516 miles.", "New York City is 13.4 miles long and 2.3 miles wide. ", "Mercury weighs 3.285 × 10^23 kg." ], "decomposition": [ "What is the size of the largest asteroid?", "What is the size of New York City?", "Is #1 bigger than #2?" ], "evidence": [ [ [ [ "Ceres (dwarf planet)-1" ] ], [ [ "New York City-1" ] ], [ "operation" ] ], [ [ [ "Asteroid-38" ] ], [ [ "New York City-1" ] ], [ [ "New York City-1", "Tunguska event-1" ], "operation" ] ], [ [ [ "Ceres (dwarf planet)-1" ] ], [ [ "New York metropolitan area-1" ] ], [ "operation" ] ] ] }, { "qid": "f332153ae800b78d0959", "term": "Sand cat", "description": "Small wild cat", "question": "Can you hide a basketball in a sand cat's ear?", "answer": false, "facts": [ "The diameter of a standard NBA basketball is around 9.5 inches", "A sand cat's ear grows to 2.8 inches tall" ], "decomposition": [ "On average, how large is a sand cat's ear?", "What is the size of a standard NBA basketball?", "Is #1 greater than #2?" ], "evidence": [ [ [ [ "Sand cat-1" ] ], [ [ "Basketball-1" ] ], [ "operation" ] ], [ [ [ "Sand cat-1" ], "no_evidence" ], [ [ "Basketball-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Sand cat-19" ], "no_evidence" ], [ [ "Basketball (ball)-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "e4c256ba753cb8d4fca3", "term": "Evander Holyfield", "description": "American boxer", "question": "Did Mike Tyson do something very different than McGruff's slogan to Evander Holyfield in 1997?", "answer": false, "facts": [ "McGruff was an animated dog spokesman for the National Crime Prevention Council.", "McGruff's slogan was, \"Take a bite out of crime.\"", "Mike Tyson was disqualified in a 1997 boxing bout against Evander Holyfield for taking a bite out of his ear." ], "decomposition": [ "What is the slogan of McGruff?", "What did Mike Tyson do to Evander Holyfield during their match?", "Is #2 an action that occurs in #1?" ], "evidence": [ [ [ [ "McGruff the Crime Dog-7" ] ], [ [ "Evander Holyfield-4" ] ], [ "operation" ] ], [ [ [ "McGruff the Crime Dog-7" ] ], [ [ "Mike Tyson-37" ] ], [ "operation" ] ], [ [ [ "McGruff the Crime Dog-7" ] ], [ [ "Evander Holyfield vs. Mike Tyson II-1" ] ], [ "operation" ] ] ] }, { "qid": "606181aacf5722b85a0a", "term": "Mona Lisa", "description": "Painting by Leonardo da Vinci", "question": "After viewing the Mona Lisa, could you get lunch nearby on foot?", "answer": true, "facts": [ "The Mona Lisa is housed in The Louvre.", "There are many restaurants within walking distance of The Louvre." ], "decomposition": [ "Where is the Mona Lisa located?", "Is #1 a place likely to have at least a restaurant/hotel nearby?" 
], "evidence": [ [ [ [ "Mona Lisa-54" ] ], [ [ "Louvre-60" ] ] ], [ [ [ "Mona Lisa-29" ] ], [ [ "Louvre-60" ] ] ], [ [ [ "Louvre-1", "Mona Lisa-2" ] ], [ "operation" ] ] ] }, { "qid": "72e87f1268a08de8f84d", "term": "Aloe", "description": "genus of plants", "question": "Do all parts of the aloe vera plant taste good?", "answer": false, "facts": [ "There is a layer of yellow latex liquid between the outside of an aloe leaf and the gel inside.", "The latex inside aloe tastes very bitter." ], "decomposition": [ "How do the various parts of the Aloe vera taste?", "Is all of #1 pleasant?" ], "evidence": [ [ [ [ "Aloe vera-1" ], "no_evidence" ], [ [ "Aloe vera-16" ], "no_evidence", "operation" ] ], [ [ [ "Aloe vera-21" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Aloe vera-17" ], "no_evidence" ], [ [ "Aloe vera-21" ], "no_evidence", "operation" ] ] ] }, { "qid": "33d65a4b34005b4ddd62", "term": "Fever", "description": "common medical sign characterized by elevated body temperature", "question": "Is a fever cured by listening to a cowbell?", "answer": false, "facts": [ "A fever is an increase in body temperature above the normal range", "Fever can be treated with medication or will usually disappear if left alone", "A cowbell is a musical instrument" ], "decomposition": [ "What are some common ways of treating a fever?", "Is listening to a cowbell included in #1?" ], "evidence": [ [ [ [ "Fever-2" ] ], [ [ "Cowbell-1" ], "operation" ] ], [ [ [ "Fever-37" ] ], [ [ "Cowbell-1" ] ] ], [ [ [ "Fever-2" ] ], [ "operation" ] ] ] }, { "qid": "953c8cb086cf494fb849", "term": "Saint Kitts and Nevis", "description": "country in Central America and Caribbean", "question": "Are brown rock fish found in the waters surrounding Saint Kitts and Nevis?", "answer": false, "facts": [ "Saint Kitts and Nevis is located in the Caribbean Sea and Atlantic Sea", "Brown rock fish are found in the Pacific Ocean" ], "decomposition": [ "What waters surround Saint Kitts and Nevis?", "In what body of water are brown rock fish found?", "Is #1 the same as #2?" ], "evidence": [ [ [ [ "Saint Kitts and Nevis-1" ] ], [ [ "Brown rockfish-3" ] ], [ "operation" ] ], [ [ [ "Saint Kitts-1" ] ], [ [ "Brown rockfish-3" ] ], [ "operation" ] ], [ [ [ "Saint Kitts-1" ] ], [ [ "Brown rockfish-2" ] ], [ "operation" ] ] ] }, { "qid": "1d67cd8e5b32229ebb04", "term": "Welfare", "description": "Means-oriented social benefit", "question": "Do Republicans reject all forms of welfare?", "answer": false, "facts": [ "Welfare is all of the social programs that provide benefits to citizens for little or no money.", "Republicans have traditionally voted against welfare benefits in the form of food stamps and medicaid expansion.", "Public roads are a form of welfare since people are not required to build their own road each time they need to get to work." ], "decomposition": [ "What welfare policies are on the Republican platform?", "Are government-funded public works absent from #1?" 
], "evidence": [ [ [ [ "Political positions of the Republican Party-3" ] ], [ [ "Political positions of the Republican Party-3" ] ] ], [ [ [ "Political positions of the Republican Party-3" ] ], [ "operation" ] ], [ [ [ "Republican Party (United States)-22" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "d354d1594bf431bf9ee8", "term": "Old English", "description": "Early form of English; Anglo-Saxon", "question": "Did the confederate states speak Old English before the Civil War?", "answer": false, "facts": [ "Old English is the earliest form of English during the middle ages.", "Modern English replaced old English during the seventeenth century.", "American English was created and spoken during the formation of the first US colonies. ", "The civil war started in 1861, and was a war between the northern states and southern states. " ], "decomposition": [ "In approximately which years was Old English spoken?", "In which years did the Confederate States exist? ", "Was any part of #2 within #1?" ], "evidence": [ [ [ [ "Old English-1" ] ], [ [ "Confederate States of America-1" ] ], [ "operation" ] ], [ [ [ "Old English-6" ] ], [ [ "Confederate States of America-9" ] ], [ "operation" ] ], [ [ [ "Old English-1" ] ], [ [ "Confederate States of America-1" ] ], [ "operation" ] ] ] }, { "qid": "777828272c5e0915f4d1", "term": "Ivan the Terrible", "description": "Grand Prince of Moscow and 1st Tsar of Russia", "question": "Did Ivan the Terrible's father and grandfather have nicer nicknames?", "answer": true, "facts": [ "Ivan the Terrible was nicknamed terrible because of his harsh rule.", "Ivan the Terrible's father, Vasili III Ivanovich, was nicknamed Vasili the Adequate.", "Ivan the Terrible's grandfather, Ivan III Vasilyevich, was nicknamed Ivan the Great." ], "decomposition": [ "Who was Ivan the Terrible's father?", "Who was the father of #1?", "Do #1 and #2 have nicer nicknames than \"the Terrible\"?" ], "evidence": [ [ [ [ "Vasili III of Russia-1" ] ], [ [ "Ivan III of Russia-1", "Vasili III of Russia-1" ] ], [ "operation" ] ], [ [ [ "Vasili III of Russia-1" ] ], [ [ "Vasili III of Russia-1" ] ], [ [ "Vasili III of Russia-1" ], "operation" ] ], [ [ [ "Vasili III of Russia-1" ] ], [ [ "Ivan III of Russia-1" ] ], [ "operation" ] ] ] }, { "qid": "d5cd043d6f15fb43bd04", "term": "Armenians", "description": "ethnic group native to the Armenian Highland", "question": "Do Armenians tend to dislike System of a Down?", "answer": false, "facts": [ "System of a Down is an Armenian-American rock band.", "System of a Down has numerous songs bringing light to the plight of Armenian people and the Armenian Genocide." ], "decomposition": [ "Is System of a Down an Armenian-American rock band?", "Would members of #1 rock band dislike where they are from?" ], "evidence": [ [ [ [ "System of a Down-1" ] ], [ [ "Armenian Genocide in culture-37" ], "no_evidence" ] ], [ [ [ "System of a Down-1" ] ], [ "operation" ] ], [ [ [ "System of a Down-6" ] ], [ [ "Patriotism-1" ], "no_evidence", "operation" ] ] ] }, { "qid": "7a096b2fc559fd5c7919", "term": "Green", "description": "Additive primary color visible between blue and yellow", "question": "Is a paleo dieter unlikely to color beverages green for St. 
Patrick's Day?", "answer": true, "facts": [ "There is no natural source for green food coloring approved by the FDA", "A paleo diet avoids artificial colors and flavors" ], "decomposition": [ "What are some common FDA approved sources of green color applied to beverages?", "What kind of foods would a paleo dieter avoid?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Food coloring-11" ] ], [ [ "Paleolithic diet-3" ] ], [ "operation" ] ], [ [ [ "Food coloring-15" ] ], [ [ "Paleolithic diet-12" ] ], [ [ "Paleolithic diet-12" ], "operation" ] ], [ [ [ "Fast Green FCF-1" ] ], [ [ "Paleolithic diet-3" ] ], [ "operation" ] ] ] }, { "qid": "5554641266a87ca41ef9", "term": "French Defence", "description": "Chess opening", "question": "Can French Defence initial move defend against four move checkmate?", "answer": false, "facts": [ "The French Defence involves moving pawn in front of the queen forward two spaces.", "The four move checkmate involves moving the queen and bishop to crowd the king.", "The four move checkmate cannot be defended by pawn in front of queen." ], "decomposition": [ "Which move is first played in the French defense in chess?", "What are some common techniques for making a four move checkmate in chess?", "Can #1 be used to defend against any of #2?" ], "evidence": [ [ [ [ "French Defence-3" ] ], [ [ "Scholar's mate-10", "Scholar's mate-3" ], "no_evidence" ], [ "operation" ] ], [ [ [ "French Defence-2" ] ], [ [ "Scholar's mate-2" ] ], [ [ "Scholar's mate-8" ], "operation" ] ], [ [ [ "French Defence-2" ], "no_evidence" ], [ [ "Scholar's mate-10", "Scholar's mate-2", "Scholar's mate-9" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "e42e88dce04b7f53ee4f", "term": "Pound sterling", "description": "Official currency of the United Kingdom and other territories", "question": "Was Emperor Commodus paid tribute in Pound sterling?", "answer": false, "facts": [ "Commodus was Roman Emperor until 192 AD.", "Coins featuring the image of Commodus were the currency during the late second century AD.", "The Pound sterling has origins with the fifth century AD Anglo Saxon pound." ], "decomposition": [ "When was Commodus Roman emperor?", "When did the Pound sterling originate?", "Was #1 before #2?" ], "evidence": [ [ [ [ "Commodus-1" ] ], [ [ "Pound sterling-19" ] ], [ "operation" ] ], [ [ [ "Commodus-11" ] ], [ [ "Pound sterling-22" ] ], [ "operation" ] ], [ [ [ "Commodus-1" ] ], [ [ "Pound sterling-19" ] ], [ "operation" ] ] ] }, { "qid": "7f06a5428c7ddf781d7b", "term": "3D printing", "description": "Additive process used to make a three-dimensional object", "question": "Do you need a large room if you want to get into 3D printing?", "answer": false, "facts": [ "Home 3D printers are sized to be able to sit on a desk or table.", "The accessories and materials needed for 3D Printers can be stored easily and efficiently in a box or tote." ], "decomposition": [ "What are the equipment needed for 3D printing?", "How were #1 designed to be accommodated?", "Would #2 require a larger-than-average sized room?" 
], "evidence": [ [ [ [ "3D printing-47" ], "no_evidence" ], [ [ "3D printing-47" ], "no_evidence" ], [ [ "3D printing-47" ], "no_evidence" ] ], [ [ [ "3D printing-47" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "3D printing processes-39" ] ], [ [ "3D printing processes-39" ] ], [ "operation" ] ] ] }, { "qid": "c47897304b87477d7622", "term": "Second Coming", "description": "Christian and Islamic belief regarding the future (or past) return of Jesus after his ascension", "question": "Does Woody Allen await the Second Coming?", "answer": false, "facts": [ "The Second Coming refers to Jesus Christ returning to earth", "Christians and Muslims believe in Jesus Christ", "Woody Allen is Jewish" ], "decomposition": [ "Which religious groups believe in the second coming?", "Does Woody Allen belong to any of #1?" ], "evidence": [ [ [ [ "Second Coming-1" ] ], [ [ "Woody Allen-4" ], "operation" ] ], [ [ [ "Second Coming-1" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Second Coming-1" ] ], [ [ "Woody Allen-4" ] ] ] ] }, { "qid": "147a3c2249183b05d721", "term": "Lust", "description": "Human emotion", "question": "Do you have to pass through circle of lust to find Saladin in Dante's Inferno?", "answer": false, "facts": [ "Dante's Inferno was a book written by Dante Alighieri that outlines 9 circles of hell.", "The circle of lust is the second circle in Dante's Inferno.", "Saladin is placed in the first circle of hell in Dante's Inferno.", "The first circle of hell is limbo which is reserved for virtuous unbaptized pagans." ], "decomposition": [ "In Dante's Inferno, what circle is for people guilty of lust?", "In Dante's Inferno, what circle is Saladin in?", "Would someone traversing the Inferno pass through #2 before #1?" ], "evidence": [ [ [ [ "Inferno (Dante)-13" ] ], [ [ "Inferno (Dante)-8", "Inferno (Dante)-9", "Limbo-1" ] ], [ "operation" ] ], [ [ [ "Dante's Inferno (song)-3" ] ], [ [ "Dante's Inferno: An Animated Epic-5" ] ], [ "operation" ] ], [ [ [ "Inferno (Dante)-13" ] ], [ [ "Inferno (Dante)-8", "Inferno (Dante)-9" ] ], [ "operation" ] ] ] }, { "qid": "b1d517cad0745d8618d2", "term": "Intel", "description": "American semiconductor chip manufacturer", "question": "Could Intel products be purchased at McDonald's?", "answer": false, "facts": [ "Intel is a technology company that produces computer products such as processors, chipsets, and GPUs.", "McDonald's is a fast food franchise that sells food and beverage products." ], "decomposition": [ "What type of products does Intel produce?", "What kind of products does McDonald's sell?", "Is #1 included in #2?" ], "evidence": [ [ [ [ "Intel-1" ] ], [ [ "McDonald's-2" ] ], [ "operation" ] ], [ [ [ "Intel-44" ], "no_evidence" ], [ [ "McDonald's-29" ], "operation" ], [ "no_evidence" ] ], [ [ [ "Intel-1" ] ], [ [ "McDonald's-2" ] ], [ "operation" ] ] ] }, { "qid": "68046e03ebdc1a6c1d2a", "term": "Yellow pages", "description": "Telephone directory of businesses by category", "question": "Is the Yellow Pages the fastest way to find a phone number?", "answer": false, "facts": [ "The Yellow Pages is a book that contains alphabetized phone listings.", "Yellow pages involves going through many listings and remembering your alphabet.", "Google allows a person to type in a name quickly and look for a phone number.", "Household AI assistants like Echo allow people to merely speak a name and ask for number." 
], "decomposition": [ "How are the phone numbers organized in the Yellow Pages?", "To find a phone number in #1, what does one have to do?", "To find a phone number on Google, what does one have to do?", "Is #2 faster than #3?" ], "evidence": [ [ [ [ "Yellow pages-1" ] ], [ [ "Yellow pages-5" ], "no_evidence" ], [ [ "Google Search-3" ] ], [ "operation" ] ], [ [ [ "Yellow pages-1" ] ], [ [ "Yellow pages-5" ] ], [ [ "Google Search-16" ] ], [ "operation" ] ], [ [ [ "Yellow pages-1" ] ], [ "no_evidence" ], [ [ "Web search engine-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "b931fa0b6de758b4f1f8", "term": "Shinto", "description": "Ethnic religion of Japan", "question": "Do Shinto practitioners keep to a kosher diet?", "answer": false, "facts": [ "Shinto is a religion originating from Japan.", "Shinto does not have any dietary restrictions.", "Seafood including shellfish is a staple of the Japanese diet.", "Jewish kosher laws prohibit the consumption of shellfish." ], "decomposition": [ "Which type of seafood does the Jewish kosher laws prohibit?", "Are Shinto practitioners restricted from eating #1?" ], "evidence": [ [ [ [ "Kashrut-3" ] ], [ [ "Seafood-17", "Shinto-1" ], "no_evidence", "operation" ] ], [ [ [ "Kashrut-3" ] ], [ "no_evidence", "operation" ] ], [ [ [ "Seafood-33" ] ], [ [ "Shinto-65" ], "no_evidence", "operation" ] ] ] }, { "qid": "6cc5056659f843124e0b", "term": "Waiting staff", "description": "staff serving in restaurant or private homes", "question": "Are there some countries where waiting staff need no tip?", "answer": true, "facts": [ "In Japan, leaving a tip for a server is considered rude.", "In Denmark, servers and wait staff are well paid and tipping is very uncommon." ], "decomposition": [ "In how many countries is it socially acceptable to not tip the waiting staff?", "Is #1 greater than one?" ], "evidence": [ [ [ [ "Gratuity-15", "Gratuity-18", "Gratuity-36", "Gratuity-48" ] ], [ "operation" ] ], [ [ [ "Gratuity-2" ], "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Gratuity-19" ] ], [ "operation" ] ] ] }, { "qid": "7b75cf06f6fe3cdf08b4", "term": "Hamster", "description": "subfamily of mammals", "question": "Could a hamster experience two leap years?", "answer": false, "facts": [ "Pet hamsters typically have a maximum lifespan of three years.", "Leap years are typically separated by four years." ], "decomposition": [ "How long is the lifespan of a hamster?", "How many years are between two leap years?", "Is #1 longer than #2?" ], "evidence": [ [ [ [ "Hamster-27" ] ], [ [ "Leap year-16" ] ], [ "operation" ] ], [ [ [ "Hamster-27" ] ], [ [ "Leap year-16" ] ], [ "operation" ] ], [ [ [ "Hamster-27" ] ], [ [ "Leap year-6" ] ], [ "operation" ] ] ] }, { "qid": "e1ebc4b698eb5bd4fbbe", "term": "Citrus", "description": "genus of fruit-bearing plants (source of fruit such as lemons and oranges)", "question": "Is there a Marvel villain with the same name as a kind of citrus fruit?", "answer": true, "facts": [ "Mandarins are a type of orange popular in Asian cuisine.", "The Mandarin is also the name of a villain associated with Iron Man in the Marvel universe." ], "decomposition": [ "Which popular villains has Marvel's Ironman faced off against?", "Do any of #1's name also refer to a citrus fruit?" 
], "evidence": [ [ [ [ "Iron Man-24" ] ], [ [ "Mandarin orange-1" ] ] ], [ [ [ "Iron Man-24" ] ], [ [ "Mandarin orange-1" ], "operation" ] ], [ [ [ "Captain Citrus-1" ], "no_evidence" ], [ "no_evidence", "operation" ] ] ] }, { "qid": "423d83b2533271615c06", "term": "Breast cancer", "description": "cancer that originates in the mammary gland", "question": "Are amoebas safe from breast cancer?", "answer": true, "facts": [ "Breast cancer is a disease that occurs in the mammary tissues of mammals.", "Amoebas are single cell organisms that lack mammary tissue." ], "decomposition": [ "What tissue does breast cancer affect?", "Is having #1 a necessary condition for breast cancer?", "By #2, is it the case that if an organism lacks #1 they cannot get breast cancer?" ], "evidence": [ [ [ [ "Breast cancer-1" ] ], [ [ "Breast cancer-2" ] ], [ [ "Breast cancer-2" ] ] ], [ [ [ "Breast cancer-1" ] ], [ "operation" ], [ "operation" ] ], [ [ [ "Breast cancer-2" ] ], [ [ "Breast cancer-8" ] ], [ "operation" ] ] ] }, { "qid": "79595ffe6b93c5e84056", "term": "Selfie", "description": "Photographic self-portrait", "question": "Are selfies more dangerous than plague in modern times?", "answer": true, "facts": [ "There are an average of 7 human plague cases reported each year according to the CDC.", "Selfies have caused people to fall off of cliffs while trying to get the perfect picture.", "From October 2011 and November 2017, there were 259 selfie deaths in 137 incidents." ], "decomposition": [ "How many cases of the plague are there yearly?", "How many people die yearly while taking selfies?", "Is #2 greater than #1?" ], "evidence": [ [ [ [ "Epidemiology of plague-23" ] ], [ "no_evidence" ], [ "operation" ] ], [ [ [ "Epidemiology of plague-23" ], "operation" ], [ "no_evidence" ], [ "no_evidence" ] ], [ [ [ "Epidemiology of plague-1" ] ], [ [ "Selfie-53" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "a0c8f44c4b3ad35df66e", "term": "Catfish", "description": "order of fishes", "question": "Is a cory catfish likely to eat another living fish?", "answer": false, "facts": [ "The cory catfish is a fish that is described as a bottom feeder.", "The cory catfish feeds on food located at the bottom of an ocean.", "Fish cannot live too deep in oceans for very long because of the intense water pressure.", "The bottom of oceans is populated by algae, coral, and microorganisms." ], "decomposition": [ "What do cory catfish eat?", "Is fish part of #1?" ], "evidence": [ [ [ [ "Corydoras-5" ] ], [ [ "Corydoras-5" ], "operation" ] ], [ [ [ "Corydoras-5" ] ], [ "operation" ] ], [ [ [ "Corydoras-4", "Corydoras-5" ] ], [ "operation" ] ] ] }, { "qid": "2079d6c0fea33860fbc3", "term": "Achilles", "description": "Greek mythological hero", "question": "Does Thiago Moises May 13 2020 submission move hypothetically hurt Achilles?", "answer": true, "facts": [ "Thiago Moises is a mixed martial arts fighter in the UFC.", "Thiago Moises beat Michael Johnson by a heel hook submission.", "Greek hero Achilles had one weakness, his heel." ], "decomposition": [ " What was Thiago Moises' winning move the match he played on May 13 2020?", "Which part of the opponent's body did #1 affect?", "Which part of Achilles' body is his weaknes?", "Is #2 the same as #3?" 
], "evidence": [ [ [ "no_evidence" ], [ "no_evidence" ], [ [ "Achilles-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Heel-1" ] ], [ [ "Achilles-2" ] ], [ "operation" ] ], [ [ "no_evidence" ], [ [ "Paul Sass-6" ], "no_evidence" ], [ [ "Achilles-2" ] ], [ "operation" ] ] ] }, { "qid": "60fe2d35b5abf0f0652a", "term": "Alec Baldwin", "description": "American actor, writer, producer, and comedian", "question": "Does Alec Baldwin have more children than Clint Eastwood?", "answer": false, "facts": [ "Alec Baldwin has 5 children as of 2020.", "Actor Clint Eastwood has 7 children as of 2020." ], "decomposition": [ "How many children does Alec Baldwin presently have?", "How many children does Clint Eastwood presently have?", "Is #1 more than #2?" ], "evidence": [ [ [ [ "Alec Baldwin-42", "Alec Baldwin-47" ] ], [ [ "Clint Eastwood-65", "Clint Eastwood-66", "Clint Eastwood-67" ] ], [ "operation" ] ], [ [ [ "Alec Baldwin-47" ] ], [ "no_evidence" ], [ "no_evidence", "operation" ] ], [ [ [ "Alec Baldwin-47", "Kim Basinger-24" ] ], [ [ "Clint Eastwood-66", "Clint Eastwood-67", "Clint Eastwood-68", "Personal life of Clint Eastwood-1" ], "no_evidence" ], [ "operation" ] ] ] }, { "qid": "8c599ce83178d5c0f480", "term": "Prime Minister of the United Kingdom", "description": "Head of UK Government", "question": "Does highest US Court have enough seats for every Prime Minister of the United Kingdom since 1952?", "answer": false, "facts": [ "The highest court in the US is the Supreme Court.", "There are nine seats on the Supreme Court.", "There have been fifteen Prime Ministers of the United Kingdom since 1952." ], "decomposition": [ "What is the highest United States court?", "How many positions are there in #1?", "How many United Kingdom Prime Ministers have there been since 1952?", "Is #2 equal to or greater than #3?" ], "evidence": [ [ [ [ "Supreme Court of the United States-1" ] ], [ [ "Supreme Court of the United States-20" ] ], [ [ "Anthony Eden-1", "Boris Johnson-1" ], "no_evidence" ], [ "operation" ] ], [ [ [ "Supreme Court of the United States-1" ] ], [ [ "Supreme Court of the United States-20" ] ], [ [ "Alec Douglas-Home-1", "Anthony Eden-1", "Boris Johnson-1", "David Cameron-1", "Edward Heath-1", "Gordon Brown-1", "Harold Macmillan-1", "Harold Wilson-1", "James Callaghan-1", "John Major-1", "Margaret Thatcher-1", "Theresa May-1", "Tony Blair-1", "Winston Churchill-1" ] ], [ "operation" ] ], [ [ [ "Supreme Court of the United States-1" ] ], [ [ "Supreme Court of the United States-20" ] ], [ "no_evidence" ], [ "no_evidence" ] ] ] }, { "qid": "76f183dc037e7b30e2a2", "term": "Pi", "description": "Ratio of the circumference of a circle to its diameter", "question": "Was Pi an acceptable number of children in 1980s China?", "answer": false, "facts": [ "Pi, the ratio of a circle's circumference to diameter, is equal to 3.14.", "In the 1980's China instituted a one-child policy.", "People that violated China's one child policy were fined heavily and some were sterilized." ], "decomposition": [ "How many children were Chinese parents limited to by the One-child policy in the 1980s?", "What is the value of the number pi?", "Is #2 less than or equal to #1?" ], "evidence": [ [ [ [ "One-child policy-1" ] ], [ [ "Pi-1" ] ], [ "operation" ] ], [ [ [ "One-child policy-1" ] ], [ [ "Pi-1" ] ], [ "operation" ] ], [ [ [ "One-child policy-1" ] ], [ [ "Pi-1" ] ], [ "operation" ] ] ] } ]