Posts List Oldapi View
We shared this request example with FAB participants:

```python
url_qparams = {
    "limit": count,
    "offset": offset,
    "has_group": "false",
    "order_by": "-activity",
    "forecast_type": "binary",
    "project": tournament_id,
    "status": "open",
    "type": "forecast",
    "include_description": "true",
}
url = f"{api_info.base_url}/questions/"
response = requests.get(
    url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams
)
```
But we don't want to support all of these parameters; the relevant ones are:

- `order_by`
- `status`
- `project`
- `forecast_type` (we ignore this, but assume it's binary; FAB only supports binary for now)
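Given that, a pared-down client only needs the supported filters plus the `limit`/`offset` paging the endpoint already uses. Below is a minimal sketch of that, reusing the same `requests` pattern as the snippet above; `API_BASE`, `TOKEN`, and `TOURNAMENT_ID` are placeholders, not values taken from this document:

```python
import requests

API_BASE = "https://www.metaculus.com/api2"  # matches the URLs in the example response below
TOKEN = "your-api-token"                     # placeholder
TOURNAMENT_ID = 0                            # placeholder project id

def fetch_questions(limit: int = 20) -> list[dict]:
    """Page through /questions/ using only the parameters we intend to support."""
    results: list[dict] = []
    offset = 0
    while True:
        response = requests.get(
            f"{API_BASE}/questions/",
            headers={"Authorization": f"Token {TOKEN}"},
            params={
                "order_by": "-activity",
                "status": "open",
                "project": TOURNAMENT_ID,
                "limit": limit,
                "offset": offset,
                # "forecast_type" is ignored server-side; binary is assumed.
            },
        )
        response.raise_for_status()
        payload = response.json()
        results.extend(payload["results"])
        if payload["next"] is None:
            break
        offset += limit
    return results
```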
Example request:

```
GET /api2/questions/?format=api&offset=5900
```
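Each element of `results` nests the question and its aggregations fairly deeply. As rough orientation, a consumer might pull out a handful of fields like this; it is only a sketch against the payload shape shown in the raw response below, and `summarize_post` is a hypothetical helper rather than anything the API provides:

```python
def summarize_post(post: dict) -> dict:
    """Pull a few commonly used fields out of one entry of `results`."""
    question = post["question"]
    latest = question["aggregations"]["recency_weighted"].get("latest") or {}
    centers = latest.get("centers") or []
    return {
        "post_id": post["id"],
        "title": post["title"],
        "question_type": question["type"],     # FAB only uses "binary"
        "status": post["status"],
        "resolution": question["resolution"],  # "yes" / "no", or None while still open
        # Recency-weighted community prediction, i.e. P(yes), when available.
        "community_prediction": centers[0] if centers else None,
    }
```

The raw response body returned for the request above: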
{ "count": 6412, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5920", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5880", "results": [ { "id": 1314, "title": "Will interest in Veganism decrease prior to 2020?", "short_title": "", "url_title": "", "slug": "will-interest-in-veganism-decrease-prior-to-2020", "author_id": 103733, "author_username": "jzima", "coauthors": [], "created_at": "2018-07-27T15:56:10.162654Z", "published_at": "2018-07-30T07:00:00Z", "edited_at": "2025-09-05T17:29:25.480770Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-30T07:00:00Z", "comment_count": 10, "status": "resolved", "resolved": true, "actual_close_time": "2018-12-31T12:00:00Z", "scheduled_close_time": "2018-12-31T12:00:00Z", "scheduled_resolve_time": "2019-01-31T23:52:00Z", "actual_resolve_time": "2019-01-31T23:52:00Z", "open_time": "2018-07-30T07:00:00Z", "nr_forecasters": 141, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" }, { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" } ] }, "question": { "id": 1314, "title": "Will interest in Veganism decrease prior to 2020?", "created_at": "2018-07-27T15:56:10.162654Z", "open_time": "2018-07-30T07:00:00Z", "cp_reveal_time": "2018-07-31T12:23:25.200243Z", "spot_scoring_time": "2018-07-31T12:23:25.200243Z", "scheduled_resolve_time": "2019-01-31T23:52:00Z", "actual_resolve_time": "2019-01-31T23:52:00Z", "resolution_set_time": "2019-01-31T23:52:00Z", "scheduled_close_time": "2018-12-31T12:00:00Z", "actual_close_time": 
"2018-12-31T12:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "It was previously asked <a href='https://www.metaculus.com/questions/581/will-interest-in-veganism-further-increase-in-2018/'> whether the interest in Veganism would further increase in 2018</a>. While that question has not resolved yet (as of the writing of this question), it would be interesting to know whether, if interest in Veganism should no longer be increasing, it will be decreasing by any substantial amount.\n\nOnce again, we shall be using Google Trends and will be looking at the <a href=' https://trends.google.com/trends/explore?date=all&q=%2Fm%2F07_hy'>Topic of Veganism</a>. Note that this is to be differentiated from the Search Term 'Veganism' and is the Worldwide interest, rather than just the interest in the United States.\n\nIt is asked:\n\n<strong>Will the interest in Veganism fail to register a monthly peak that beats the current record set in January 2018 prior to 2020?</strong>\n\nAND \n\n<strong>Will there be at least one monthly peak (or rather trough) with an interest less than half of that of January 2018 (<50) prior to 2020?</strong>\n\nA positive resolution requires both questions to be answered in the affirmative. Note that the last time (as of the writing of this question) that there was a monthly peak/trough with a value less than 50 was September 2015. 
The question shall not be closed prior to the set closing time, even if negative resolution is triggered.", "fine_print": "", "post_id": 1314, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1546241902.832057, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.04 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.26 ] } ], "latest": { "start_time": 1546241902.832057, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.04 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.26 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.15176110192667672 ], "histogram": [ [ 0.0, 4.122380193858158, 0.8387340141219749, 0.01143516713121781, 1.0477115763045182, 1.3303295506291641, 0.143426288429764, 0.8028304409073989, 0.7301680579778405, 0.0, 2.9189650640854046, 0.0007239887072712132, 0.4215937083529515, 1.0599663452719668, 0.04424484460599778, 0.017565158505588555, 0.7463665495119066, 0.0, 0.020384638741694616, 0.1253828884413072, 1.5091194562579995, 0.0, 0.0, 0.0015197988629243285, 0.0, 0.693013985237275, 1.524269888468738, 0.20093738782205067, 1.2102086706793107, 0.0, 1.2826976703033934, 0.0008430278500080044, 0.010109980267243999, 0.0, 0.0, 0.09681531611681833, 0.0, 0.274903035196283, 0.001994248806695997, 0.0023734986418368956, 0.030318448378044826, 0.0, 0.0, 0.0009346274086930763, 0.008322702847735605, 0.30201945746018216, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2881724266640099, 0.0, 0.0, 0.0, 0.01950492748404346, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.000544608623388499, 0.0, 0.22717440747750223, 0.0, 0.0, 0.18241147015581644, 0.0, 0.00011787156455005717, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0016663552678108826 ] ] }, "score_data": { "peer_score": 16.98577720521654, "coverage": 0.998083987887811, "baseline_score": 81.7333335457777, "spot_peer_score": 5.33511813225531, "peer_archived_score": 16.98577720521654, "baseline_archived_score": 81.7333335457777, "spot_peer_archived_score": 5.33511813225531 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1545319040.96333, "end_time": null, "forecaster_count": 136, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1545319040.96333, "end_time": null, "forecaster_count": 136, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8463840737153643, 0.15361592628463577 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 15, "user_vote": null }, "forecasts_count": 205, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1313, "title": "Will Daenerys Targaryen die in season eight of Game of Thrones?", "short_title": "", "url_title": "", "slug": "will-daenerys-targaryen-die-in-season-eight-of-game-of-thrones", "author_id": 105802, "author_username": "Pythia", "coauthors": [], "created_at": "2018-07-26T12:23:01.779576Z", "published_at": "2018-07-29T07:00:00Z", "edited_at": "2025-09-05T17:29:01.673291Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-29T07:00:00Z", "comment_count": 5, "status": "resolved", "resolved": true, "actual_close_time": "2019-04-01T12:00:00Z", "scheduled_close_time": "2019-04-01T12:00:00Z", "scheduled_resolve_time": "2019-05-21T10:03:00Z", "actual_resolve_time": "2019-05-21T10:03:00Z", "open_time": "2018-07-29T07:00:00Z", 
"nr_forecasters": 125, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32606, "name": "2018-2019 Leaderboard", "slug": "2018_2019_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 1313, "title": "Will Daenerys Targaryen die in season eight of Game of Thrones?", "created_at": "2018-07-26T12:23:01.779576Z", "open_time": "2018-07-29T07:00:00Z", "cp_reveal_time": "2018-07-31T07:00:00Z", "spot_scoring_time": "2018-07-31T07:00:00Z", "scheduled_resolve_time": "2019-05-21T10:03:00Z", "actual_resolve_time": "2019-05-21T10:03:00Z", "resolution_set_time": "2019-05-21T10:03:00Z", "scheduled_close_time": "2019-04-01T12:00:00Z", "actual_close_time": "2019-04-01T12:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "At the end of <a href=\"https://en.wikipedia.org/wiki/Game_of_Thrones_(season_7)\" target=\"_blank\">season seven</a> of HBO's drama Game of Thrones, to fight the <a href=\"http://gameofthrones.wikia.com/wiki/Night_King\" target=\"_blank\">Night King</a>, <a href=\"https://en.wikipedia.org/wiki/Daenerys_Targaryen\" target=\"_blank\">Daenerys Targaryen</a> has allied herself with <a href=\"https://en.wikipedia.org/wiki/Cersei_Lannister\" target=\"_blank\">Cersei Lannister</a>, who plots to betray her, and <a href=\"https://en.wikipedia.org/wiki/Jon_Snow_(character)\" target=\"_blank\">Jon Snow</a>, with whom she has slept, unaware that Snow is her nephew. To the north, the Night King has revived one of her dragons and breached the <a href=\"http://gameofthrones.wikia.com/wiki/The_Wall\" target=\"_blank\">Wall</a>.\n\nTargaryen is one of the more heroic characters and a potential monarch. 
On the other hand, the show is known for killing protagonists and she has several strong competitors.\n\n<strong><em>Will Daenerys Targaryen die?</em></strong>\n\nThe question resolves positively if Targaryen dies in season eight, even if she is later resurrected or turned into a <a href=\"http://gameofthrones.wikia.com/wiki/Wights\" target=\"_blank\">wight</a> or a <a href=\"https://en.wikipedia.org/wiki/White_Walker\" target=\"_blank\">white walker</a>.", "fine_print": "", "post_id": 1313, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1554076839.924845, "end_time": null, "forecaster_count": 125, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.52 ] } ], "latest": { "start_time": 1554076839.924845, "end_time": null, "forecaster_count": 125, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.52 ], "forecast_values": [ 0.55, 0.45 ], "means": [ 0.4266698146994178 ], "histogram": [ [ 0.0, 0.16219960354727092, 0.0, 0.0, 0.0, 7.882431206593951e-05, 0.0, 0.0, 0.0, 0.0, 0.6636434323397195, 0.0010901489464489615, 0.0, 0.0, 0.0, 0.03022172810477587, 0.04706143124907007, 0.0, 0.0, 0.4143526466223599, 1.261791388899668, 0.0, 0.0, 0.0, 0.9141108990921084, 1.894699979069873, 0.003042197750969866, 0.12976911087747617, 0.24793625726914664, 0.0, 0.3153353349295456, 5.736212445568014e-05, 0.0009704999563037674, 0.7089255426264287, 0.0, 0.3202720777319017, 0.0, 0.0, 0.0, 0.00044553343099167526, 0.3525968667820022, 0.5018951760516415, 0.5586141251405704, 0.016064093336548183, 0.821262823375041, 1.8616501989150303, 1.3257205206527143, 0.0, 0.0, 0.7497090569299235, 1.2775285086152917, 0.19312054463551276, 1.1062787659234932, 0.006111534710645336, 0.0, 0.9570854468719441, 0.500448088044274, 0.0008611565368432893, 0.0, 0.3228833177507363, 0.24358893720676744, 0.002770195827676131, 0.0, 0.0, 0.08045269961657854, 0.017243276230636304, 1.3429008553539088, 0.27780247181278345, 0.0, 0.0, 0.9578655256121333, 0.034386535139458996, 0.10104943057971563, 0.0, 0.00028010671917520346, 0.01048913320399153, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09550477610540184, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007614090047632364, 0.0, 0.0, 0.0, 0.031494265802121765 ] ] }, "score_data": { "peer_score": 35.61062066500118, "coverage": 0.9945212497375009, "baseline_score": -18.51175513813644, "spot_peer_score": 40.21393365772635, "peer_archived_score": 35.61062066500118, "baseline_archived_score": -18.51175513813644, "spot_peer_archived_score": 40.21393365772635 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1554076285.104066, "end_time": null, "forecaster_count": 125, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1554076285.104066, "end_time": null, "forecaster_count": 125, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.745627743014524, 0.254372256985476 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 193, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1301, "title": "Will the UK leave the EU by the end of June 2020?", "short_title": "Brexit before July 1, 2020", "url_title": "Brexit before July 1, 2020", "slug": "brexit-before-july-1-2020", "author_id": 100626, "author_username": "gjm", "coauthors": [], 
"created_at": "2018-07-24T10:34:18.927096Z", "published_at": "2018-07-27T07:00:00Z", "edited_at": "2025-09-05T17:29:20.308229Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-27T07:00:00Z", "comment_count": 14, "status": "resolved", "resolved": true, "actual_close_time": "2019-01-01T00:00:00Z", "scheduled_close_time": "2019-01-01T00:00:00Z", "scheduled_resolve_time": "2020-02-01T00:53:00Z", "actual_resolve_time": "2020-02-01T00:53:00Z", "open_time": "2018-07-27T07:00:00Z", "nr_forecasters": 208, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32606, "name": "2018-2019 Leaderboard", "slug": "2018_2019_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 1301, "title": "Will the UK leave the EU by the end of June 2020?", "created_at": "2018-07-24T10:34:18.927096Z", "open_time": "2018-07-27T07:00:00Z", "cp_reveal_time": "2018-07-27T17:33:29.548981Z", "spot_scoring_time": "2018-07-27T17:33:29.548981Z", "scheduled_resolve_time": "2020-02-01T00:53:00Z", "actual_resolve_time": "2020-02-01T00:53:00Z", "resolution_set_time": "2020-02-01T00:53:00Z", "scheduled_close_time": "2019-01-01T00:00:00Z", "actual_close_time": "2019-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In June 2016, the people of the UK voted narrowly in favour of leaving the European Union. Things have not gone altogether smoothly since then. 
Negotiations between the UK and the EU have been acrimonious and a \"no-deal\" exit (whose economic consequences would be disagreeable) seems a real possibility; in July 2018 two cabinet ministers resigned from the government over disagreements about how to handle Brexit, and the government is widely perceived as unstable; the people of the UK remain deeply divided over whether Brexit was ever a good idea, whether it is a good idea now, and what it should look like if so.\n\nThe UK is due to leave the EU in March of 2019. This date could be delayed, if all the EU member countries agree. The UK's exit could probably be abandoned, if the government so chose. This question asks whether the UK will leave on something like the original timescale.\n\n**Will the UK leave the EU by the end of June 2020?**\n\nThis question will resolve when any of the following conditions occurs; the first to occur determines the resolution:\n\n* The UK is no longer a member of the EU. (Positive resolution.)\n* 2020-07-01 is reached. (Negative resolution.)\n* There is no longer a nation called the United Kingdom. (Ambiguous resolution.)\n* There is no longer an international organization called the European Union. (Ambiguous resolution.)\n\nA relevant other question is [Brexit negotiations completed by March 2019?](https://www.metaculus.com/questions/436/brexit-negotiations-completed-by-march-2019/) but these resolve differently if there is a short delay, or (as I interpret the other question) if there is a \"no-deal\" Brexit.", "fine_print": "", "post_id": 1301, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1546279239.869355, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": [ 0.66 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.86 ] } ], "latest": { "start_time": 1546279239.869355, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": [ 0.66 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.86 ], "forecast_values": [ 0.25, 0.75 ], "means": [ 0.7367209006633094 ], "histogram": [ [ 0.0, 0.14604833708210485, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09640867148364023, 0.0, 0.6555995609163123, 0.0, 0.0, 0.0, 0.0, 0.0, 0.004933179732289293, 0.0, 0.0, 0.0, 0.014005265070627923, 6.596583651309554e-05, 0.0, 5.936597292991513e-05, 0.0, 0.007982193303432979, 0.0, 0.0, 0.03600848273933866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0004142853703872297, 0.45463794204826496, 0.03737787316687237, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6815352705959294, 0.0, 0.0, 0.2232736525118257, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0487792212496032, 0.9628482267327192, 0.0011700058941325696, 0.6325695790123903, 0.011420666307512093, 0.3833666207692833, 1.8537805460353867, 0.7190063316440114, 0.0, 0.02691000579462427, 0.6663808882116096, 3.9569757877790055, 0.8846477884854809, 0.20659962793558748, 0.08218015312572788, 0.053232838459902294, 0.8408316195939196, 0.8703418982992323, 0.0, 1.1336335275107747, 8.090741337199523e-05, 2.5097060508707894, 0.01916767097166281, 0.9328554621470516, 0.0, 0.2117840403451051, 0.842039926915241, 1.1718138384860077, 0.346389613781711, 0.8879304149064352, 0.0, 3.4216972683254507, 2.298778166645556e-05, 0.0, 0.0010360180975858549, 0.0, 0.4187808476953618, 0.0, 0.0, 0.512390222754413, 0.3785792508124327 ] ] }, "score_data": { "peer_score": 15.085131272218028, "coverage": 0.9999348179042277, "baseline_score": 64.97576093242344, "spot_peer_score": 46.91792204923886, "peer_archived_score": 15.085131272218028, "baseline_archived_score": 64.97576093242344, 
"spot_peer_archived_score": 46.91792204923886 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1546204130.912415, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1546204130.912415, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.38144590162316305, 0.618554098376837 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 35, "user_vote": null }, "forecasts_count": 354, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1139, "title": "Will a scientific mission to the outer solar system be assigned to the Falcon Heavy by mid-2023?", "short_title": "", "url_title": "", "slug": "will-a-scientific-mission-to-the-outer-solar-system-be-assigned-to-the-falcon-heavy-by-mid-2023", "author_id": 106062, "author_username": "Tidearis", "coauthors": [], "created_at": "2018-07-20T07:35:09.786630Z", "published_at": "2018-07-25T07:00:00Z", "edited_at": "2025-09-05T17:28:59.162967Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-25T07:00:00Z", "comment_count": 44, "status": "resolved", "resolved": true, "actual_close_time": "2020-07-31T12:00:00Z", "scheduled_close_time": "2020-07-31T12:00:00Z", "scheduled_resolve_time": "2021-07-23T22:24:00Z", "actual_resolve_time": "2021-07-23T22:24:00Z", "open_time": "2018-07-25T07:00:00Z", "nr_forecasters": 176, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 1139, "title": "Will a scientific mission to the outer solar system be assigned to the Falcon Heavy by mid-2023?", "created_at": "2018-07-20T07:35:09.786630Z", "open_time": "2018-07-25T07:00:00Z", "cp_reveal_time": "2018-07-26T20:45:01.169585Z", "spot_scoring_time": "2018-07-26T20:45:01.169585Z", "scheduled_resolve_time": "2021-07-23T22:24:00Z", "actual_resolve_time": "2021-07-23T22:24:00Z", 
"resolution_set_time": "2021-07-23T22:24:00Z", "scheduled_close_time": "2020-07-31T12:00:00Z", "actual_close_time": "2020-07-31T12:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "NASA, sometimes in cooperation with other countries, launches missions to space to study planets, stars and other celestial bodies. In order to do this, scientists and engineers have to account for factors, such as money and weight.\n\nTo make a decent mission and to gather as much data as possible, a probe that NASA could send has to carry with it many scientific instruments. The more instruments there are, the heavier the probe is (and more expensive). The heavier the probe is, the more powerful the rocket to send it has to be. The more powerful the rocket is, the more expensive the mission is. Money and weight are the two main factors to make a mission to space.\n\nThese days, NASA's budget is more and more reduced. It was 0.47% of the federal budget in 2017 whereas it could reach 4.41% during the Apollo era. Only a few missions can be made, and most of them concern the study of Earth, stars, or the internal solar system. Even though some missions plan to study Jupiter, its moons, and sometimes Saturn, not a single mission, not even as a project, is planified to study space beyond the orbit of Saturn at the moment. The last and only mission that studied Uranus and Neptune was Voyager 2 and New Horizons studied Pluto for a few hours after a 9 years journey through space.\n\nBut earlier this year, the Falcon Heavy was launched for the first time. SpaceX's new heavy launcher, and current most powerful rocket is capable of launching 63,800 kg to Low Earth Orbit (LEO) and 3,500 kg to Pluto, for a cost of 150M $, whereas the Delta IV heavy, the previous most powerful rocket, could only launch 28 790 kg to LEO for a cost of 400M $. The price of the kg in space is almost 6 times lower for the Falcon Heavy than the Delta IV Heavy (2351 $/KG → FH -- 13893 $/KG → D4H).\n\nTherefore, the Falcon Heavy offers to NASA and other agencies the possibility to multiply its capacities of studying space and for a lower cost.\n\n*** Will a scientific mission to the outer solar system be assigned to the Falcon Heavy within the next 5 years? ***\n\nresolves positive if by end of July 2023, a credible media or other announcement indicates that a contract has been signed with SpaceX as one party, for the purpose of a scientific payload being launched on a Falcon Heavy rocket. \"Scientific payload\" here shall be taken to be a payload paid for by a nonprofit or government agency with scientific but not military, communication, etc. 
application.", "fine_print": "", "post_id": 1139, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1596192920.284624, "end_time": null, "forecaster_count": 176, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.52 ], "interval_upper_bounds": [ 0.62 ] } ], "latest": { "start_time": 1596192920.284624, "end_time": null, "forecaster_count": 176, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.52 ], "interval_upper_bounds": [ 0.62 ], "forecast_values": [ 0.48, 0.52 ], "means": [ 0.5285371834463644 ], "histogram": [ [ 0.0, 0.6122561315717072, 0.0, 0.022830311346795272, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.000758827081129534, 0.0, 0.0, 0.0, 0.0, 0.033276337654742354, 0.0, 0.0, 0.0, 0.0, 0.00045340046305732443, 0.0, 0.7371102798017084, 0.020534114751101673, 0.0, 0.0585551595056967, 0.0, 4.090650335777065e-05, 0.038343080541852814, 0.0, 0.6859451434022531, 0.025563085225457808, 0.004550622294224255, 0.518545454361061, 0.543354819793116, 0.3477181330935116, 0.0, 0.1928989053337517, 0.09462940209889918, 0.003078895384026612, 0.6699286947644898, 0.0, 0.0, 0.0, 0.1357321946137585, 0.21253354785716155, 0.0, 0.2592081496082032, 0.01946533459818547, 0.5613469899327372, 3.142981543380468, 1.9949297371088748, 1.6984692593615405, 1.271331471979479, 0.5815769816956772, 1.0804168566291117, 1.6063910290541124, 0.01491809431212289, 0.0, 0.1299135706193567, 0.7672666104405723, 0.002690495415291645, 0.8392053876812645, 0.001045379894743468, 0.5835780350339916, 1.0764078545613596, 0.40866075936788904, 1.3151260761567918, 0.006602625472505505, 0.04168264207615671, 0.8834128221567685, 0.003294740553424223, 0.0, 0.026706497251433908, 0.005843293084191481, 0.01212727511776144, 0.0, 0.06528956297203924, 0.0, 0.0, 1.2065964472588122, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.008385780650690565, 0.0, 0.0, 0.009989256399493319, 0.06516417602184403, 0.0, 0.006212782662818872, 0.0, 0.25538852939339174, 0.0, 0.0, 0.0, 0.12587766985236018 ] ] }, "score_data": { "peer_score": 17.01645089152116, "coverage": 0.9993950007266356, "baseline_score": 23.92858324992568, "spot_peer_score": -32.59450769598724, "peer_archived_score": 17.01645089152116, "baseline_archived_score": 23.92858324992568, "spot_peer_archived_score": -32.59450769598724 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1596192920.326833, "end_time": null, "forecaster_count": 176, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1596192920.326833, "end_time": null, "forecaster_count": 176, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5003778901945801, 0.49962210980541993 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 25, "user_vote": null }, "forecasts_count": 344, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1100, "title": "Will Trump be reelected president if the election is called by November 10th 2020?", "short_title": "Trump reelected by November 10th 2020", "url_title": "Trump reelected by November 10th 2020", "slug": "trump-reelected-by-november-10th-2020", "author_id": 103473, "author_username": "shiranpasternak", "coauthors": [], "created_at": "2018-07-17T17:37:26.935091Z", "published_at": "2018-07-21T07:00:00Z", "edited_at": "2025-09-05T17:28:58.908569Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-21T07:00:00Z", "comment_count": 1024, "status": "resolved", 
"resolved": true, "actual_close_time": "2020-11-03T05:00:00Z", "scheduled_close_time": "2020-11-03T05:00:00Z", "scheduled_resolve_time": "2020-11-07T16:30:50.472315Z", "actual_resolve_time": "2020-11-07T16:30:50.472315Z", "open_time": "2018-07-21T07:00:00Z", "nr_forecasters": 2186, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32593, "name": "2016-2020 Leaderboard", "slug": "2016_2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 1100, "title": "Will Trump be reelected president if the election is called by November 10th 2020?", "created_at": "2018-07-17T17:37:26.935091Z", "open_time": "2018-07-21T07:00:00Z", "cp_reveal_time": "2018-07-21T12:14:33.814787Z", "spot_scoring_time": "2018-07-21T12:14:33.814787Z", "scheduled_resolve_time": "2020-11-07T16:30:50.472315Z", "actual_resolve_time": "2020-11-07T16:30:50.472315Z", "resolution_set_time": "2020-11-07T16:30:50.472315Z", "scheduled_close_time": "2020-11-03T05:00:00Z", "actual_close_time": "2020-11-03T05:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": 
null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "<small>Note: This question is conditional on timely results. We made another one [here](https://www.metaculus.com/questions/5383/) which is not.</small>\n\nWill President Donald Trump be reelected for a second term in 2020? This means that he will \n\n(a) not be removed beforehand, \n\n(b) actually run for a second term, and \n\n(c) legally defeat other presidential nominees. \n\nResolution is based on consensus major-media declaration that enough states have been called for or against Trump to give more than 50% of the total electors to one candidate.\n\nIt does _not_ imply that Trump will necessarily serve a second term, even if elected, and in particular does not account for events that may occur between election night and swearing in.\n\nResolution is ambiguous if a major media call has not been made as of one week after the election; in this case one or more alternative questions will be launched.\n\nIf Trump dies or is removed from office by any means prior to the election, question resolves negative, and will retroactively close one week prior to the takeover of the Presidential office by Pence or another party.\n\nIf Trump publicly declares that he will not seek re-election, question resolves negative on the date when it becomes essentially impossible for him to become the Republican nominee, with a retroactive closing date 1 day prior to the public declaration.", "fine_print": "", "post_id": 1100, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1604378985.327, "end_time": null, "forecaster_count": 2185, "interval_lower_bounds": [ 0.09 ], "centers": [ 0.15 ], "interval_upper_bounds": [ 0.28 ] } ], "latest": { "start_time": 1604378985.327, "end_time": null, "forecaster_count": 2185, "interval_lower_bounds": [ 0.09 ], "centers": [ 0.15 ], "interval_upper_bounds": [ 0.28 ], "forecast_values": [ 0.85, 0.15 ], "means": [ 0.21525628361328555 ], "histogram": [ [ 0.0, 4.879297667512604, 0.23316344534582575, 0.24491915089737296, 0.017722401273603403, 4.5802296273275935, 0.8376891891534199, 2.834485967842982, 8.08152739670513, 3.354172367557784, 10.921864576010764, 1.5837035347910249, 1.7938723138047912, 3.072918976855469, 2.2118534155244576, 3.1723712089004836, 4.117218503926689, 0.34385515168162983, 2.8170088624791436, 1.5264717809823334, 6.861961159671359, 0.79002728758104, 0.10907438367600047, 1.3683029250213958, 0.9053970701204326, 1.6257180224635361, 0.020331758586241165, 0.029415443324378857, 1.9872287434060658, 0.0, 2.730073700046985, 0.38836025160407295, 0.6135641234456541, 0.9115159950783198, 0.07413988701895595, 3.0173712782397772, 0.8082921107273789, 0.5982140478279263, 1.0481984964389466, 0.0016214278797290667, 1.3388748778163135, 0.005535235954848679, 0.048856131623305384, 0.6379941401406554, 0.01664539626131756, 0.07208725316206424, 0.036417210745465084, 0.0004384520071523475, 0.00017130739302770976, 0.025882598956163377, 0.3493548231845048, 1.048356104551944, 0.012310413279342226, 0.24040553699024247, 1.4693930780420648, 0.5536207891821037, 0.056730770617775, 0.016472272429700303, 0.0, 0.0059498754845730495, 0.1827060855608095, 0.0395868331438225, 0.001135986799807562, 0.0007130823988809755, 0.04215228305161596, 0.940533650234061, 0.0005179616241925696, 0.0013719677010596044, 5.136858675063792e-06, 1.2948294665324557e-06, 0.09666043456079676, 0.0312941492464835, 0.010525679397994944, 
0.010792448209448998, 0.8699993807317041, 0.00836134585324367, 0.050061040455093936, 0.00040452928219973427, 1.3638625469507269e-11, 5.465186231905845e-10, 0.018799638917962352, 0.0021630963839220124, 4.5469533877000456e-07, 0.0, 0.0001434827309541343, 0.13667645840052486, 1.5232422755041983e-16, 5.028228755003656e-15, 0.15127429209612414, 3.5174991477007136e-08, 0.03843758837417772, 2.7249600599310417e-08, 0.012843624276158169, 0.004895919305317371, 1.2148677678540606e-11, 0.000623086002444511, 0.0, 6.590244437359188e-10, 0.010734819112251699, 2.87679278643699 ] ] }, "score_data": { "peer_score": 31.404942559548918, "coverage": 0.9999846045760084, "baseline_score": 20.594457804804183, "spot_peer_score": -4.356966133527342, "peer_archived_score": 31.404942559548918, "baseline_archived_score": 20.594457804804183, "spot_peer_archived_score": -4.356966133527342 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1604378986.487666, "end_time": null, "forecaster_count": 2185, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1604378986.487666, "end_time": null, "forecaster_count": 2185, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9262168670037924, 0.07378313299620766 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 446, "user_vote": null }, "forecasts_count": 6512, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1095, "title": "Will Donald Trump's approval rating decline noticeably in the wake of the Helsinki Summit?", "short_title": "", "url_title": "", "slug": "will-donald-trumps-approval-rating-decline-noticeably-in-the-wake-of-the-helsinki-summit", "author_id": 100014, "author_username": "converse", "coauthors": [], "created_at": "2018-07-17T03:39:50.099876Z", "published_at": "2018-07-18T07:00:00Z", "edited_at": "2025-09-05T17:29:06.277960Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-18T07:00:00Z", "comment_count": 12, "status": "resolved", "resolved": true, "actual_close_time": "2018-07-22T04:00:00Z", "scheduled_close_time": "2018-07-22T04:00:00Z", "scheduled_resolve_time": "2018-07-31T04:01:00Z", "actual_resolve_time": "2018-07-31T04:01:00Z", "open_time": "2018-07-18T07:00:00Z", "nr_forecasters": 67, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, 
"default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 1095, "title": "Will Donald Trump's approval rating decline noticeably in the wake of the Helsinki Summit?", "created_at": "2018-07-17T03:39:50.099876Z", "open_time": "2018-07-18T07:00:00Z", "cp_reveal_time": "2018-07-18T22:14:13.506867Z", "spot_scoring_time": "2018-07-18T22:14:13.506867Z", "scheduled_resolve_time": "2018-07-31T04:01:00Z", "actual_resolve_time": "2018-07-31T04:01:00Z", "resolution_set_time": "2018-07-31T04:01:00Z", "scheduled_close_time": "2018-07-22T04:00:00Z", "actual_close_time": "2018-07-22T04:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Many of U.S. President Donald Trump's actions that have scandalized his political opponents have not made a dent in the approval he enjoys from his base.\n\nIt is possible, though, that the negative reaction he has received around his Helsinki meeting is different in kind, and will unsettle some of his staunchest supporters.\n\nAs of this moment (8:30pm PT on July 16, 2018) Trump's approval rating on the fivethirtyeight.com aggregated [Approval Rating](https://projects.fivethirtyeight.com/trump-approval-ratings/) Page is 42.1%. \n\nIt takes on the order of several days for new incoming polls to be received and aggregated to the point where they begin to affect the rolling aggregate. 
So we will determine the answer based on a reading two weeks from now.\n\n*This question will resolve positively if the approval rating reflected [on this page](https://projects.fivethirtyeight.com/trump-approval-ratings/) is less than 40.0% (representing an apparent decline of 2.1 points over two weeks) anytime during the day of July 30, 2018.*", "fine_print": "", "post_id": 1095, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1532224508.945827, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": [ 0.13 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1532224508.945827, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": [ 0.13 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.2598248053659269 ], "histogram": [ [ 0.0, 2.0416889427189755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.6105685789046964, 0.0, 0.0, 0.31171167159274227, 0.0, 0.0, 0.0, 0.4955742943419901, 0.0, 0.0, 1.753222201721604, 0.3520703225627485, 0.7538799595077584, 0.0, 0.0, 0.5701503720228134, 0.0, 0.6337881751559187, 0.0, 0.0, 0.646385544799047, 0.0, 0.0, 2.342890145277435, 0.03738920373933642, 0.10945236915001805, 0.8384942775071321, 0.0, 0.17798256379100744, 0.4091590214430291, 0.00336530659706166, 0.11744267076738801, 0.011752428427110738, 0.05476540877060556, 0.12213964696863193, 0.025974222019380454, 0.0, 0.0020593749349257107, 0.0, 0.003228095225742652, 0.38383618459985386, 0.03372481884086813, 0.0, 0.0, 0.0, 0.09917425735890091, 0.0, 0.0, 0.0, 0.0, 0.5632462767504633, 0.0, 0.0, 0.0, 0.0, 0.0, 0.021786738318048823, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3492399953960082 ] ] }, "score_data": { "peer_score": 8.528857963716742, "coverage": 0.8740653154627822, "baseline_score": 31.795356901295122, "spot_peer_score": -10.056087555102726, "peer_archived_score": 8.528857963716742, "baseline_archived_score": 31.795356901295122, "spot_peer_archived_score": -10.056087555102726 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1532126236.226825, "end_time": null, "forecaster_count": 59, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1532126236.226825, "end_time": null, "forecaster_count": 59, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8474746348160176, 0.15252536518398244 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 110, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1090, "title": "Will the next assassination of a sitting US President occur after January 1, 2100?", "short_title": "No Assassination of US President by 2100", "url_title": "No Assassination of US President by 2100", "slug": "no-assassination-of-us-president-by-2100", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-15T22:33:55.246093Z", "published_at": "2018-07-21T07:00:00Z", "edited_at": "2025-10-20T06:30:43.882945Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-21T07:00:00Z", "comment_count": 23, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2099-12-31T08:00:00Z", 
"scheduled_resolve_time": "2099-12-31T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-07-21T07:00:00Z", "nr_forecasters": 230, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 1090, "title": "Will the next assassination of a sitting US President occur after January 1, 2100?", "created_at": "2018-07-15T22:33:55.246093Z", "open_time": "2018-07-21T07:00:00Z", "cp_reveal_time": "2018-07-22T18:46:59.616737Z", "spot_scoring_time": "2018-07-22T18:46:59.616737Z", "scheduled_resolve_time": "2099-12-31T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2099-12-31T08:00:00Z", "actual_close_time": "2099-12-31T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "On November 22, 1963, Lee Harvey Oswald shot President John F. Kennedy in Dallas as he proceeded down the street in a motorcade. Kennedy died shortly thereafter, and the nation descended into mourning. The [JFK assassination](https://www.jfklibrary.org/JFK/JFK-in-History/November-22-1963-Death-of-the-President.aspx) not only led to countless conspiracy theories, but it also led to better Presidential security measures.\n\nJFK was the last President killed in office. But he wasn't the first. Our nation's checkered history has witnessed a disturbing number of assassinations, including:\n\n* John Wilkes Booth famously killed [President Abraham Lincoln](https://www.history.com/this-day-in-history/john-wilkes-booth-shoots-abraham-lincoln) on April 14, 1865.\n\n* Charles J. Guiteau shot and killed [President James Garfield](http://americanhistory.si.edu/presidency/3d1d.html) on September 19, 1881. 
\n\n* Leon Czolgosz, an anarchist, shot [President William McKinley](http://www.historynet.com/president-william-mckinley-assassinated-by-an-anarchist.htm) in 1901.\n\nAnd there have been plenty of near misses as well. For instance, on March 30, 1981, John Hinckley Jr. [shot President Reagan](https://en.wikipedia.org/wiki/Attempted_assassination_of_Ronald_Reagan), who fortunately recovered from the assault. \n\nIt's been nearly 55 years since JFK's death.", "resolution_criteria": "This question will resolve as **Yes** if every US President serving from January 20, 2017 to January 1, 2100 serves their term without successfully being assassinated. \n\nAny assassination attempt which does not lead directly to a US President's medical death will not trigger resolution. Any assassination of a US President who is not currently holding that office will also not trigger resolution. Attacks which target a broad group of people (such as nuclear or biological weapons) may qualify as an \"assassination\" if it can be determined that the attacker intended to target the President (whether or not other individuals were also a target)", "fine_print": "", "post_id": 1090, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1760941833.734643, "end_time": 1785999540.315, "forecaster_count": 228, "interval_lower_bounds": [ 0.11 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1760941833.734643, "end_time": 1785999540.315, "forecaster_count": 228, "interval_lower_bounds": [ 0.11 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.26767980299054284 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288306.606637, "end_time": null, "forecaster_count": 228, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288306.606637, "end_time": null, "forecaster_count": 228, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8006165999829427, 0.19938340001705732 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 14, "user_vote": null }, "forecasts_count": 460, "key_factors": [], "is_current_content_translated": false, "description": "On November 22, 1963, Lee Harvey Oswald shot President John F. Kennedy in Dallas as he proceeded down the street in a motorcade. Kennedy died shortly thereafter, and the nation descended into mourning. The [JFK assassination](https://www.jfklibrary.org/JFK/JFK-in-History/November-22-1963-Death-of-the-President.aspx) not only led to countless conspiracy theories, but it also led to better Presidential security measures.\n\nJFK was the last President killed in office. But he wasn't the first. Our nation's checkered history has witnessed a disturbing number of assassinations, including:\n\n* John Wilkes Booth famously killed [President Abraham Lincoln](https://www.history.com/this-day-in-history/john-wilkes-booth-shoots-abraham-lincoln) on April 14, 1865.\n\n* Charles J. Guiteau shot and killed [President James Garfield](http://americanhistory.si.edu/presidency/3d1d.html) on September 19, 1881. \n\n* Leon Czolgosz, an anarchist, shot [President William McKinley](http://www.historynet.com/president-william-mckinley-assassinated-by-an-anarchist.htm) in 1901.\n\nAnd there have been plenty of near misses as well. 
For instance, on March 30, 1981, John Hinckley Jr. [shot President Reagan](https://en.wikipedia.org/wiki/Attempted_assassination_of_Ronald_Reagan), who fortunately recovered from the assault. \n\nIt's been nearly 55 years since JFK's death." }, { "id": 1079, "title": "Will Elon Musk (eventually) lose his appeal?", "short_title": "", "url_title": "", "slug": "will-elon-musk-eventually-lose-his-appeal", "author_id": 103733, "author_username": "jzima", "coauthors": [], "created_at": "2018-07-12T12:23:35.572954Z", "published_at": "2018-07-15T07:00:00Z", "edited_at": "2025-09-19T05:31:00.274137Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-15T07:00:00Z", "comment_count": 45, "status": "resolved", "resolved": true, "actual_close_time": "2020-12-31T23:00:00Z", "scheduled_close_time": "2020-12-31T23:00:00Z", "scheduled_resolve_time": "2025-12-31T23:00:00Z", "actual_resolve_time": "2025-02-01T01:00:00Z", "open_time": "2018-07-15T07:00:00Z", "nr_forecasters": 295, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 1079, "title": "Will Elon Musk (eventually) lose his appeal?", "created_at": "2018-07-12T12:23:35.572954Z", "open_time": "2018-07-15T07:00:00Z", "cp_reveal_time": "2018-07-15T19:10:01.412809Z", "spot_scoring_time": "2018-07-15T19:10:01.412809Z", "scheduled_resolve_time": "2025-12-31T23:00:00Z", "actual_resolve_time": "2025-02-01T01:00:00Z", "resolution_set_time": "2025-09-16T14:54:02.288427Z", "scheduled_close_time": "2020-12-31T23:00:00Z", "actual_close_time": "2020-12-31T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", 
"default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Elon Musk is the CEO and Lead Designer of SpaceX, the CEO of Tesla, Inc. and the CEO of Neuralink, yet increasingly it seems best to think of him as a celebrity. Elon Musk has (as of the writing of this question) 22.3 million followers on Twitter and single tweets have been known to shift Tesla's market cap by hundreds of millions of dollars. A large part of Musk's success has been his ability to raise billions of dollars for his various ventures, which (perhaps) was only made possible by his publicity.\n\nBut how long will Musk's fame last? Should he succeed in his highly ambitious goals and <a href='http://www.spacex.com/mars'>establish a human colony on Mars</a> or <a href=' https://www.metaculus.com/questions/980/will-tesla-become-the-largest-car-company-in-the-world-by-sales-prior-to-2035/'>make Tesla the world's largest auto company</a> it seems inevitable that his popularity will continue to grow. Should he fail, however, it seems equally inevitable that people will at some point lose interest in him and his grandiose promises.\n\nWe can track Musk's fame via <a href=' https://trends.google.com/trends/explore?date=all&q=Elon%20Musk'>Google Trends</a>. This does not tell us what people's opinion of him is, but it does give us a pretty good idea about how much people are talking about Musk online.\n\nIt is asked:<strong>Will the total interest in Elon Musk on <a href=' https://trends.google.com/trends/explore?date=all&q=Elon%20Musk'>Google Trends</a> in 2025 be less than a quarter of his total interest in 2020?</strong> \n\nTotal interest shall be defined as the sum of the individual monthly interest values over a year. 
Note that the linked-to Google Trends chart is for the search term 'Elon Musk' and not for interest in Elon Musk as the CEO of SpaceX or some other specific category.", "fine_print": "", "post_id": 1079, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609449902.015713, "end_time": null, "forecaster_count": 295, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.28 ], "interval_upper_bounds": [ 0.35 ] } ], "latest": { "start_time": 1609449902.015713, "end_time": null, "forecaster_count": 295, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.28 ], "interval_upper_bounds": [ 0.35 ], "forecast_values": [ 0.72, 0.28 ], "means": [ 0.2716363569394954 ], "histogram": [ [ 0.0, 1.442929976474239, 0.0028459579961435193, 0.0, 0.0, 1.3917845380925833, 8.841011234581645e-05, 0.31871969449303933, 0.5484054293878204, 0.008865018137717742, 1.0878437553979858, 0.2652829426536501, 0.0, 1.740853838319053, 0.024881141011537787, 1.704104320674571, 0.3388874956620257, 0.5317631134475878, 0.2969987436520638, 0.18090186661263524, 1.382304757310722, 0.5147648618724513, 0.030184366071204477, 1.2551275959290258, 0.05034633855360001, 1.7727051571771246, 0.35724557257856604, 0.906637092833805, 2.9847903116671466, 0.0, 2.2652677263533287, 0.361549195931449, 0.04009222550069174, 2.5038013832191037, 0.18570044015092754, 1.6429572499334506, 0.13017413739884096, 0.23240268785004506, 0.0, 1.7889631482602555, 1.0357092283147442, 0.9209845277843651, 0.0024142774282466073, 8.30761110900015e-06, 0.007897872197504133, 0.7679508755330724, 0.004452647812908715, 0.0, 0.0, 0.0026042986376976925, 0.33085189662118786, 0.004285276165989556, 0.0, 0.08766034482753104, 0.0, 0.10335591833122332, 0.06371349187463562, 0.05597053449349823, 0.0, 0.0, 0.025156902720971562, 0.0, 0.0, 0.0, 0.0, 0.034618527196709825, 0.004985498152452197, 0.0, 0.0, 2.715145285712894e-06, 0.06024687358534879, 0.0, 0.16311944720935717, 0.0, 0.0001890992080555522, 0.037431190539444825, 0.00014069161726554494, 1.5221502265476238e-05, 0.0, 0.0, 0.10981759792347279, 0.0005360372918530339, 7.527079933165896e-05, 0.0, 0.0, 0.23989635830784303, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011721207786425771, 0.0, 0.0, 0.0, 0.4991753847912662 ] ] }, "score_data": { "baseline_score": 47.37944306499695, "peer_score": 28.593911805193333, "coverage": 0.999985512690407, "relative_legacy_score": 0.0, "weighted_coverage": 0.999985512690407, "spot_peer_score": -4.490083168904696, "spot_baseline_score": 67.80719051126377, "baseline_archived_score": 47.37944306499695, "peer_archived_score": 28.593911805193333, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": -4.490083168904696, "spot_baseline_archived_score": 67.80719051126377 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609449902.27971, "end_time": null, "forecaster_count": 295, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609449902.27971, "end_time": null, "forecaster_count": 295, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8717762206964034, 0.12822377930359666 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 22, "user_vote": null }, "forecasts_count": 495, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1075, "title": "Before 8 September 2018, will any of the remaining signatories to the Iran nuclear deal 
announce they are withdrawing?", "short_title": "", "url_title": "", "slug": "before-8-september-2018-will-any-of-the-remaining-signatories-to-the-iran-nuclear-deal-announce-they-are-withdrawing", "author_id": 104439, "author_username": "IARPA Question Bot", "coauthors": [], "created_at": "2018-07-11T19:30:29.670095Z", "published_at": "2018-07-14T07:00:00Z", "edited_at": "2025-09-05T17:29:01.613450Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-14T07:00:00Z", "comment_count": 6, "status": "resolved", "resolved": true, "actual_close_time": "2018-09-07T18:01:01Z", "scheduled_close_time": "2018-09-07T18:01:01Z", "scheduled_resolve_time": "2018-09-09T02:01:00Z", "actual_resolve_time": "2018-09-09T02:01:00Z", "open_time": "2018-07-14T07:00:00Z", "nr_forecasters": 103, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 1075, "title": "Before 8 September 2018, will any of the remaining signatories to the Iran nuclear deal announce they are withdrawing?", "created_at": "2018-07-11T19:30:29.670095Z", "open_time": "2018-07-14T07:00:00Z", "cp_reveal_time": "2018-07-15T19:53:15.560292Z", "spot_scoring_time": "2018-07-15T19:53:15.560292Z", "scheduled_resolve_time": "2018-09-09T02:01:00Z", "actual_resolve_time": "2018-09-09T02:01:00Z", "resolution_set_time": "2018-09-09T02:01:00Z", "scheduled_close_time": "2018-09-07T18:01:01Z", "actual_close_time": "2018-09-07T18:01:01Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", 
"possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*This question was generated from the [IARPA Global Forecasting Challenge](https://www.iarpa.gov/challenges/gfchallenge.html).*\n\nChina, France, Germany, Russia, the United Kingdom, the United States, the European Union, and Iran signed the Joint Comprehensive Plan of Action (JCPOA) in July 2015 (<a href = \"https://apps.washingtonpost.com/g/documents/world/full-text-of-the-iran-nuclear-deal/1651/\"target=\"_blank\">The Washington Post</a>, <a href = \"https://www.al-monitor.com/pulse/originals/2018/06/iran-jcpoa-nuclear-deal-zarif-letter-un-salehi-190000-swu.html\"target=\"_blank\">Al-Monitor</a>). The United States announced its withdrawal from the deal on 8 May 2018 (<a href = \"https://www.nytimes.com/2018/05/08/world/middleeast/trump-iran-nuclear-deal.html\"target=\"_blank\">NY Times</a>).", "fine_print": "", "post_id": 1075, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1536341179.024078, "end_time": null, "forecaster_count": 103, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.02 ] } ], "latest": { "start_time": 1536341179.024078, "end_time": null, "forecaster_count": 103, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.02 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.03638149123828806 ], "histogram": [ [ 0.0, 14.045219434083798, 0.31069795666060407, 0.11593403549251957, 0.11739907899885632, 2.0620588949075707, 0.07102772919048882, 0.10280886532797705, 0.09452069636430221, 0.5762111588443762, 0.06597307672485375, 0.0, 0.053906913995983005, 0.0, 0.0, 0.013777620318965636, 0.0, 0.0, 0.0005513502689110807, 0.25311099331604514, 0.19057938876166186, 0.0, 0.0, 0.2390473384080099, 0.1320132265266261, 0.022391602380196524, 0.029728711458096302, 0.0, 0.0, 0.0, 0.0, 0.018602160257638698, 0.0, 0.07941099381019567, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00022111209728182583, 0.0, 0.0, 0.0, 0.00010633760576014552, 0.0, 0.06955922078988591, 0.0, 0.0, 0.02755825804385377, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10952114625365335, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 7.326548465149899, "coverage": 0.9815559311614738, "baseline_score": 87.13064906012846, "spot_peer_score": 17.232123896773313, "peer_archived_score": 7.326548465149899, "baseline_archived_score": 87.13064906012846, "spot_peer_archived_score": 17.232123896773313 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1536313204.291219, "end_time": null, "forecaster_count": 99, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1536313204.291219, "end_time": null, "forecaster_count": 99, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, 
"forecast_values": [ 0.9106097570817717, 0.08939024291822832 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 175, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1061, "title": "Will there be a 9 foot tall human by 2075?", "short_title": "9 Foot Tall Human by 2075", "url_title": "9 Foot Tall Human by 2075", "slug": "9-foot-tall-human-by-2075", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T07:33:34.608677Z", "published_at": "2018-08-19T04:00:00Z", "edited_at": "2025-11-21T21:43:18.203653Z", "curation_status": "approved", "curation_status_updated_at": "2018-08-19T04:00:00Z", "comment_count": 22, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2074-12-31T08:00:00Z", "scheduled_resolve_time": "2075-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-08-19T04:00:00Z", "nr_forecasters": 186, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" } ] }, "question": { "id": 1061, "title": "Will there be a 9 foot tall human by 2075?", "created_at": "2018-07-08T07:33:34.608677Z", "open_time": "2018-08-19T04:00:00Z", "cp_reveal_time": "2018-08-19T10:37:53.870441Z", "spot_scoring_time": "2018-08-19T10:37:53.870441Z", "scheduled_resolve_time": "2075-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2074-12-31T08:00:00Z", "actual_close_time": "2074-12-31T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"continuous_range": null }, "group_rank": null, "description": "Born in 1918, Robert Wadlow grew to enormous size and scraped the skies at 8' 11\" thanks to hormonal issue that tragically also led to a cacade of health problems. He died in 1940 of consequences from an infection.\n\nOther people have crested the 8' tall mark, but they are few and far between, and no one's come close to Wadlow's record, at least according to the [officials at Guinness](http://www.guinnessworldrecords.com/world-records/tallest-man-ever/).\n\nCan people ever grow to 9 feet tall or even beyond? This [article from The Guardian](https://www.theguardian.com/science/2004/may/06/thisweekssciencequestions3) points out that: \n\n> Normally, the growth of our bones is limited by our sex hormones. A good burst of sex hormones at the right time tells the ends of our bones to stop growing. In acromegalic gigantism, as the tumour grows, it destroys cells in the pituitary gland that stimulate the release of sex hormones. The bones, therefore, never get the signal to stop growing.\n\n> But surely there must be a limit to a person's height? John Wass, a specialist in acromegalic gigantism at the University of Oxford, reckons it would be impressive to survive for long if you grew taller than 9ft. \n\nHere's why Wass thinks 9 feet tall is a ceiling, so to speak: \n\n* blood pressure in the legs would be extreme and dangerous\n* keeping blood circulating in such a person would strain the heart to the breaking point\n\n[Gizmodo points out](https://gizmodo.com/5994755/how-tall-can-a-human-get) that: \n\n> the primary [obstacle to mega growth] is our environment. Gravity, and the limited buoyancy of the air around us, means that our bones would have to grow exponentially larger to support a human that towered ten to twelve feet tall. \n\nAnd this [New York Times piece](https://www.nytimes.com/2011/08/30/science/30qna.html) quotes Dr. Rob DeSalle, of the Sackler Institute for Comparative Genomics, who says: \n\n> Many joint disorders and bone problems exist for these individuals that impact average-height people much less. Engineering problems with respect to organisms and their evolution do not get solved easily.\n\nAll that said, Wadlow came within just 1 inch of 9 feet tall. Surely, some person, somewhere, will break the mark. 
But when?", "resolution_criteria": "Resolves **Yes** if by 2075 Guinness (or some comparable replacement or alternative) record a > 9' tall human.\n\n*9 foot = 274 cm*", "fine_print": "", "post_id": 1061, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763761387.186033, "end_time": 1774539549.259846, "forecaster_count": 180, "interval_lower_bounds": [ 0.12 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.3 ] } ], "latest": { "start_time": 1763761387.186033, "end_time": 1774539549.259846, "forecaster_count": 180, "interval_lower_bounds": [ 0.12 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.3 ], "forecast_values": [ 0.8, 0.2 ], "means": [ 0.2528083274353394 ], "histogram": [ [ 0.5872473674636907, 0.990255195852409, 0.39597196400467544, 0.3118310865649578, 0.10292157811467835, 0.18849872779543947, 0.0, 0.00019995296792767756, 0.8528413847843082, 0.7120477006255133, 0.8343949135239317, 1.2180624048455724, 0.25910255867106957, 0.020227060196309182, 0.7708654940792506, 0.6890766340252622, 0.031921812499875114, 0.7609762881963646, 0.9974443257698417, 2.3303015207259614, 0.6765665677247754, 0.8557279390447055, 0.0, 0.0, 0.21483854374974706, 3.236921546772714, 0.005347879813693682, 0.015469332659973582, 0.09059563415721866, 0.0, 2.778843283775687, 0.15841504893572975, 0.0646136485292203, 0.05976830020144824, 0.01239536504350724, 0.14489111971627264, 0.0, 0.0, 0.47107008968091196, 0.0, 1.798483865433929, 0.0, 0.0, 0.0, 0.0, 0.05881239654279657, 0.2826041322405854, 0.0, 0.010207352360804922, 0.11683644882467438, 0.6593660454336442, 5.166925175618712e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04046435316225562, 0.06165123283401591, 0.0, 0.005683451264617549, 0.0, 8.42457896521453e-06, 6.130744873295977e-06, 0.0, 0.0, 0.0, 0.00969861985426441, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3123645016126409, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0015212778070905623, 0.0, 0.6104493939385937, 0.127633656982369, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40042795714187907 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287789.990972, "end_time": null, "forecaster_count": 185, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287789.990972, "end_time": null, "forecaster_count": 185, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9079352310284884, 0.09206476897151164 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 8, "user_vote": null }, "forecasts_count": 409, "key_factors": [], "is_current_content_translated": false, "description": "Born in 1918, Robert Wadlow grew to enormous size and scraped the skies at 8' 11\" thanks to hormonal issue that tragically also led to a cacade of health problems. He died in 1940 of consequences from an infection.\n\nOther people have crested the 8' tall mark, but they are few and far between, and no one's come close to Wadlow's record, at least according to the [officials at Guinness](http://www.guinnessworldrecords.com/world-records/tallest-man-ever/).\n\nCan people ever grow to 9 feet tall or even beyond? This [article from The Guardian](https://www.theguardian.com/science/2004/may/06/thisweekssciencequestions3) points out that: \n\n> Normally, the growth of our bones is limited by our sex hormones. A good burst of sex hormones at the right time tells the ends of our bones to stop growing. 
In acromegalic gigantism, as the tumour grows, it destroys cells in the pituitary gland that stimulate the release of sex hormones. The bones, therefore, never get the signal to stop growing.\n\n> But surely there must be a limit to a person's height? John Wass, a specialist in acromegalic gigantism at the University of Oxford, reckons it would be impressive to survive for long if you grew taller than 9ft. \n\nHere's why Wass thinks 9 feet tall is a ceiling, so to speak: \n\n* blood pressure in the legs would be extreme and dangerous\n* keeping blood circulating in such a person would strain the heart to the breaking point\n\n[Gizmodo points out](https://gizmodo.com/5994755/how-tall-can-a-human-get) that: \n\n> the primary [obstacle to mega growth] is our environment. Gravity, and the limited buoyancy of the air around us, means that our bones would have to grow exponentially larger to support a human that towered ten to twelve feet tall. \n\nAnd this [New York Times piece](https://www.nytimes.com/2011/08/30/science/30qna.html) quotes Dr. Rob DeSalle, of the Sackler Institute for Comparative Genomics, who says: \n\n> Many joint disorders and bone problems exist for these individuals that impact average-height people much less. Engineering problems with respect to organisms and their evolution do not get solved easily.\n\nAll that said, Wadlow came within just 1 inch of 9 feet tall. Surely, some person, somewhere, will break the mark. But when?" }, { "id": 1060, "title": "Will ANY of the top 10 most popular baby names in the U.S. (as of 2018) still be in the top 10 in 2048?", "short_title": "", "url_title": "", "slug": "will-any-of-the-top-10-most-popular-baby-names-in-the-us-as-of-2018-still-be-in-the-top-10-in-2048", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T07:30:48.124123Z", "published_at": "2018-07-11T07:00:00Z", "edited_at": "2025-09-05T17:28:52.920119Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-11T07:00:00Z", "comment_count": 5, "status": "closed", "resolved": false, "actual_close_time": "2020-01-01T08:00:00Z", "scheduled_close_time": "2020-01-01T08:00:00Z", "scheduled_resolve_time": "2049-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-07-11T07:00:00Z", "nr_forecasters": 63, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32606, "name": "2018-2019 Leaderboard", "slug": "2018_2019_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, 
"bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 1060, "title": "Will ANY of the top 10 most popular baby names in the U.S. (as of 2018) still be in the top 10 in 2048?", "created_at": "2018-07-08T07:30:48.124123Z", "open_time": "2018-07-11T07:00:00Z", "cp_reveal_time": "2018-07-13T07:00:00Z", "spot_scoring_time": "2018-07-13T07:00:00Z", "scheduled_resolve_time": "2049-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2020-01-01T08:00:00Z", "actual_close_time": "2020-01-01T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "According to [Baby Center](https://www.babycenter.com/top-baby-names-2018.htm), the most popular baby names for girls in 2018 are:\n\n1. Emma \n2. Olivia\n3. Ava\n4. Isabella\n5. Sophia\n6. Amelia\n7. Mia\n8. Charlotte \t\n9. Harper \n10. Mila \n\nThe most popular baby names for boys are:\n\n1. Liam\n2. Noah\n3. Logan\n4. Oliver\n5. Mason\n6. Lucas\n7. Ethan\n8. Elijah\n9. Aiden\n10. James\n\nIf history is any guide, there will be a fair amout of turnover on this list 30 years from now. \n\nWill any of the 20 names above remain somewhere in the top 10 in 2048?\n\nFor the purposes of this question, we would refer to the [Social Security Administration's](https://www.ssa.gov/oact/babynames/) baby name popularity analysis.", "fine_print": "", "post_id": 1060, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1577812204.975152, "end_time": null, "forecaster_count": 63, "interval_lower_bounds": [ 0.55 ], "centers": [ 0.6 ], "interval_upper_bounds": [ 0.67 ] } ], "latest": { "start_time": 1577812204.975152, "end_time": null, "forecaster_count": 63, "interval_lower_bounds": [ 0.55 ], "centers": [ 0.6 ], "interval_upper_bounds": [ 0.67 ], "forecast_values": [ 0.4, 0.6 ], "means": [ 0.6250940961484563 ], "histogram": [ [ 0.0, 0.03492060446249783, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17139679823358864, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04791750235716968, 0.0, 0.09352845379090119, 0.0, 0.05301110107094136, 0.0, 0.0, 0.08543252822519284, 0.8833691903822941, 0.009846599194668917, 0.0, 0.0, 1.0060431685606872, 0.01314514673938087, 0.0, 0.0, 0.0, 0.12168712976649625, 0.011780283942823808, 0.21564328479102138, 0.0, 0.0, 0.7250745315089938, 1.737749042073976, 0.32972789464769453, 0.018644708154279096, 0.0, 0.2153730452690045, 1.5918796132655395, 0.0, 0.14409911274774573, 0.5183295490595771, 0.31508149791861734, 1.5494231576634439, 0.3917019996792684, 0.7790924126099044, 0.0, 0.3645648957485751, 0.4454094958399697, 0.0, 0.0, 0.0, 0.0, 0.17014486580285942, 0.0, 0.0, 0.0, 0.0, 0.33905283347288273, 0.0, 0.0, 0.0, 0.019501694237860755, 0.0, 0.0, 0.0, 0.0, 0.9387124664479816, 0.02923076418133356, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.009686923446224 ] ] }, "score_data": {}, "movement": null }, 
"metaculus_prediction": { "history": [ { "start_time": 1573576091.095885, "end_time": null, "forecaster_count": 56, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1573576091.095885, "end_time": null, "forecaster_count": 56, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6396585058776301, 0.36034149412237 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 78, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1059, "title": "Before 2051, will a meteor cause more injuries than the one that shook Chelyabinsk, Russia in 2013?", "short_title": "", "url_title": "", "slug": "before-2051-will-a-meteor-cause-more-injuries-than-the-one-that-shook-chelyabinsk-russia-in-2013", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T07:29:02.964494Z", "published_at": "2020-05-28T09:00:00Z", "edited_at": "2025-11-22T12:27:27.283186Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-28T09:00:00Z", "comment_count": 12, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2040-10-17T07:00:00Z", "scheduled_resolve_time": "2051-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2020-05-28T09:00:00Z", "nr_forecasters": 46, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 1059, "title": "Before 2051, will a meteor cause more injuries than the one that shook Chelyabinsk, Russia in 2013?", "created_at": "2018-07-08T07:29:02.964494Z", "open_time": "2020-05-28T09:00:00Z", "cp_reveal_time": "2020-05-30T09:00:00Z", "spot_scoring_time": "2020-05-30T09:00:00Z", "scheduled_resolve_time": "2051-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2040-10-17T07:00:00Z", "actual_close_time": "2040-10-17T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, 
"include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In February, 2013, a meteor [scorched](https://www.youtube.com/watch?v=ggLTPyRXUKc) the Russian skies. EarthSky gives the [key details](http://earthsky.org/space/meteor-asteroid-chelyabinsk-russia-feb-15-2013): \n\n> A small asteroid with an estimated size of 65 feet... was moving at 12 miles per second (~19 km/sec) when it struck the protective blanket of air around our planet, which did its job and caused the asteroid to explode. The bright, hot explosion took place only about 20 miles (30 km) above the city of Chelyabinsk in Russia and carried 20 to 30 times the energy of the Hiroshima atomic bomb. Its shock wave broke windows and knocked down parts of buildings in six Russian cities and caused some 1,500 people to seek medical attention for injuries, mostly from flying glass.\n\nIt's only a matter of time before more space rocks as big as (or bigger than) the one that caused the Chelyabinsk Event find their way to Earth.\n\nBefore 2051-01-01, will an event cause *more* injuries than the Chelyabinsk meteor, which injured 1491 people ([russian source](https://web.archive.org/web/20130502144652/http://top.rbc.ru/incidents/18/02/2013/845595.shtml), [wp](https://en.wikipedia.org/wiki/Chelyabinsk_meteor))? More specifically, will a meteor next injure at least 1492 people? 
This resolves according to credible media reports, or their median estimate if several are found.", "fine_print": "", "post_id": 1059, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763814436.753597, "end_time": 1781081581.658368, "forecaster_count": 45, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.6 ] } ], "latest": { "start_time": 1763814436.753597, "end_time": 1781081581.658368, "forecaster_count": 45, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.6 ], "forecast_values": [ 0.55, 0.45 ], "means": [ 0.4105056489094621 ], "histogram": [ [ 0.7370646316360566, 0.4249559516928073, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18119093168767203, 0.14772949571888752, 0.0, 0.0, 0.0, 0.3732026023739936, 0.0, 0.0, 0.0, 0.0, 0.028841894683954285, 0.0, 0.08496097501576635, 0.00902096530357109, 0.0, 0.0, 0.0, 0.05870557476045682, 0.0, 0.0, 0.0, 1.0, 0.0, 0.6813708105344584, 0.34946576266536805, 0.0, 0.8600494355428064, 0.0, 0.0, 0.0, 0.0, 0.8203562150583852, 0.0, 0.0, 0.0, 0.11935795955623035, 1.8241049390771154, 0.0, 0.0, 0.20005070513194295, 0.0, 0.46594234120633293, 0.06213662630461396, 0.05148079025128712, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.9311545387387832, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25393556394644184, 0.0, 0.22045724352962418, 0.0, 0.6292630324472235, 0.0, 0.0, 0.0033186276747047033, 0.0, 0.024521526059856833, 0.0, 0.0, 0.0, 0.0, 0.38150119878791605, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287379.806443, "end_time": null, "forecaster_count": 43, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287379.806443, "end_time": null, "forecaster_count": 43, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6824320749723727, 0.3175679250276273 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 124, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1058, "title": "Will humanity (essentially) run out of at least one element currently considered \"under serious threat\" by 2040?", "short_title": "Shortage of rare element by 2040?", "url_title": "Shortage of rare element by 2040?", "slug": "shortage-of-rare-element-by-2040", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T07:26:42.553619Z", "published_at": "2018-07-11T07:00:00Z", "edited_at": "2025-10-16T00:56:41.385335Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-11T07:00:00Z", "comment_count": 17, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2040-01-01T08:00:00Z", "scheduled_resolve_time": "2040-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-07-11T07:00:00Z", "nr_forecasters": 148, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", 
"score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 1058, "title": "Will humanity (essentially) run out of at least one element currently considered \"under serious threat\" by 2040?", "created_at": "2018-07-08T07:26:42.553619Z", "open_time": "2018-07-11T07:00:00Z", "cp_reveal_time": "2018-07-12T21:01:03.626000Z", "spot_scoring_time": "2018-07-12T21:01:03.626000Z", "scheduled_resolve_time": "2040-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2040-01-01T08:00:00Z", "actual_close_time": "2040-01-01T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In 2008, science fiction author, Robert Silverberg, penned a provocative web essay, [The Death of Galium](https://web.archive.org/web/20080704170435/http://www.asimovs.com/_issue_0806/ref.shtml), that predicted that we would collectively run out of several essential, rare elements within a single decade. Per Silverberg:\n\n> The element gallium is in very short supply and the world may well run out of it in just a few years. Indium is threatened too, says Armin Reller, a materials chemist at Germany’s University of Augsburg. He estimates that our planet’s stock of indium will last no more than another decade. All the hafnium will be gone by 2017 also, and another twenty years will see the extinction of zinc. Even copper is an endangered item, since worldwide demand for it is likely to exceed available supplies by the end of the present century.\n\nSilverberg's dire fears have not (yet!) come to pass, but the [American Chemical Society (ACS)](https://www.acs.org/content/acs/en/greenchemistry/research-innovation/research-topics/endangered-elements.html) has identified 44 elements that \"face supply limitations in the coming years.\" Those considered by the ACS \"under serious threat\" over the next century include: silver, zinc, hafnium, gallium and helium.\n\nIt sounds serious. But not everyone is concerned. 
This [counterpoint editorial](https://www.theregister.co.uk/2014/05/22/energy_economics_coal/) from The Register, for instance, maintains that the galium shortage is a nothingburger: \n\n> We mine for aluminium by sticking bauxite into a Bayer Process plant, where we boil it in caustic soda. If you put the right doohicky on the side of this plant then you get the gallium out. It's at about 100ppm, 100 grammes per tonne of bauxite processed... there's around a 1,000-year supply of Ga in the bauxite that we already know that we'll process for the aluminium content... We simply don't have any meaningful shortage of these metals [worth] worrying about.", "resolution_criteria": "As a proxy for supply issues, we will use price. The question will resolve as **Yes** if before January 1, 2040 any of the following \"under serious threat\" elements has a trackable commodities price that attains an inflation-adjusted price more than 10 times its price as of July 10, 2018, as measured by a reliable entity, like [USGS](https://minerals.usgs.gov/minerals/pubs/commodity/). The elements in question are: He, Zn, Ga, Ge, As, Ag, In, Te and Hf.", "fine_print": "", "post_id": 1058, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1760576190.627919, "end_time": 1766766434.449368, "forecaster_count": 145, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.48 ] } ], "latest": { "start_time": 1760576190.627919, "end_time": 1766766434.449368, "forecaster_count": 145, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.48 ], "forecast_values": [ 0.6, 0.4 ], "means": [ 0.3697072307919742 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289996.82609, "end_time": null, "forecaster_count": 148, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289996.82609, "end_time": null, "forecaster_count": 148, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8414069090779667, 0.15859309092203333 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 15, "user_vote": null }, "forecasts_count": 369, "key_factors": [], "is_current_content_translated": false, "description": "In 2008, science fiction author, Robert Silverberg, penned a provocative web essay, [The Death of Galium](https://web.archive.org/web/20080704170435/http://www.asimovs.com/_issue_0806/ref.shtml), that predicted that we would collectively run out of several essential, rare elements within a single decade. Per Silverberg:\n\n> The element gallium is in very short supply and the world may well run out of it in just a few years. Indium is threatened too, says Armin Reller, a materials chemist at Germany’s University of Augsburg. He estimates that our planet’s stock of indium will last no more than another decade. All the hafnium will be gone by 2017 also, and another twenty years will see the extinction of zinc. Even copper is an endangered item, since worldwide demand for it is likely to exceed available supplies by the end of the present century.\n\nSilverberg's dire fears have not (yet!) 
come to pass, but the [American Chemical Society (ACS)](https://www.acs.org/content/acs/en/greenchemistry/research-innovation/research-topics/endangered-elements.html) has identified 44 elements that \"face supply limitations in the coming years.\" Those considered by the ACS \"under serious threat\" over the next century include: silver, zinc, hafnium, gallium and helium.\n\nIt sounds serious. But not everyone is concerned. This [counterpoint editorial](https://www.theregister.co.uk/2014/05/22/energy_economics_coal/) from The Register, for instance, maintains that the galium shortage is a nothingburger: \n\n> We mine for aluminium by sticking bauxite into a Bayer Process plant, where we boil it in caustic soda. If you put the right doohicky on the side of this plant then you get the gallium out. It's at about 100ppm, 100 grammes per tonne of bauxite processed... there's around a 1,000-year supply of Ga in the bauxite that we already know that we'll process for the aluminium content... We simply don't have any meaningful shortage of these metals [worth] worrying about." }, { "id": 1052, "title": "Will there be a city on Earth with a population of over 100 million by 2100?", "short_title": "City on Earth >100M Population by 2100", "url_title": "City on Earth >100M Population by 2100", "slug": "city-on-earth-100m-population-by-2100", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T07:08:14.356864Z", "published_at": "2018-07-11T07:00:00Z", "edited_at": "2025-10-26T16:31:16.554251Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-11T07:00:00Z", "comment_count": 22, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2099-12-31T08:00:00Z", "scheduled_resolve_time": "2100-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-07-11T07:00:00Z", "nr_forecasters": 215, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" } ] }, "question": { "id": 1052, "title": "Will there be a city on Earth with a population of over 100 million by 2100?", "created_at": "2018-07-08T07:08:14.356864Z", "open_time": "2018-07-11T07:00:00Z", "cp_reveal_time": "2018-07-13T07:00:00Z", "spot_scoring_time": "2018-07-13T07:00:00Z", "scheduled_resolve_time": "2100-01-01T08:00:00Z", 
"actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2099-12-31T08:00:00Z", "actual_close_time": "2099-12-31T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "For a mostly hairless, fangless species that was likely reduced to [just a few thousand members](https://www.npr.org/sections/krulwich/2012/10/22/163397584/how-human-beings-almost-vanished-from-earth-in-70-000-b-c) in relatively recent times by a volcano, we've certainly been busy.\n\nThe human population on Earth now exceeds 7 billion. And we're still growing. [Not everyone](https://www.youtube.com/watch?v=4rvuueqs3vI) is thrilled by this. But it's reality.\n\nCurrently, as of Q2 2018, Tokyo, Japan is the world's largest metropolis, cramming [over 38,000,000 people](https://www.worldatlas.com/citypops.htm) (give or take) into its borders.\n\nBut by the end of the century, we could (and probably will) witness far grander cities. \n\nPer Canadian demographers, Daniel Hoornweg and Kevin Pope, for instance, Lagos in Nigeria may swell to 100+M by 2100 if trends continue. Face 2 Face Africa has the story:\n\n> By 2100 if Nigeria’s population continues to grow and people move to cities at the same rate as now, Lagos with its current 20 million people could be home to more people than the state of California.\n\nThe demographers' full paper is [here](http://journals.sagepub.com/doi/abs/10.1177/0956247816663557). \n\nThe International Institute for Environment and Development goes into detail about why Hoornweg and Pope are so bullish on Africa [here](https://www.iied.org/will-africa-have-worlds-largest-cities-2100).\n\nWhether it's Lagos that claims the crown or some other city in Africa or elsewhere, what do you think? Will some metro hit 100M people by 2100? 
(For the estimate we will include the general metropolitan area rather than the strict city boundaries, as done [here](https://en.wikipedia.org/wiki/List_of_urban_areas_by_population#Urban_areas_(Top_100).))", "fine_print": "", "post_id": 1052, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1761496266.212514, "end_time": 1788425914.566, "forecaster_count": 212, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.54 ], "interval_upper_bounds": [ 0.7 ] } ], "latest": { "start_time": 1761496266.212514, "end_time": 1788425914.566, "forecaster_count": 212, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.54 ], "interval_upper_bounds": [ 0.7 ], "forecast_values": [ 0.45999999999999996, 0.54 ], "means": [ 0.5256881106277126 ], "histogram": [ [ 0.0, 0.7651742092748841, 0.0, 0.0, 0.0, 0.0005207607792523289, 0.0, 0.0, 0.0, 0.0, 0.0, 3.7121255234984676e-05, 0.0, 0.0, 0.5488572400703385, 0.2450583285127248, 0.024786917445453468, 0.0, 0.0, 0.040664366460619474, 0.6819789752605493, 0.0, 0.30693121302713944, 0.0, 0.00014037380569452242, 2.84843186529827, 0.5510224454626124, 0.0, 0.0, 0.0, 0.9540015549240802, 0.03060031542451884, 0.0, 0.31860221999465826, 1.0, 0.03119690306289527, 0.0, 0.0, 0.002438880800458197, 0.005756606627914959, 0.7015614239822697, 0.0, 0.03196472162770919, 0.7847432260479249, 0.33068278007493207, 0.5562554064796658, 0.05655809338898407, 0.0, 0.0, 0.05279864560925521, 1.1749231178255035, 0.5482276931019945, 0.0, 0.06003187222250854, 1.7319057206848552, 0.8605062793419564, 0.6445992628447642, 0.08244081897106378, 0.0, 0.21008575602774043, 1.7308368368715192, 0.03984842835494633, 0.5641765896557657, 0.003252085122420164, 0.019642219646325034, 0.11996346314298417, 0.3432128851574679, 0.8470249896309813, 0.009854982342170327, 0.0, 1.168717829850456, 0.0, 0.11858882620073519, 0.18799796017086226, 0.0001915771844553516, 0.747714637091849, 0.902196376842506, 0.09592281427687249, 0.0007895706103555237, 0.6132561918649077, 0.30803616357344005, 0.0, 0.00017615610506239757, 0.14986866676728178, 0.0027570149084849677, 0.2472431644595239, 0.0, 0.7691402266482313, 0.39770765388702983, 0.0, 1.0375086530522564, 0.21720904987266781, 0.0, 0.17996602428221325, 0.06816134310180401, 0.0, 0.25428138242896947, 0.0, 0.0, 0.29457290308975437 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288813.583639, "end_time": null, "forecaster_count": 213, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288813.583639, "end_time": null, "forecaster_count": 213, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6189123063937476, 0.38108769360625233 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 22, "user_vote": null }, "forecasts_count": 453, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1050, "title": "Will the IBM's Summit remain the top dog in supercomputing power?", "short_title": "", "url_title": "", "slug": "will-the-ibms-summit-remain-the-top-dog-in-supercomputing-power", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T06:57:31.516674Z", "published_at": "2018-08-19T04:00:00Z", "edited_at": "2025-09-05T17:29:21.948427Z", "curation_status": "approved", "curation_status_updated_at": "2018-08-19T04:00:00Z", "comment_count": 5, "status": "resolved", "resolved": 
true, "actual_close_time": "2018-11-01T03:59:00Z", "scheduled_close_time": "2018-11-01T03:59:00Z", "scheduled_resolve_time": "2018-11-16T02:58:00Z", "actual_resolve_time": "2018-11-16T02:58:00Z", "open_time": "2018-08-19T04:00:00Z", "nr_forecasters": 67, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 1050, "title": "Will the IBM's Summit remain the top dog in supercomputing power?", "created_at": "2018-07-08T06:57:31.516674Z", "open_time": "2018-08-19T04:00:00Z", "cp_reveal_time": "2018-08-19T16:44:29.589993Z", "spot_scoring_time": "2018-08-19T16:44:29.589993Z", "scheduled_resolve_time": "2018-11-16T02:58:00Z", "actual_resolve_time": "2018-11-16T02:58:00Z", "resolution_set_time": "2018-11-16T02:58:00Z", "scheduled_close_time": "2018-11-01T03:59:00Z", "actual_close_time": "2018-11-01T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In early June, IBM together with the Department of Energy launched Summit, the world's newest biggest supercomputer. It processes at a mind-numbing 200,000 trillion calculations per second.\n\nPer [TechCrunch](https://techcrunch.com/2018/06/08/ibms-new-summit-supercomputer-for-the-doe-delivers-200-petaflops/):\n\n> Summit... features 4,608 compute servers with two 22-core IBM Power9 chips and six Nvidia Tesla V100 GPUs each. In total, the system also features over 10 petabytes of memory. 
\n\nFor now, Summit [stands at the top](https://www.top500.org/news/us-regains-top500-crown-with-summit-supercomputer-sierra-grabs-number-three-spot/) (so to speak) of the TOP500 list. But international competition in the supercomputer realm is fierce. As [GCN reported](https://gcn.com/articles/2017/11/29/top500-supercomputers.aspx) last year:\n\n> China’s dominance on the recent [top supercomputer] lists indicates it is investing heavily in high-performance computing as supercomputers become a vital tool for simulating everything from nuclear explosions to medicine.\n\nMeanwhile, in early 2018, the EU announced it was joining the party. [Engadget reported](https://www.engadget.com/2018/01/12/europe-billion-euros-exascale-supercomputers/):\n\n> Buying and developing supercomputing technology is crazy expensive, with exascale machines expected to cost up to a half billion dollars. To buy and develop them, Europe will spend $486 million itself, with the balance of the $1.2 billion coming from member states. It plans to first acquire machines that can compete with current top supercomputers, then develop its own exascale machines by 2023.\n\nIt seems unlikely that another country (e.g. China) will break Summit's record in the next few months. But it also seems inevitable that Summit will eventually be dethroned. \n\n***Will Summit continue to hold the top spot on the upcoming November [Top500](https://www.top500.org/lists/2018/06/) list? (the official list comparing the world's supercomputers)***", "fine_print": "", "post_id": 1050, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1541020002.678269, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": [ 0.71 ], "centers": [ 0.81 ], "interval_upper_bounds": [ 0.89 ] } ], "latest": { "start_time": 1541020002.678269, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": [ 0.71 ], "centers": [ 0.81 ], "interval_upper_bounds": [ 0.89 ], "forecast_values": [ 0.18999999999999995, 0.81 ], "means": [ 0.8103322341384054 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007576017002229459, 0.0, 0.0, 0.019395540998817004, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.017210301564934184, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013401757476467992, 0.10103175184404456, 0.04136365129413757, 0.0797787163600839, 0.0, 0.0, 0.1627657268681097, 0.6872184256913908, 0.0, 0.8578107098217, 1.0, 0.678584892823128, 0.940527522258481, 0.0, 0.0, 0.03035066010230634, 0.34201615214336556, 0.09476538374383409, 0.0, 0.7802828173439227, 0.4634048111397331, 0.9002619217714235, 0.6577086624317363, 0.0, 0.0, 0.16805188654243905, 1.7802000492888048, 0.03465239751088114, 0.6444319296650869, 0.6039843306575171, 0.24585267888888213, 1.3468464120250263, 0.0, 0.0, 0.08709201066508168, 0.18184291418632875, 0.003228095225742652, 0.2844637239593917, 0.0, 0.0, 1.6268596332182212 ] ] }, "score_data": { "peer_score": 8.773216831518862, "coverage": 0.9996035228234936, "baseline_score": 64.49101948139517, "spot_peer_score": 38.2069681740538, "peer_archived_score": 8.773216831518862, "baseline_archived_score": 64.49101948139517, "spot_peer_archived_score": 38.2069681740538 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1540955062.096908, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } 
], "latest": { "start_time": 1540955062.096908, "end_time": null, "forecaster_count": 67, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.2277612124904439, 0.7722387875095561 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 111, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1049, "title": "Will there be an X-Class solar flare before October 1, 2018?", "short_title": "", "url_title": "", "slug": "will-there-be-an-x-class-solar-flare-before-october-1-2018", "author_id": 106084, "author_username": "AdamK", "coauthors": [], "created_at": "2018-07-08T06:53:11.821299Z", "published_at": "2018-07-11T07:00:00Z", "edited_at": "2025-09-05T17:28:52.818176Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-11T07:00:00Z", "comment_count": 2, "status": "resolved", "resolved": true, "actual_close_time": "2018-09-15T07:00:00Z", "scheduled_close_time": "2018-09-15T07:00:00Z", "scheduled_resolve_time": "2018-10-01T16:16:00Z", "actual_resolve_time": "2018-10-01T16:16:00Z", "open_time": "2018-07-11T07:00:00Z", "nr_forecasters": 75, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 1049, "title": "Will there be an X-Class solar flare before October 1, 2018?", "created_at": "2018-07-08T06:53:11.821299Z", "open_time": "2018-07-11T07:00:00Z", "cp_reveal_time": "2018-07-12T19:53:33.163049Z", "spot_scoring_time": "2018-07-12T19:53:33.163049Z", "scheduled_resolve_time": "2018-10-01T16:16:00Z", "actual_resolve_time": "2018-10-01T16:16:00Z", "resolution_set_time": "2018-10-01T16:16:00Z", "scheduled_close_time": "2018-09-15T07:00:00Z", "actual_close_time": "2018-09-15T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", 
"default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The 2-octallion ton ball of plasma that we cutely call \"the sun\" is constantly seething. With some regularity, the sun \"flares.\" Per [NASA](https://www.nasa.gov/content/goddard/what-is-a-solar-flare): \n\n> A solar flare is an intense burst of radiation coming from the release of magnetic energy associated with sunspots. Flares are our solar system’s largest explosive events. They are seen as bright areas on the sun and they can last from minutes to hours.\n\nAs you might expect, not all flares are created equal. Scientists clasify them according to intensity. Per convention, the smallest flare class is \"A\"; then we have \"B\" and \"C\"; for some reason, we skip to \"M\"; and, lastly, we finish with the fearsome \"X\", which, among other things, can knock out radio communications around the world.\n\nX-Class flares are the rarest. And they can be dangerous to Earthlings and our technology, especially if the flare's business end points right at us. Here's a [description of a mega flare](https://science.nasa.gov/science-news/science-at-nasa/2003/12nov_haywire) (technically, an X28 on the scale) that blasted forth from the sun in November, 2003:\n\n> The effects on Earth were many: Radio blackouts disrupted communications. Solar protons penetrated Earth's upper atmosphere, exposing astronauts and some air travelers to radiation doses equal to a medical chest X-ray. Auroras appeared all over the world--in Florida, Texas, Australia and many other places where they are seldom seen.\n\nX-Class flares are uncommon, but not that uncommon. Last September, for instance, saw a [spate of them](https://www.space.com/38115-sun-monster-solar-flares-seven-days.html).\n\nWhat do you think? Will the sun blast off another X-Class flare before October 1, 2018? 
(For a positive resolution, [SpaceWeatherLive site](https://www.spaceweatherlive.com/en/solar-activity/solar-flares) must report the flare as X-class.", "fine_print": "", "post_id": 1049, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1536972504.928991, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.25 ] } ], "latest": { "start_time": 1536972504.928991, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.25 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.157787195687822 ], "histogram": [ [ 0.0, 2.686030965602264, 0.0, 0.40444949447799144, 0.2347694085184629, 2.124587577201607, 0.5499123981425624, 0.0, 0.1834425985545891, 0.002443062932311581, 2.678487257793148, 0.1130965568839125, 0.08404903342566711, 0.0, 0.0, 0.06074680218210776, 0.018876485630930608, 0.0, 0.0, 0.4274130675954733, 1.314963953343609, 0.0344326025214608, 0.0, 0.0, 0.028403737957569904, 1.244419275290557, 0.0, 1.4476691576523801, 0.5198530315067792, 0.0, 0.1241198378293701, 0.0, 0.0, 0.010703886146113043, 0.03474599808951813, 0.023254082934554995, 0.0, 0.004095049893788444, 0.031301113244932864, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8394799688108832, 0.0012808209520579696, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5849577636717652, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.013550184787389273, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 7.41342927586748, "coverage": 0.9948264304917268, "baseline_score": 71.18715404625245, "spot_peer_score": 12.847380563629322, "peer_archived_score": 7.41342927586748, "baseline_archived_score": 71.18715404625245, "spot_peer_archived_score": 12.847380563629322 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1536693587.791921, "end_time": null, "forecaster_count": 72, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1536693587.791921, "end_time": null, "forecaster_count": 72, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8774830086354117, 0.12251699136458831 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 139, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1047, "title": "Will Elon Musk's kid-sized submarines come to the rescue?", "short_title": "", "url_title": "", "slug": "will-elon-musks-kid-sized-submarines-come-to-the-rescue", "author_id": 8, "author_username": "Anthony", "coauthors": [], "created_at": "2018-07-07T22:29:22.617753Z", "published_at": "2018-07-07T07:00:00Z", "edited_at": "2025-09-05T17:28:56.606888Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-07T07:00:00Z", "comment_count": 12, "status": "resolved", "resolved": true, "actual_close_time": "2018-07-10T11:00:00Z", "scheduled_close_time": "2018-07-15T07:00:00Z", "scheduled_resolve_time": "2018-07-15T07:00:00Z", "actual_resolve_time": "2018-07-10T11:00:00Z", "open_time": "2018-07-07T07:00:00Z", "nr_forecasters": 47, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": 
"leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" } ] }, "question": { "id": 1047, "title": "Will Elon Musk's kid-sized submarines come to the rescue?", "created_at": "2018-07-07T22:29:22.617753Z", "open_time": "2018-07-07T07:00:00Z", "cp_reveal_time": "2018-07-08T04:49:31.108138Z", "spot_scoring_time": "2018-07-08T04:49:31.108138Z", "scheduled_resolve_time": "2018-07-15T07:00:00Z", "actual_resolve_time": "2018-07-10T11:00:00Z", "resolution_set_time": "2018-07-10T11:00:00Z", "scheduled_close_time": "2018-07-15T07:00:00Z", "actual_close_time": "2018-07-10T11:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Yes, this is a serious question. With things getting desperate for the thai teens trapped miles into a [horrifyingly tricky](https://www.nytimes.com/2018/07/06/world/asia/thai-cave-rescue-divers.html) cave complex, Elon Musk has claimed that he's got a team that is (as of question launch) hours away from completing [little mini-submarines](https://www.theverge.com/2018/7/7/17544452/elon-musk-spacex-thailand-cave-soccer-team-kid-sized-submarine-rescue) that could spirit teens through the tortuous cave passages without them needing to swim or learn scuba.\n\n*** Will at least one cave-trapped Thai teenager enter a pressurized submarine system provided by Musk? 
***\n\nThe plan need not work (just be attempted on at least one child), though of course we can all hope it does.", "fine_print": "", "post_id": 1047, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1531227109.623919, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.14 ] } ], "latest": { "start_time": 1531227109.623919, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.14 ], "forecast_values": [ 0.94, 0.06 ], "means": [ 0.12669968670646264 ], "histogram": [ [ 0.0, 2.2674358742342626, 0.5949610715209219, 1.7480508454720285, 0.2619543455417107, 0.8655887224229246, 0.8227064721666111, 0.4336203067551109, 0.3322051308726494, 0.8919060302911029, 0.8998711266084662, 0.025137985516365664, 0.16466410993500383, 0.0, 0.7469208418768826, 0.01542797475028349, 0.0, 0.0, 0.0, 0.006737946999085467, 0.6553093899238189, 0.0, 0.0, 0.0, 0.0, 0.14546031501591658, 0.010561813766971701, 0.0, 0.0, 0.0, 0.338266843588547, 0.04978706836786394, 0.0, 0.00515416995245163, 0.0, 0.2064663553317556, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5089300594347316, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11034223931239878, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.39962148407489445 ] ] }, "score_data": { "peer_score": 2.3812342925007663, "coverage": 0.3143119464628399, "baseline_score": 24.321039345321438, "spot_peer_score": 1.612250835914456, "peer_archived_score": 2.3812342925007663, "baseline_archived_score": 24.321039345321438, "spot_peer_archived_score": 1.612250835914456 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1531158048.9832, "end_time": null, "forecaster_count": 45, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1531158048.9832, "end_time": null, "forecaster_count": 45, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9250421025210362, 0.07495789747896373 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 106, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1046, "title": "Will Michael Phelps be tempted out of retirement to compete in the 2020 Olympics?", "short_title": "", "url_title": "", "slug": "will-michael-phelps-be-tempted-out-of-retirement-to-compete-in-the-2020-olympics", "author_id": 105927, "author_username": "chipman", "coauthors": [], "created_at": "2018-07-07T01:31:23.100112Z", "published_at": "2018-07-09T07:00:00Z", "edited_at": "2025-09-05T17:29:12.887632Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-09T07:00:00Z", "comment_count": 3, "status": "resolved", "resolved": true, "actual_close_time": "2018-12-31T05:15:00Z", "scheduled_close_time": "2018-12-31T05:15:00Z", "scheduled_resolve_time": "2019-01-02T04:31:00Z", "actual_resolve_time": "2019-01-02T04:31:00Z", "open_time": "2018-07-09T07:00:00Z", "nr_forecasters": 97, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, 
"name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 1046, "title": "Will Michael Phelps be tempted out of retirement to compete in the 2020 Olympics?", "created_at": "2018-07-07T01:31:23.100112Z", "open_time": "2018-07-09T07:00:00Z", "cp_reveal_time": "2018-07-11T07:00:00Z", "spot_scoring_time": "2018-07-11T07:00:00Z", "scheduled_resolve_time": "2019-01-02T04:31:00Z", "actual_resolve_time": "2019-01-02T04:31:00Z", "resolution_set_time": "2019-01-02T04:31:00Z", "scheduled_close_time": "2018-12-31T05:15:00Z", "actual_close_time": "2018-12-31T05:15:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "American swimmer, Michael Phelps, is the most decorated Olympian in history.\n\nAs Olympic.org [reports](https://www.olympic.org/michael-phelps): \n\n> By the time he retired at Rio 2106 at the age of 31, Michael Phelps had collected a total of 23 golds, three silvers and two bronzes at the Olympics, a record-breaking haul that looks unlikely to be bettered for many years to come.\n\nFollowing his dominating performance in 2016 in Rio, he announced his [retirement](http://www.foxnews.com/entertainment/2017/04/21/michael-phelps-details-his-life-after-retiring-from-swimming.html) from the sport. In no uncertain terms.\n\nThe thing is, he already retired once from the sport--in [no uncertain terms](https://www.npr.org/sections/thetorch/2012/08/08/158422864/michael-phelps-exits-the-olympics-and-enters-retirement-at-27)--in 2012! \n\nSo was Rio really it? 
Is he seriously going to stay out of the pool in 2020 in Toyko?\n\nCheck out discussion along these lines [here](https://www.quora.com/Will-Michael-Phelps-compete-in-the-2020-Tokyo-Olympics).\n\nFellow Olympian Apolo Ohno expressed [doubts](http://www.thepostgame.com/apolo-ohno-michael-phelps-be-2020-olympics) in an interview in February:\"I think [he'll compete in 2020], it's just my personal opinion.\" \n\nIn the wake of 2016, teammate Ryan Lochte also [held out hope](http://www.baltimoresun.com/sports/olympics/88090481-157.html), saying \"I guarantee he will be there... Michael, I'll see you in Tokyo.\"\n\nDespite more [recent statements](http://www.businessinsider.com/michael-phelps-retired-family-done-swimming-2018-4) to the contrary, Phelps left the door open to return in an interview with the [Baltimore Sun](http://www.baltimoresun.com/sports/olympics/bs-sp-michael-phelps-future-0419-20170419-story.html) last August, saying: \"We'll see if I get that itch again.\"\n\nThe report ended with these teasing lines: \"Is he really finished? Stay tuned.\"\n\nQuestion resolves positive if Phelps announces that he's coming out of retirement to compete in the 2020 Olympics before January 1, 2019.", "fine_print": "", "post_id": 1046, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1546221219.118282, "end_time": null, "forecaster_count": 97, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1546221219.118282, "end_time": null, "forecaster_count": 97, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.95, 0.05 ], "means": [ 0.09545240169199638 ], "histogram": [ [ 0.0, 6.412582754497112, 0.9029722499523452, 0.2893001323593672, 0.8692779775219851, 1.6688412669737471, 0.7338388773387357, 0.9503748144584436, 0.885261276204858, 0.005750651984612199, 0.9689225323725634, 0.0006886775860685237, 0.7165793711725532, 0.0, 0.09389829709384126, 0.15663847717220714, 0.0, 0.0, 0.1163825671331607, 0.0, 0.07668440719104168, 0.0, 0.0, 0.0010606664926521671, 0.017990599509988367, 0.18414883145547095, 0.0028831904530317756, 0.0, 1.7128303489503995, 0.0, 0.5936181955786324, 0.07039059569164097, 0.17820523323797913, 0.05389839539644623, 0.0, 0.0004940915054150324, 0.019589177792223862, 0.0, 0.09313308047933455, 0.012630607718125297, 0.011519742289570785, 0.02511113768417862, 0.003260899582300764, 0.0, 0.021304056001353065, 0.05067228029906734, 0.004622951861781569, 0.0, 0.0, 0.0, 0.0029412763711401824, 0.0, 0.0, 0.0, 0.0, 0.008653094397426326, 0.04325450585999784, 0.0, 0.0, 0.0, 0.0007442701451381057, 0.24106033391707232, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 8.530443175152707, "coverage": 0.9996848636518526, "baseline_score": 54.7112178653048, "spot_peer_score": -4.435330665966379, "peer_archived_score": 8.530443175152707, "baseline_archived_score": 54.7112178653048, "spot_peer_archived_score": -4.435330665966379 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1546176931.442033, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1546176931.442033, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, 
"interval_upper_bounds": null, "forecast_values": [ 0.9390284834246262, 0.06097151657537382 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 221, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1044, "title": "Will humanity use a gene drive to wipe at least one species of mosquito off the face of the Earth by 2100?", "short_title": "", "url_title": "", "slug": "will-humanity-use-a-gene-drive-to-wipe-at-least-one-species-of-mosquito-off-the-face-of-the-earth-by-2100", "author_id": 105927, "author_username": "chipman", "coauthors": [], "created_at": "2018-07-07T00:30:54.085945Z", "published_at": "2018-07-09T07:00:00Z", "edited_at": "2025-09-05T17:28:46.865691Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-09T07:00:00Z", "comment_count": 17, "status": "closed", "resolved": false, "actual_close_time": "2020-01-01T05:59:00Z", "scheduled_close_time": "2020-01-01T05:59:00Z", "scheduled_resolve_time": "2101-01-01T05:59:00Z", "actual_resolve_time": null, "open_time": "2018-07-09T07:00:00Z", "nr_forecasters": 156, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32606, "name": "2018-2019 Leaderboard", "slug": "2018_2019_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" }, { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "category": [ { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" }, { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" }, { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2965, "type": "question_series", "name": "Playing God Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2015-11-11T21:52:19Z", "close_date": "2021-11-01T20:50:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.683365Z", "edited_at": "2024-02-29T10:13:56.421164Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2965, "type": "question_series", "name": "Playing God Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2015-11-11T21:52:19Z", "close_date": "2021-11-01T20:50:00Z", 
"forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.683365Z", "edited_at": "2024-02-29T10:13:56.421164Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 1044, "title": "Will humanity use a gene drive to wipe at least one species of mosquito off the face of the Earth by 2100?", "created_at": "2018-07-07T00:30:54.085945Z", "open_time": "2018-07-09T07:00:00Z", "cp_reveal_time": "2018-07-11T07:00:00Z", "spot_scoring_time": "2018-07-11T07:00:00Z", "scheduled_resolve_time": "2101-01-01T05:59:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2020-01-01T05:59:00Z", "actual_close_time": "2020-01-01T05:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Let's not mince words. Mosquitos are a scourge unto the Earth. \n\nConsider this horrifying set of details from a [2002 Nature article](https://www.nature.com/news/2002/021003/full/news021001-6.html): \n\n> Malaria may have killed half of all the people that ever lived. And more people are now infected than at any point in history. There are up to half a billion cases every year, and about 2 million deaths - half of those are children in sub-Saharan Africa. \n\nHow do people contract malaria? You guessed it: mosquitoes. Specifically those from the genus [Anopheles](https://www.cdc.gov/malaria/about/biology/mosquitoes/index.html).\n\nWriting in Slate, journalist Daniel Engber builds a considered case for [wiping out moquitoes](http://www.slate.com/articles/health_and_science/science/2016/01/zika_carrying_mosquitoes_are_a_global_scourge_and_must_be_stopped.html):\n\n> I hold a special reservoir of bile for [these] flying hypodermic needles that... spread bioterror in their wake. I’m mad at the mosquitoes, and it’s time to give ’em hell.\n\nWe have motivation to get the job done, along with gene-editing technology and other tools to do the dirty work. A company called [Oxitec](https://www.oxitec.com/), for instance, uses genetically modified skeeters to reduce pest populations ingeniously.\n\nBut the Oxitec plan would just control numbers. To really do-in a species, we'd need a technology called the [gene drive](https://www.nature.com/news/gene-drive-mosquitoes-engineered-to-fight-malaria-1.18858).\n\nAs Smithsonian [reported](https://www.smithsonianmag.com/innovation/kill-all-mosquitos-180959069/): \n\n> In theory, [we could] wipe out... every species of mosquito... there are around 3,500 of them, of which only about 100 spread human disease. 
You might want to stop at fewer than a dozen species in three genera—Anopheles (translation: “useless,” the malaria mosquito), Aedes (translation: “unpleasant,” the principal vector for yellow fever, dengue and Zika) and Culex (translation: “gnat,” responsible for spreading West Nile, St. Louis encephalitis and other viruses).\n\nAhh, but with great power comes great responsibility. Will we go through with this? More specifically: \n\n*** Before the 21st century is out, will humanity deliberately exterminate at least one species of mosquito using a gene drive? ***\n\nThe positive resolution, a credible estimate of the mosquito population should be consistent with zero, and there should be a compelling argument that this is due to the gene drive (e.g. other species of mosquitos would continue to exist, other methods of controlling this species would have failed, etc.)", "fine_print": "", "post_id": 1044, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1577840522.500052, "end_time": null, "forecaster_count": 156, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.69 ], "interval_upper_bounds": [ 0.79 ] } ], "latest": { "start_time": 1577840522.500052, "end_time": null, "forecaster_count": 156, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.69 ], "interval_upper_bounds": [ 0.79 ], "forecast_values": [ 0.31000000000000005, 0.69 ], "means": [ 0.65470483442152 ], "histogram": [ [ 0.0, 0.0759667903024997, 0.004431894274154124, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04962983984877113, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02884772668736735, 0.0, 0.5596214603483027, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02727454090511225, 0.0, 0.0, 0.0, 1.505964135896207, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12877488354163996, 0.0, 1.9726318387805781, 0.5634815380806114, 0.0, 0.0, 0.0, 0.0005586451968452264, 0.0, 0.0, 0.0, 0.0, 1.1109035740725604, 0.0, 0.0, 0.0, 0.0, 0.6656774578979076, 0.04707492184138248, 0.7531880250708634, 0.0, 0.058056115020803926, 0.5806616404703403, 0.21533833282287487, 0.4758484430868802, 0.0, 0.061011609582199464, 0.45075838161254966, 0.5473017004096271, 0.04581447487530702, 1.5628312228004666, 0.5910620462846118, 0.507758219044196, 0.6373130423608244, 0.0017378489942889836, 0.6386482676571038, 0.26991286094973066, 1.795066976223442, 0.20597277055767832, 1.1449013607799339, 0.031045268756654983, 0.37690822565047555, 1.750976151607965, 0.0, 0.03881039784442918, 0.0, 0.0011977228662977551, 0.2459152281756692, 0.004875572180126605, 0.09985284559586777, 0.726715184741021, 0.0, 1.189865595259997, 0.0, 0.0, 1.023194369234299e-05, 0.04151932098917563, 0.11163471536808486, 0.0, 0.005098068961925024, 0.0, 1.5748855023268065 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1577810601.894256, "end_time": null, "forecaster_count": 154, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1577810601.894256, "end_time": null, "forecaster_count": 154, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.4720321349479757, 0.5279678650520243 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 15, "user_vote": null }, "forecasts_count": 226, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 1043, "title": "Will any state impose a state-wide soda tax by 2025?", "short_title": "", "url_title": "", "slug": 
"will-any-state-impose-a-state-wide-soda-tax-by-2025", "author_id": 105927, "author_username": "chipman", "coauthors": [], "created_at": "2018-07-07T00:14:22.780674Z", "published_at": "2018-07-09T07:00:00Z", "edited_at": "2025-09-05T17:29:25.356244Z", "curation_status": "approved", "curation_status_updated_at": "2018-07-09T07:00:00Z", "comment_count": 4, "status": "resolved", "resolved": true, "actual_close_time": "2019-01-01T05:59:00Z", "scheduled_close_time": "2019-01-01T05:59:00Z", "scheduled_resolve_time": "2025-01-01T06:00:00Z", "actual_resolve_time": "2025-01-04T23:45:00Z", "open_time": "2018-07-09T07:00:00Z", "nr_forecasters": 74, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3688, "name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" } ] }, "question": { "id": 1043, "title": "Will any state impose a state-wide soda tax by 2025?", "created_at": "2018-07-07T00:14:22.780674Z", "open_time": "2018-07-09T07:00:00Z", "cp_reveal_time": "2018-07-10T20:34:56.600654Z", "spot_scoring_time": "2018-07-10T20:34:56.600654Z", "scheduled_resolve_time": "2025-01-01T06:00:00Z", "actual_resolve_time": "2025-01-04T23:45:00Z", "resolution_set_time": "2025-01-04T23:59:33.861225Z", "scheduled_close_time": "2019-01-01T05:59:00Z", "actual_close_time": "2019-01-01T05:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "For decades, a brutal war has been raging in the world of nutrition science. 
\n\nIn the 1960s and 1970s, believers that dietary fat was the enemy--whose ranks included University of Minnesota's [Ancel Keys](https://en.wikipedia.org/wiki/Ancel_Keys) and Harvard's [Fred Stare](https://www.statnews.com/2016/09/12/sugar-industry-harvard-research/)--sparred aggressively with those like [John Yudkin](https://www.telegraph.co.uk/lifestyle/wellbeing/diet/10634081/John-Yudkin-the-man-who-tried-to-warn-us-about-sugar.html) in England and Dr. [Alfred Pennington](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1848046/) (and his disciples, like Dr. Robert Atkins), who thought sugar was the far greater dietary evil.\n\nThe anti-fat crowd won that political battle and got enshrined the idea the \"low fat is healthy\" in monuments like the first [U.S. Dietary Guidelines](https://health.gov/dietaryguidelines/history.htm).\n\nNearly 40 years since those guidelines radically shifted how Americans eat, the pendulum seems to be swinging the other way. Dietary fat's witnessing something of a [renaissance](https://www.bloomberg.com/news/articles/2018-02-22/fat-is-back-and-premium-butter-makers-are-taking-the-cream). While dietary sugar is once again being seen as a [malign force](https://www.youtube.com/watch?v=dBnniua6-oM).\n\nTo that end, policymakers and politicians are beginning to take action to restrict sugar or at least make it less palatable to consumers.\n\nIn the UK, a recently passed [sugar tax](http://theconversation.com/sugar-tax-what-you-need-to-know-94520) has big implications: \"From now on, drinks with a sugar content of more than 5g per 100ml will be taxed 18p per litre and 24p for drinks with 8g or more.\"\n\nIn Mexico, one of the most obese nations in the world, activists managed to [pass a soda tax](https://www.theguardian.com/news/2015/nov/03/obese-soda-sugar-tax-mexico) a few years ago.\n\nAnd in the U.S., cities like Berkeley have already passed similar measures. The [Washington Post](https://www.washingtonpost.com/lifestyle/food/is-a-soda-tax-the-solution-to-americas-obesity-problem/2015/03/23/b6216864-ccf8-11e4-a2a7-9517a3a70506_story.html?utm_term=.3b197915d0d3) shares some key details: \n\n> Berkeley is the first city to impose a tax and the first U.S. experiment with a tax that’s probably high enough to put a dent in consumers’ soda habits. Depending on the product, a penny-per-ounce tax can be heavy; when Coke goes on sale at my supermarket, I can buy 24 cans — 288 ounces — for about $4. A $2.88 tax would mean a 72 percent price increase. For higher-priced energy and fruit drinks, the percentage increase would be smaller. According to Lisa Powell, a professor of health policy and administration at the University of Illinois at Chicago, a penny-per-ounce tax would be about equal to a 17 percent price increase overall. She says that would result in about a 20 percent consumption decline. \n\nAnd then there's this [amazing research](http://drexel.edu/now/archive/2018/April/After-soda-tax-philadelphians-40-percent-less-likely-to-drink-soda-every-day/):\n\n> Almost immediately after the “soda tax” went into place, Philadelphians were 40 percent less likely to drink soda every day, a new Drexel University study found\n\nWhether or not you approve of their nutritional philosophy or tactics, the anti-sugar forces are clearly on the move, and it seems likely that more sugar taxes are in the offing.\n\n*** But will we see a whole state (e.g. 
California) pass into law a tax on soda – with the explicit, written intent to disincentivize soda consumption – by Jan 1, 2025?***", "fine_print": "", "post_id": 1043, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1546266368.494955, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": [ 0.6 ], "centers": [ 0.64 ], "interval_upper_bounds": [ 0.7 ] } ], "latest": { "start_time": 1546266368.494955, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": [ 0.6 ], "centers": [ 0.64 ], "interval_upper_bounds": [ 0.7 ], "forecast_values": [ 0.36, 0.64 ], "means": [ 0.6503530328271195 ], "histogram": [ [ 0.0, 0.010334075246633377, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.006759718771926677, 0.0, 0.0, 0.0, 0.0, 0.030097723134820904, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4247058044294796, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.45601139589949596, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9207832150683483, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2665441514110732, 0.0, 0.0, 0.8112844820169525, 2.43236762344978, 0.24877094235518352, 0.28540215056739904, 0.21145608976200184, 3.0354460085827424, 0.0, 0.6143128118890386, 0.34906719769614464, 0.0043392766864219865, 0.04142588197500583, 1.8167013935544922, 0.0, 0.012782418845274361, 0.0, 0.0, 2.00233830872988, 0.04393255368134378, 0.0, 0.0, 0.0, 0.08257827564797748, 0.0, 0.04809577689274304, 0.03094953649168641, 0.0, 0.8490434008787147, 0.0, 0.0, 0.09467292063321112, 0.0741010729489044, 0.06813627848605193, 0.0, 0.20119791893575537, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23608450737699926 ] ] }, "score_data": { "baseline_score": -91.6452930898174, "peer_score": 26.844844940125363, "coverage": 0.99970082294245, "relative_legacy_score": 0.0, "weighted_coverage": 0.99970082294245, "spot_peer_score": 7.556400768976053, "spot_baseline_score": -32.19280948873623, "baseline_archived_score": -91.6452930898174, "peer_archived_score": 26.844844940125363, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 7.556400768976053, "spot_baseline_archived_score": -32.19280948873623 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1546205111.641471, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1546205111.641471, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.52830970451977, 0.47169029548023 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, "forecasts_count": 106, "key_factors": [], "is_current_content_translated": false, "description": "" } ] }