Posts List Oldapi View
We shared this request example with FAB participants:

```python
url_qparams = {
    "limit": count,
    "offset": offset,
    "has_group": "false",
    "order_by": "-activity",
    "forecast_type": "binary",
    "project": tournament_id,
    "status": "open",
    "type": "forecast",
    "include_description": "true",
}
url = f"{api_info.base_url}/questions/"
response = requests.get(
    url,
    headers={"Authorization": f"Token {api_info.token}"},
    params=url_qparams,
)
```
But we don't want to support all of these parameters. The relevant ones are:

- `order_by`
- `status`
- `project`
- `forecast_type`: we ignore this but assume it's binary, since FAB only supports binary questions for now.
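For illustration, a trimmed-down request that sticks to the supported subset might look like the sketch below; `api_info` and `tournament_id` are assumed to be defined as in the example shared with FAB participants above.

```python
import requests

# Sketch of a request using only the parameters we intend to support.
# `api_info` and `tournament_id` are assumed to exist as in the example
# shared with FAB participants above.
url_qparams = {
    "order_by": "-activity",
    "status": "open",
    "project": tournament_id,
    # forecast_type is ignored server-side; binary is assumed.
}
response = requests.get(
    f"{api_info.base_url}/questions/",
    headers={"Authorization": f"Token {api_info.token}"},
    params=url_qparams,
)
response.raise_for_status()
posts = response.json()["results"]
```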
An example request:

```
GET /api2/questions/?format=api&offset=5520
```
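The endpoint paginates with `limit`/`offset` and wraps results in an envelope carrying `count`, `next`, and `previous` URLs, as the example response below shows. A minimal sketch for walking every page by following `next` (a hypothetical helper, assuming the same token auth as above):

```python
import requests

def iter_questions(base_url: str, token: str, params: dict):
    """Yield posts from every page by following the `next` links
    in the response envelope. Sketch only; not the FAB template code."""
    url = f"{base_url}/questions/"
    headers = {"Authorization": f"Token {token}"}
    while url:
        resp = requests.get(url, headers=headers, params=params)
        resp.raise_for_status()
        page = resp.json()
        yield from page["results"]
        url = page.get("next")   # None once the last page is reached
        params = None            # `next` already encodes the query string
```

The response to the request above looks like this: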
{ "count": 6412, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5540", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5500", "results": [ { "id": 3499, "title": "Will Keir Starmer win the 2020 Labour leadership contest?", "short_title": "", "url_title": "", "slug": "will-keir-starmer-win-the-2020-labour-leadership-contest", "author_id": 111220, "author_username": "JKitson", "coauthors": [], "created_at": "2020-01-13T22:43:04.806035Z", "published_at": "2020-01-16T23:00:00Z", "edited_at": "2025-09-05T17:28:47.533735Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-16T23:00:00Z", "comment_count": 24, "status": "resolved", "resolved": true, "actual_close_time": "2020-04-03T22:00:00Z", "scheduled_close_time": "2020-04-03T22:00:00Z", "scheduled_resolve_time": "2020-04-04T10:53:00Z", "actual_resolve_time": "2020-04-04T10:53:00Z", "open_time": "2020-01-16T23:00:00Z", "nr_forecasters": 61, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 3499, "title": "Will Keir Starmer win the 2020 Labour leadership contest?", "created_at": "2020-01-13T22:43:04.806035Z", "open_time": "2020-01-16T23:00:00Z", "cp_reveal_time": "2020-01-18T23:00:00Z", "spot_scoring_time": "2020-01-18T23:00:00Z", "scheduled_resolve_time": "2020-04-04T10:53:00Z", "actual_resolve_time": "2020-04-04T10:53:00Z", "resolution_set_time": "2020-04-04T10:53:00Z", "scheduled_close_time": "2020-04-03T22:00:00Z", "actual_close_time": "2020-04-03T22:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": 
"On the 13th of December 2019, Jeremy Corbyn, leader of the UK Labour party announced that he would be standing down from the position. Keir Starmer, Rebecca Long-Bailey, Lisa Nandy, Jess Phillips and Emily Thornberry are all standing to be the next leader. The results should be announced on the 4th of April 2020.\n\nQuestion: **Will Keir Starmer win the 2020 Labour leadership contest?**\n\nIf there is more than one Labour leadership contest in 2020, this question resolves according to the first one. This question will resolve as ambiguous if no one has won a Labour leadership contest by 31/12/2020.", "fine_print": "", "post_id": 3499, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1585996342.213424, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": [ 0.84 ], "centers": [ 0.93 ], "interval_upper_bounds": [ 0.98 ] } ], "latest": { "start_time": 1585996342.213424, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": [ 0.84 ], "centers": [ 0.93 ], "interval_upper_bounds": [ 0.98 ], "forecast_values": [ 0.06999999999999995, 0.93 ], "means": [ 0.8950943116673209 ], "histogram": [ [ 0.0, 0.0910103266748584, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0021507104137025754, 0.0, 0.0015651176037633063, 0.0, 0.0, 0.0028115822550663413, 0.006437720576519247, 0.0, 0.0, 0.0, 0.003560195227230837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012156317823425493, 0.0, 0.31167199523019246, 0.5476446756958697, 0.0, 0.0, 0.001034323308801349, 0.0, 0.7724138726447535, 0.004407189337132444, 0.0053628669779807745, 0.0, 0.4568174617418219, 1.2384767072592553, 0.0, 0.0, 0.14115056931138625, 0.6691460720242539, 0.0565326198161923, 0.0, 0.0, 0.0, 2.2399084742081543, 0.41766136975613527, 0.0, 1.0714429073623932, 0.34021084787079703, 1.441442722947907, 0.28794860865874894, 0.38836694811145056, 0.6599471603909078, 3.082502037931433 ] ] }, "score_data": { "peer_score": 15.277665872225022, "coverage": 0.9999869284777992, "baseline_score": 56.51207736496689, "spot_peer_score": 57.98832221791504, "peer_archived_score": 15.277665872225022, "baseline_archived_score": 56.51207736496689, "spot_peer_archived_score": 57.98832221791504 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1585950268.551787, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1585950268.551787, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.3266201591263652, 0.6733798408736348 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 184, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3497, "title": "Have we permanently lost contact with ASTERIA?", "short_title": "", "url_title": "", "slug": "have-we-permanently-lost-contact-with-asteria", "author_id": 106142, "author_username": "AABoyles", "coauthors": [], "created_at": "2020-01-13T17:07:26.183956Z", "published_at": "2020-01-16T23:00:00Z", "edited_at": "2025-09-05T17:28:56.163233Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-16T23:00:00Z", "comment_count": 6, 
"status": "resolved", "resolved": true, "actual_close_time": "2020-04-01T03:59:00Z", "scheduled_close_time": "2020-04-01T03:59:00Z", "scheduled_resolve_time": "2020-05-10T18:18:00Z", "actual_resolve_time": "2020-05-10T18:18:00Z", "open_time": "2020-01-16T23:00:00Z", "nr_forecasters": 61, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 3497, "title": "Have we permanently lost contact with ASTERIA?", "created_at": "2020-01-13T17:07:26.183956Z", "open_time": "2020-01-16T23:00:00Z", "cp_reveal_time": "2020-01-18T20:17:06.661433Z", "spot_scoring_time": "2020-01-18T20:17:06.661433Z", "scheduled_resolve_time": "2020-05-10T18:18:00Z", "actual_resolve_time": "2020-05-10T18:18:00Z", "resolution_set_time": "2020-05-10T18:18:00Z", "scheduled_close_time": "2020-04-01T03:59:00Z", "actual_close_time": "2020-04-01T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "On 2017-11-20, NASA's [Arcsecond Space Telescope Enabling Research in Astrophysics (ASTERIA)](https://www.jpl.nasa.gov/cubesat/missions/asteria.php) [cubesat](https://en.wikipedia.org/wiki/CubeSat) was first deployed from the International Space Station for its 90-day mission. Over the following two years, its mission was extended as its performance continued [nominally](https://english.stackexchange.com/questions/184876/how-did-nominal-come-to-mean-within-acceptable-tolerances).\n\nThat ended when NASA lost contact with ASTERIA on 2019-12-05. 
In [a statement](https://www.jpl.nasa.gov/news/news.php?feature=7568) on 2020-01-03, NASA announced that it expected to continue attempting to reconnect with the cubesat into March. **Will all further attempts to reestablish communications with ASTERIA prior to 2020-03-31 fail?**\n\nNoting that NASA didn't release the information about losing contact with ASTERIA until nearly a month later, this question will resolve positively if no reports of contact with ASTERIA between 2019-12-06 and 2020-03-31 (inclusive) can be found by 2020-05-31.", "fine_print": "", "post_id": 3497, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1585704707.61158, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": [ 0.9 ], "centers": [ 0.95 ], "interval_upper_bounds": [ 0.97 ] } ], "latest": { "start_time": 1585704707.61158, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": [ 0.9 ], "centers": [ 0.95 ], "interval_upper_bounds": [ 0.97 ], "forecast_values": [ 0.050000000000000044, 0.95 ], "means": [ 0.8790431310490215 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8788826202526011, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.028223255188921492, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.002292301674078456, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03550386479693625, 0.0, 0.0, 0.03706793663087933, 0.0, 0.0016681565683498104, 0.002996681779674101, 0.0, 0.0037945794224233387, 0.0011024176184751514, 0.025043422762740086, 0.0, 0.0, 0.0, 0.0, 0.21113090615170743, 0.0, 0.009581012384170293, 0.0, 0.0, 1.5694332387975223, 0.08056048149199234, 0.004697335090360764, 0.0, 0.0, 1.773322332580882, 0.035143034987606944, 0.7407745437089315, 0.3148157324351324, 0.5100066545134191, 3.2021266523265424, 0.8421553878376187, 0.535480877840677, 0.9202674264885515, 2.360268564650977 ] ] }, "score_data": { "peer_score": 8.594673661116532, "coverage": 0.9995248650723134, "baseline_score": 76.23237343973783, "spot_peer_score": -7.624691736531329, "peer_archived_score": 8.594673661116532, "baseline_archived_score": 76.23237343973783, "spot_peer_archived_score": -7.624691736531329 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1585704707.642024, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1585704707.642024, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.2708587696922323, 0.7291412303077677 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 119, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3496, "title": "Will the Doomsday clock advance closer to Midnight?", "short_title": "", "url_title": "", "slug": "will-the-doomsday-clock-advance-closer-to-midnight", "author_id": 106142, "author_username": "AABoyles", "coauthors": [], "created_at": "2020-01-13T14:59:05.350522Z", "published_at": "2020-01-16T08:00:00Z", "edited_at": "2025-09-05T17:28:58.049968Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-16T08:00:00Z", "comment_count": 29, "status": "resolved", "resolved": true, "actual_close_time": "2020-01-23T14:00:00Z", 
"scheduled_close_time": "2020-01-23T14:00:00Z", "scheduled_resolve_time": "2020-01-23T16:07:00Z", "actual_resolve_time": "2020-01-23T16:07:00Z", "open_time": "2020-01-16T08:00:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" }, { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 3496, "title": "Will the Doomsday clock advance closer to Midnight?", "created_at": "2020-01-13T14:59:05.350522Z", "open_time": "2020-01-16T08:00:00Z", "cp_reveal_time": "2020-01-16T16:43:40.040005Z", "spot_scoring_time": "2020-01-16T16:43:40.040005Z", "scheduled_resolve_time": "2020-01-23T16:07:00Z", "actual_resolve_time": "2020-01-23T16:07:00Z", "resolution_set_time": "2020-01-23T16:07:00Z", "scheduled_close_time": "2020-01-23T14:00:00Z", "actual_close_time": "2020-01-23T14:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[The Doomsday Clock](https://thebulletin.org/doomsday-clock/current-time/) is a symbol which represents the likelihood of a anthropogenic global catastrophe. The clock represents catastrophe as \"midnight\" and the Bulletin's opinion on how close the world is to such a as a number of \"minutes\" to midnight. Its original setting in 1947 was seven minutes to midnight. 
Since January 2018, the clock has been set at two minutes to midnight.\n\nThis question asks: **When the Doomsday Clock is next updated, will the time be *later* than two minutes to midnight?**\n\nThe clock need not advance by a full minute; any advance will suffice for a positive resolution. If the clock is left unchanged at two minutes to midnight, or if it is moved back, this question resolves negatively. If the Bulletin of the Atomic Scientists announces that the Doomsday Clock is to be discontinued before any relevant time changes (or decisions to leave it unchanged) are announced, this question resolves ambiguously.\n\nSee also Metaculus' previous forecasts of the Clock's direction [for 2017](https://www.metaculus.com/questions/167/will-the-doomsday-clock-advance-toward-midnight-at-the-end-of-this-year/) and [for 2019](https://www.metaculus.com/questions/1668/will-the-doomsday-clock-move-closer-to-midnight-when-it-is-next-updated/).", "fine_print": "", "post_id": 3496, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1579718596.194782, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.29 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.46 ] } ], "latest": { "start_time": 1579718596.194782, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.29 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.46 ], "forecast_values": [ 0.6, 0.4 ], "means": [ 0.4151554173495606 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.014369596090439076, 0.0, 0.0, 0.0, 0.0, 0.8243121018521602, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3415440718595426, 0.0, 0.0, 0.0, 0.6939525952509218, 0.0094962884186239, 0.0, 0.0, 0.6134675762751818, 0.0, 0.059105746561956225, 0.14731282932738304, 0.0, 0.1083449769120332, 0.0, 0.24311673443421403, 0.0, 1.0110688823644178, 0.11160910633783082, 0.0, 1.4754522137874202, 0.42089866646249546, 0.7620908974955233, 0.21572547604369705, 0.0, 0.019746017729336655, 0.8355804261814468, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07016860785683743, 0.0, 0.16798662710965054, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0492373430437701, 0.0, 0.0, 0.0, 1.0, 0.6308407491715394, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 11.35643494290416, "coverage": 0.9965409173469129, "baseline_score": -42.600299861873246, "spot_peer_score": 0.10342501391757748, "peer_archived_score": 11.35643494290416, "baseline_archived_score": -42.600299861873246, "spot_peer_archived_score": 0.10342501391757748 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1579471348.843612, "end_time": null, "forecaster_count": 31, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1579471348.843612, "end_time": null, "forecaster_count": 31, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7060911608098628, 0.29390883919013716 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 7, "user_vote": null }, "forecasts_count": 52, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3495, "title": "Will Whole Woman's Health be overturned in 2020?", "short_title": "", "url_title": "", "slug": "will-whole-womans-health-be-overturned-in-2020", "author_id": 104761, "author_username": "Tamay", 
"coauthors": [], "created_at": "2020-01-12T22:07:56.387065Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:29.008436Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 20, "status": "resolved", "resolved": true, "actual_close_time": "2020-12-31T23:54:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-12-31T23:54:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 85, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3495, "title": "Will Whole Woman's Health be overturned in 2020?", "created_at": "2020-01-12T22:07:56.387065Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T08:00:00Z", "spot_scoring_time": "2020-01-16T08:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-12-31T23:54:00Z", "resolution_set_time": "2020-12-31T23:54:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2020-12-31T23:54:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Whole Woman's Health v. Hellersted](https://en.wikipedia.org/wiki/Whole_Woman%27s_Health_v._Hellerstedt), is a United States Supreme Court case decided on June 27, 2016. The Court ruled 5–3 that Texas cannot place restrictions on the delivery of abortion services that create an undue burden for women seeking an abortion. The landmark case ensures states must show proof of an actual health benefit when enacting or passing abortion laws.\n\nIn December, Texas Attorney General Ken Paxton [called on the U.S. Supreme Court](https://www.austinchronicle.com/news/2020-01-10/ken-paxton-urges-u-s-supreme-court-to-overturn-historic-whole-womans-ruling/) to dismiss the legal challenge to a Louisiana law that requires abortion providers to obtain hospital admitting privileges within 30 miles of the clinic, and thereby essentially toss out the Whole Woman's Health ruling.\n\nAccording Vox's [Dylan Matthews](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions):\n\n\n> This term, [the Supreme Court will hear and rule in the case of June Medical Services LLC v. Gee](https://www.vox.com/2019/10/4/20874618/supreme-court-louisiana-abortion-law-scotus-gee), a challenge to a Louisiana law requiring abortion providers to have admitting privileges at a nearby hospital.\n\n>As my colleagues [Anna North and Ian Millhiser explain](https://www.vox.com/2019/10/4/20874618/supreme-court-louisiana-abortion-law-scotus-gee), abortion rights advocates consider this restriction both medically unnecessary (the rate of complications for first-trimester abortions is very low, and you don’t need admitting privileges to send people with complications to a nearby hospital) and designed to shut down abortion clinics.\n\n>But more importantly, the Supreme Court already struck down a nearly identical Texas law in 2016’s Whole Women’s Health v. Hellerstedt. The fact that it’s hearing this case so soon after setting a precedent that admitting privileges laws are unconstitutional suggests strongly that the Court — which has since added Neil Gorsuch and Brett Kavanaugh and lost abortion rights supporter Anthony Kennedy — is ready to overrule Whole Women’s Health and allow more state restrictions on abortion.\n\n>I think there’s a real, maybe 20-30 percent chance that the anti-abortion rights majority on the court overrules Roe v. Wade outright, pulling off the Band-aid and eliminating the constitutional right to abortion in one fell swoop, [as many GOP politicians have urged them to do with this case](https://www.vox.com/2020/1/2/21047079/abortion-rights-republicans-call-overturn-roe-v-wade). But whether or not Roe falls, I think it’s a near-certainty that Whole Women’s Health will fall. \n\n**Will Whole Woman's Health be overturned in 2020?**\n\nThis question resolves positively if Vox, the NYT, WaPo, Reuters, Politico, or the Associated Press reports that the Whole Woman's Health v. 
Hellersted is effectively overturned before the end of 2020.", "fine_print": "", "post_id": 3495, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609450413.971197, "end_time": null, "forecaster_count": 85, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1609450413.971197, "end_time": null, "forecaster_count": 85, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.022323750626013026 ], "histogram": [ [ 0.0, 14.791158887645215, 0.06464763481520866, 0.0, 0.11666173967363361, 0.23935357943813185, 1.006879476280195, 0.0, 0.0, 0.0, 0.3940575689669821, 0.0, 0.0, 0.0, 0.0, 0.03997326364240643, 0.0, 0.0016763820783180668, 0.007745482461618098, 0.0, 0.1761834171421036, 0.0, 0.0, 0.0, 0.0, 0.004178148511023217, 0.0, 0.0, 0.004764507471173336, 0.0, 0.0, 0.0, 0.0, 0.0023407900061487357, 0.0, 0.011989632823413173, 0.0, 0.0, 0.001990151609851139, 0.0, 0.0027314582170858293, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0013964902613327744, 0.0, 0.0, 0.005409792956936867, 0.006118496811426084, 0.0, 0.0068953792186266984, 0.0, 0.0, 0.0036464791920768538, 0.0, 0.0, 0.0, 0.0, 0.003573061018341814, 0.002074706495258939, 0.0, 0.0, 0.0, 0.000560044871523829, 0.0, 0.0, 0.0, 0.0, 0.008943482860102126, 0.0, 0.0007321358620784834, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03804943800146037 ] ] }, "score_data": { "peer_score": 29.80717099114229, "coverage": 0.9988237481605435, "baseline_score": 41.421476583289284, "spot_peer_score": -34.890642213719396, "peer_archived_score": 29.80717099114229, "baseline_archived_score": 41.421476583289284, "spot_peer_archived_score": -34.890642213719396 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609450414.005591, "end_time": null, "forecaster_count": 85, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609450414.005591, "end_time": null, "forecaster_count": 85, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9749829974277278, 0.025017002572272205 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 9, "user_vote": null }, "forecasts_count": 238, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3493, "title": "Will Prediction Markets judge the Democratic primary to be settled after Super Tuesday?", "short_title": "", "url_title": "", "slug": "will-prediction-markets-judge-the-democratic-primary-to-be-settled-after-super-tuesday", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T20:21:53.019104Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:28:58.099530Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 11, "status": "resolved", "resolved": true, "actual_close_time": "2020-02-26T09:24:12Z", "scheduled_close_time": "2020-02-26T09:24:12Z", "scheduled_resolve_time": "2020-03-10T20:32:00Z", "actual_resolve_time": "2020-03-10T20:32:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 58, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } 
], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3493, "title": "Will Prediction Markets judge the Democratic primary to be settled after Super Tuesday?", "created_at": "2020-01-12T20:21:53.019104Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-15T22:21:10.539810Z", "spot_scoring_time": "2020-01-15T22:21:10.539810Z", "scheduled_resolve_time": "2020-03-10T20:32:00Z", "actual_resolve_time": "2020-03-10T20:32:00Z", "resolution_set_time": "2020-03-10T20:32:00Z", "scheduled_close_time": "2020-02-26T09:24:12Z", "actual_close_time": "2020-02-26T09:24:12Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "According to Vox's [Kelsey Piper](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions):\n\n> I observed above that the primaries are quite a tossup — there are still four candidates with a very reasonable shot at winning. But I still have this feeling they’ll be over quickly. There’s the four early caucuses and primaries: Iowa, New Hampshire, South Carolina, and Nevada. 
Then on March 3rd, a large, demographically balanced (representing the overall Democratic electorate pretty well) set of states will vote.\n\n> Two-person races often drag out well past Super Tuesday. But I think this four-person race might be primed to end pretty fast. Once a candidate has gotten a series of wins, voters might be pretty quick to rally behind them — and a Super Tuesday lead might quickly become insurmountable.\n\n**Will the Democratic primary be settled on Super Tuesday?**\n\nThis question resolves positively if [Electionbettingodds.com](https://electionbettingodds.com/) assigns one candidate at least 90% of winning the Democratic primary at some point within the 7-day period starting on Super Tuesday (Tuesday, 3 March).", "fine_print": "", "post_id": 3493, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1582700238.296324, "end_time": null, "forecaster_count": 57, "interval_lower_bounds": [ 0.23 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.43 ] } ], "latest": { "start_time": 1582700238.296324, "end_time": null, "forecaster_count": 57, "interval_lower_bounds": [ 0.23 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.43 ], "forecast_values": [ 0.6699999999999999, 0.33 ], "means": [ 0.34510328484898845 ], "histogram": [ [ 0.0, 0.036618791251766876, 0.0, 0.0, 0.7635897992428782, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05144413896398135, 0.0, 0.0, 0.004923348169013251, 0.0, 0.0, 0.9356445192922368, 0.1792663589359841, 0.0, 0.0, 1.1110956848606341, 0.0, 0.0, 0.34331951429497504, 0.0, 1.2823869422392875, 0.0, 0.2502185777012937, 0.028267734020512116, 0.0, 0.42950962997243175, 0.0, 0.0060946454249203655, 2.217977193439695, 0.0, 0.10452470613861276, 0.5503932751784225, 0.4994839393471079, 0.0, 0.23059971548004934, 0.3160893803856667, 0.0, 0.37457559292106957, 0.6647089477243179, 0.6195470556045188, 0.00741623973729105, 0.0, 0.0, 0.0, 0.0, 0.8942763949192901, 0.0, 0.0, 0.0, 0.0, 0.025302524918872856, 0.0105689540850258, 0.04606515460131248, 0.0, 0.0, 1.0145057574200118, 0.0, 0.0, 0.0, 0.0, 0.13921407200555425, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05730206168043847, 0.06367247509170942, 0.0, 0.0, 0.3348090926083014, 0.012431064033118435, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 4.7467368621690955, "coverage": 0.9904785336955841, "baseline_score": 4.764014076008758, "spot_peer_score": 21.98776373434933, "peer_archived_score": 4.7467368621690955, "baseline_archived_score": 4.764014076008758, "spot_peer_archived_score": 21.98776373434933 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1582700238.369854, "end_time": null, "forecaster_count": 57, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1582700238.369854, "end_time": null, "forecaster_count": 57, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8116453032653685, 0.1883546967346315 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 131, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3492, "title": "Will the GOP hold the Senate on January 3rd 2021?", "short_title": "GOP holds US Senate on 2021-01-03.", "url_title": "GOP holds US Senate on 2021-01-03.", "slug": "gop-holds-us-senate-on-2021-01-03", "author_id": 
104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T20:11:02.373594Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:00.356659Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 81, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-01T01:00:00Z", "scheduled_close_time": "2021-01-01T01:00:00Z", "scheduled_resolve_time": "2021-01-04T17:07:00Z", "actual_resolve_time": "2021-01-04T17:07:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 262, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3492, "title": "Will the GOP hold the Senate on January 3rd 2021?", "created_at": "2020-01-12T20:11:02.373594Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-15T11:20:51.685534Z", "spot_scoring_time": "2020-01-15T11:20:51.685534Z", "scheduled_resolve_time": "2021-01-04T17:07:00Z", "actual_resolve_time": "2021-01-04T17:07:00Z", "resolution_set_time": "2021-01-04T17:07:00Z", "scheduled_close_time": "2021-01-01T01:00:00Z", "actual_close_time": "2021-01-01T01:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, 
"open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Currently, with 53 seats, the Republicans occupy a majority of the 100 seats in the Senate [(Senate.gov)](https://www.senate.gov/history/partydiv.htm).\n\nThe [2020 United States Senate elections](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions) will be held on November 3, 2020, with the 33 Class 2 seats of the Senate being contested in regular elections.\n\nAccording to Vox's [Dylan Matthews](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions), the Republican stronghold can be upset:\n\n> There’s a chance, if literally everything breaks in Democrats’ favor, that they retake the Senate. But it requires a lot going right for them, and even one botched race means Republicans hold control.\n\n> On paper this should have been a promising year for Dems. Twenty-three Republican seats are up, compared to only 12 Democratic seats; these were, except for a couple of special elections, seats that were last open in 2014, when Republicans gained a whopping nine seats. You would think that Democrats could regain some of those nine that they lost, but you’d mostly be wrong. Democrats lost seats in Alaska, Arkansas, Louisiana, South Dakota, and West Virginia that they’re basically not contesting this time around. Iowa and Montana look only slightly better.\n\n> Instead, Democrats’ hopes rest on the two 2014 losses they think they can reverse — in North Carolina and Colorado — as well as a special election in Arizona, an unlikely Alabama seat they won in 2017, and Susan Collins’ once-safe seat in Maine that they’re hoping her vote for Kavanaugh will make competitive.\nSweeps of this magnitude do happen (2006 and 2008 both saw huge Democratic sweeps), they’re rare, especially as the parties have polarized geographically, and Democrats are underdogs in Alabama and North Carolina in particular. 
There’s a chance the Dems pull it out, but I think it’s quite unlikely.\n\n**Will the GOP hold the Senate in 2020?**\n\nThis resolves positively if the Senate Majority leader elected at the start of the 2021 congressional session is a Republican.", "fine_print": "", "post_id": 3492, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609460808.15639, "end_time": null, "forecaster_count": 261, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1609460808.15639, "end_time": null, "forecaster_count": 261, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.9513265863406036 ], "histogram": [ [ 0.0, 0.00011410381748289475, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.1013833179524543e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.890306463085183e-05, 0.0, 9.14388768928344e-05, 0.0, 0.00013046678685194694, 0.0, 0.0, 0.0002743967707933561, 0.0, 0.0, 0.0, 0.0, 0.004260912509489666, 0.0, 0.0, 0.0002018831601830155, 0.0, 0.00017128535529460434, 0.0, 9.885728634569143e-05, 0.0001034130268201654, 0.0, 0.0, 0.0, 1.4296519793476644e-05, 0.0, 0.0, 0.0002789917454775173, 0.0, 4.58066895056436e-05, 0.0, 1.0490099382478893e-05, 0.005536035759513936, 0.0, 5.815665826709477e-05, 3.281770089985382e-05, 1.913499310780921e-05, 3.0101613744710394e-05, 0.5318960934940012, 0.0, 0.0, 0.0010828970577812067, 0.004943362787962739, 0.0, 0.005031569551412101, 0.0, 0.00092073063788743, 0.004954006578192512, 0.0, 0.0, 0.064629355209261, 0.43875193104411403, 0.2993770303389158, 0.0, 0.0, 0.9621611360066158, 0.0, 0.003971696302186987, 0.21995547983017483, 1.2772036579247002e-05, 0.0, 0.0, 0.03967972874095921, 0.0031047912635142876, 0.04428587015221921, 0.004801434291320513, 0.001824158271192322, 0.04021460020580597, 0.0006979940329030641, 0.6265455316312615, 0.0, 0.05763717351455898, 0.2189592781894918, 0.7209148937711882, 0.23903163026430402, 0.711713492205042, 0.01886463133225214, 2.5807501097286853, 0.21453213184901399, 1.411135812735582, 3.602521803034228, 17.727092398422926 ] ] }, "score_data": { "peer_score": -6.230944106954329, "coverage": 0.9988470773598157, "baseline_score": 26.548849737535463, "spot_peer_score": -0.43012065017031675, "peer_archived_score": -6.230944106954329, "baseline_archived_score": 26.548849737535463, "spot_peer_archived_score": -0.43012065017031675 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609460808.219575, "end_time": null, "forecaster_count": 261, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609460808.219575, "end_time": null, "forecaster_count": 261, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.045383070098865996, 0.954616929901134 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 34, "user_vote": null }, "forecasts_count": 1056, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3491, "title": "Will China fail to curtail its internment camps programs for Uyghurs and Muslims in 2020?", "short_title": "", "url_title": "", "slug": "will-china-fail-to-curtail-its-internment-camps-programs-for-uyghurs-and-muslims-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T18:31:18.041660Z", 
"published_at": "2020-01-14T00:00:00Z", "edited_at": "2025-09-05T17:29:07.312588Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T00:00:00Z", "comment_count": 30, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-01T00:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-03T23:03:00Z", "actual_resolve_time": "2021-01-03T23:03:00Z", "open_time": "2020-01-14T00:00:00Z", "nr_forecasters": 146, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3491, "title": "Will China fail to curtail its internment camps programs for Uyghurs and Muslims in 2020?", "created_at": "2020-01-12T18:31:18.041660Z", "open_time": "2020-01-14T00:00:00Z", "cp_reveal_time": "2020-01-16T00:00:00Z", "spot_scoring_time": "2020-01-16T00:00:00Z", "scheduled_resolve_time": "2021-01-03T23:03:00Z", "actual_resolve_time": "2021-01-03T23:03:00Z", "resolution_set_time": "2021-01-03T23:03:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2021-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, 
"zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The Xinjiang re-education camps are internment camps that have been operated by the Xinjiang Uygur government for the purpose of indoctrinating Uyghur Muslims since 2017 as part of a \"people's war on terror\" announced in 2014 ([Wikipedia](https://en.wikipedia.org/wiki/Xinjiang_re-education_camps)).\n\nThese camps are reportedly operated outside the legal system and many Uyghurs have been interned without trial with no charges levied against them. Local authorities are reportedly holding hundreds of thousands of Uyghurs and Muslims from other ethnic minorities in these camps, for the stated purpose of countering extremism and terrorism as well as to promote [sinicization](https://en.wikipedia.org/wiki/Sinicization) ([ibid.](https://en.wikipedia.org/wiki/Xinjiang_re-education_camps)).\n\n**Will China fail to partially curtail its internment camps programs for Uyghurs and Muslims in 2020?**\n\nThis question resolves negatively if any of the following conditions obtain:\n\n 1. At least two independent credible reports indicate a reduction of the inmate population of at least 200,000 inmates relative to their latest estimates for 2019.\n\n or\n\n 2. At least two independent credible reports provide compelling evidence that there was a net reduction of at least 200 internment camps in the Xinjiang Uygur Autonomous Region.\n\nThe question resolves positively if neither of these two conditions have been satisfied by the end of 2020.", "fine_print": "", "post_id": 3491, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609452044.640886, "end_time": null, "forecaster_count": 145, "interval_lower_bounds": [ 0.98 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1609452044.640886, "end_time": null, "forecaster_count": 145, "interval_lower_bounds": [ 0.98 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.982434783549659 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0020079465742677107, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007131897417585168, 0.0, 0.0, 0.00016247770129069366, 0.0, 0.0006418353065942569, 0.0, 0.00036395185950732447, 0.0008747306226863019, 0.00011838191657698926, 0.0001392392448195829, 0.0003217954126493845, 0.00021690668860572797, 0.0, 0.00018829645220658223, 0.0, 0.0004101638304265817, 0.018909946143259644, 0.0, 0.0, 0.0, 0.0, 8.306864301159443e-05, 0.00028341133569470734, 0.0, 4.3550273315147114e-05, 0.0, 0.055431466008215774, 0.005532597936500438, 0.006015647134756919, 0.8525821634637379, 0.0372872499520307, 1.2051489943935287, 0.20357166164331647, 1.1159772245753061, 2.877138334635292, 16.202492423160344 ] ] }, "score_data": { "peer_score": 4.314278538646284, "coverage": 0.9979382314346241, "baseline_score": 81.85718052852835, "spot_peer_score": 17.243898331676327, "peer_archived_score": 4.314278538646284, "baseline_archived_score": 81.85718052852835, "spot_peer_archived_score": 17.243898331676327 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609452044.766621, "end_time": null, 
"forecaster_count": 145, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609452044.766621, "end_time": null, "forecaster_count": 145, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.005022496411854149, 0.9949775035881459 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 361, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3490, "title": "Will facial recognition be banned in at least 3 more U.S. cities in 2020?", "short_title": "", "url_title": "", "slug": "will-facial-recognition-be-banned-in-at-least-3-more-us-cities-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T17:20:01.172932Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:07.068222Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 40, "status": "resolved", "resolved": true, "actual_close_time": "2020-06-23T07:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-06-23T07:00:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 97, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3688, "name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": 
"exclude_and_show" } }, "question": { "id": 3490, "title": "Will facial recognition be banned in at least 3 more U.S. cities in 2020?", "created_at": "2020-01-12T17:20:01.172932Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-14T19:37:11.741278Z", "spot_scoring_time": "2020-01-14T19:37:11.741278Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-06-23T07:00:00Z", "resolution_set_time": "2020-06-23T07:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2020-06-23T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "San Francisco [banned facial recognition use](https://www.google.co.uk/search?hl=en&q=San+Francisco+banned+facial+recognition+use&meta=) by city and county agencies in May of 2019. Somerville, Massachusetts [followed suit](https://www.wbur.org/bostonomix/2019/06/28/somerville-bans-government-use-of-facial-recognition-tech) in June of that year. And in July 2019, Oakland, California, [became the latest](https://www.vice.com/en_us/article/zmpaex/oakland-becomes-third-us-city-to-ban-facial-recognition-xz) to ban city departments — including police — from using facial-recognition technology.\n\nAccording to this [Vox article](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions) by Sigal Samuel, Kelsey Piper, and Dylan Matthews:\n\n> In 2019, we saw a growing backlash against facial recognition technology. San Francisco, Oakland, and Berkeley banned it, as did three communities in Massachusetts: Somerville, Brookline, and Northampton. In 2020, I predict we’ll see at least three more cities institute a ban on the controversial tech.\n\n> To be clear, I’m talking about a ban that applies to city departments like police; I think outright bans that would also cover businesses, individuals, and federal agencies are way less likely.\n\n> I’m partly going off local news about particular cities — Portland is currently deliberating over a ban, and the western Massachusetts city of Springfield might be next. Last year saw mounting pushback against facial recognition from AI researchers, groups like the ACLU, low-income tenants in Brooklyn, and many more. Their protests seem to be growing bolder, not quieter. \nI should note that according to Pew Research Center survey data, most Americans are now in favor of police using facial recognition. I don’t think a nationwide ban is in the cards for 2020 (sorry, Bernie). But a lot can still happen on the city level, and I think it will.\n\n**Will facial recognition be banned in at least 3 more U.S. cities in 2020?**\n\nThis question resolves positively if at least three U.S. cities pass legislation that bans the local government use of facial-recognition technology before the end of 2020. 
To count, the relevant legislation needs to be passed, but the bans need not go into effect before the end of 2020. In case of ambiguity we will adopt the resolution by Dylan Matthews and Kelsey Piper in their assessments of their 2020 predictions.", "fine_print": "", "post_id": 3490, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1593212508.427265, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": [ 0.8 ], "centers": [ 0.87 ], "interval_upper_bounds": [ 0.95 ] } ], "latest": { "start_time": 1593212508.427265, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": [ 0.8 ], "centers": [ 0.87 ], "interval_upper_bounds": [ 0.95 ], "forecast_values": [ 0.13, 0.87 ], "means": [ 0.8435907585731434 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18912594285765036, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.027746324208746075, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.011792323513531065, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0069913547155140855, 0.27938762653101, 0.0, 0.016478897344287303, 0.0, 0.019071393092777264, 0.0, 0.20017853360851276, 0.010843095881615717, 0.0, 0.0, 0.02983409897790073, 0.0, 0.009134070348939649, 0.04541843129948949, 0.0, 0.023942442062986697, 0.012809810214353387, 0.0028577210314313546, 0.0928295350872076, 0.0, 0.0, 0.04637631829989633, 0.3616875435935225, 0.28475931157672196, 0.013899637672086875, 0.050481084411424036, 0.059336336525151766, 0.15720655874101852, 0.0012776506094414805, 0.0025589185429359847, 0.0816090587579221, 0.0, 0.22969492631848037, 0.04241740016701681, 0.0, 0.0715083065639535, 0.0, 1.5347209542109543, 0.5202384278530006, 0.0, 0.005806345621716643, 0.0, 1.4946249940600906, 0.784846217452747, 0.008148424796983637, 0.0, 0.0, 2.953767476180304, 0.0, 1.0, 0.0, 0.0, 1.731448581776085, 0.0, 0.582275501896683, 0.711200017235199, 0.4747595592880645, 0.666351468116201, 0.0, 0.0, 1.3144566811415563, 3.2207409747342988 ] ] }, "score_data": { "peer_score": -0.3290377492877755, "coverage": 0.45544648342988686, "baseline_score": 18.70043795101076, "spot_peer_score": -0.5425557006973637, "peer_archived_score": -0.3290377492877755, "baseline_archived_score": 18.70043795101076, "spot_peer_archived_score": -0.5425557006973637 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1592849482.375125, "end_time": null, "forecaster_count": 97, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1592849482.375125, "end_time": null, "forecaster_count": 97, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.3186462865266848, 0.6813537134733152 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 14, "user_vote": null }, "forecasts_count": 213, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3489, "title": "Will global carbon emissions in 2020 be higher than they were in 2019?", "short_title": "", "url_title": "", "slug": "will-global-carbon-emissions-in-2020-be-higher-than-they-were-in-2019", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T17:09:32.565075Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:28:58.917250Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 13, "status": "resolved", "resolved": true, "actual_close_time": 
"2021-01-01T00:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-02-07T18:42:00Z", "actual_resolve_time": "2021-02-07T18:42:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 143, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3489, "title": "Will global carbon emissions in 2020 be higher than they were in 2019?", "created_at": "2020-01-12T17:09:32.565075Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-15T06:35:07.414933Z", "spot_scoring_time": "2020-01-15T06:35:07.414933Z", "scheduled_resolve_time": "2021-02-07T18:42:00Z", "actual_resolve_time": "2021-02-07T18:42:00Z", "resolution_set_time": "2021-02-07T18:42:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2021-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Global emissions of CO₂ has the largest global warming potential, compared to all other greenhouse gasses, such as nitrous oxide, methane, and other [fluorinated 
gases](https://en.wikipedia.org/wiki/Fluorinated_gases). For example, in 2015, 76% of GHG emissions in CO₂ equivalent/yr was just CO₂ [(IPPC, 2018)](https://www.ipcc.ch/site/assets/uploads/2018/02/SYR_AR5_FINAL_full.pdf). In that year, an estimated 50.9 gigatonnes of CO₂ equivalent (GtCO2e) was emitted globally [(World Bank, 2019)](https://data.worldbank.org/indicator/EN.ATM.GHGT.KT.CE).\n\nAccording to this [Vox article](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions) by Sigal Samuel, Kelsey Piper, and Dylan Matthews:\n\n> In [most recent years](https://www.vox.com/future-perfect/2020/1/7/21051910/predictions-trump-brexit-recession-2019-2020) (though not all of them), global carbon emissions have increased from the previous year. Saying there’s an 80 percent chance they’ll increase again, then, is a much more conservative prediction than it sounds. (This is called reference class forecasting, where you predict a statistic by looking at what prediction would have been correct in recent years or in other events like this one.)\n\n> Why are CO2 levels still increasing? Well, global population is still increasing (though more slowly than ever) and our efforts to decrease greenhouse gas emissions have been...lackluster, to say the least. Major polluters have made only token steps to reduce their emissions, and no country has cut their emissions as fast as the IPCC says that we need to. That’s a scary trend, and it makes me deeply unhappy. But I don’t see a lot of reason to expect it to change in 2020 — not with the same people in power across most of the world’s biggest emitters, and the same incentives for shortsighted climate policy around the world\n\n**Will global Global carbon emissions in 2020 be higher than they were in 2019?**\n\nThis question resolves positively if preliminary estimates produced in the first half of 2021 indicate that global carbon emissions alone (not GHG emissions in CO2e) in 2020 was higher than emissions in 2019.", "fine_print": "", "post_id": 3489, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609450704.056418, "end_time": null, "forecaster_count": 143, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1609450704.056418, "end_time": null, "forecaster_count": 143, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.016574158153521458 ], "histogram": [ [ 0.0, 18.586941785359873, 1.1086631550083443, 0.08988166438993273, 1.3541630387690136, 0.8223055825076788, 0.002584403006121208, 0.031195839509106957, 0.05334926073074192, 0.15410776142854676, 0.06523640558453143, 0.00995389333145613, 0.00702513572895421, 0.024427408159604987, 0.0, 0.002376370488814467, 0.0, 0.0, 0.0011567877219808393, 0.0, 0.025528264231918948, 5.993842542530222e-05, 0.0, 0.007542565285863104, 0.0, 0.028229611249952796, 0.0, 0.0, 0.0, 0.0, 0.015373161072905622, 0.0, 0.0, 0.0, 0.0, 0.00044580895221323335, 0.008676316626368738, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005650954782903663, 0.0, 0.0, 0.011390837559926292, 0.000626297507510926, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.000349760912830967, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.741357047952185e-05, 0.0005058879239527044, 0.0, 0.0, 0.0, 0.0, 0.0001104069187429836, 0.0, 0.00012866984915586968, 0.0, 0.00010838361664361995, 0.0, 0.0, 0.0001765977602049539, 
0.00020466027922642828, 0.00030804108319352715, 0.0006845434057629713, 0.0005271207946484044, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 43.400992910142726, "coverage": 0.9988310736261748, "baseline_score": 38.67346017785672, "spot_peer_score": 29.08662822436884, "peer_archived_score": 43.400992910142726, "baseline_archived_score": 38.67346017785672, "spot_peer_archived_score": 29.08662822436884 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609450704.221361, "end_time": null, "forecaster_count": 143, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609450704.221361, "end_time": null, "forecaster_count": 143, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9798011077240899, 0.020198892275910072 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 15, "user_vote": null }, "forecasts_count": 338, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3488, "title": "Will California have a wildfire among the 10 most destructive in state history in 2020?", "short_title": "", "url_title": "", "slug": "will-california-have-a-wildfire-among-the-10-most-destructive-in-state-history-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T16:23:06.049637Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:25.237981Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 34, "status": "resolved", "resolved": true, "actual_close_time": "2020-08-27T13:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-08-27T13:00:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 99, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, 
"bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3488, "title": "Will California have a wildfire among the 10 most destructive in state history in 2020?", "created_at": "2020-01-12T16:23:06.049637Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T08:00:00Z", "spot_scoring_time": "2020-01-16T08:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-08-27T13:00:00Z", "resolution_set_time": "2020-08-27T13:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2020-08-27T13:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "California has dry, windy, and often hot weather conditions from spring through late autumn that can produce moderate to devastating wildfires. As a result, many wildfires are somewhat of [a common occurence in California](https://en.wikipedia.org/wiki/List_of_California_wildfires). \n\nAccording to the [California Department of Forestry and Fire Protection](https://en.wikipedia.org/wiki/California_Department_of_Forestry_and_Fire_Protection), the most destructive wildfire was [the Camp Fire of November 2018](https://www.fire.ca.gov/media/5511/top20_destruction.pdf), which spanned 153,336 acres, destroyed an estimated 18,804 structures and killed 85 people.\n\nAccording to [Vox's Umair Irfan](https://www.vox.com/2018/8/7/17661096/california-wildfires-2018-camp-woolsey-climate-change),\n\n> The overall trend in California is troubling too. Six of California’s 10 most destructive wildfires on record have hit in just the past three years.\n\nSimilarly, According to Vox's [Kelsey Piper](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions):\n\n> [On a list of the ten most destructive wildfires](https://www.fire.ca.gov/media/5511/top20_destruction.pdf) since records started being kept in 1932, (from the California Department of Forestry and Fire Protection), five happened in the last decade. 
That suggests that the “new normal” is a record fire about every other year — though 2020 is likely to be worse than the early parts of the decade, as the effects of climate change worsen.\n\n**Will California have a wildfire among the 10 most destructive in state history in 2020?**\n\nThis question resolves positively if the [California Department of Forestry and Fire Protection](https://en.wikipedia.org/wiki/California_Department_of_Forestry_and_Fire_Protection) estimates that a wildfire that occurs in 2020 destroys more structures than the 10th most destructive wildfire to date (which currently is the [Thomas fire in Ventura & Santa Barbara](https://www.fire.ca.gov/media/5511/top20_destruction.pdf) which destroyed 1,063 structures).", "fine_print": "", "post_id": 3488, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1598538899.819248, "end_time": null, "forecaster_count": 99, "interval_lower_bounds": [ 0.9 ], "centers": [ 0.95 ], "interval_upper_bounds": [ 0.98 ] } ], "latest": { "start_time": 1598538899.819248, "end_time": null, "forecaster_count": 99, "interval_lower_bounds": [ 0.9 ], "centers": [ 0.95 ], "interval_upper_bounds": [ 0.98 ], "forecast_values": [ 0.050000000000000044, 0.95 ], "means": [ 0.9107722260937432 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.011417034368343237, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0070842988660632985, 0.0, 0.0012218060671938978, 0.0, 0.0, 0.0, 0.0, 0.01491613561893787, 0.0, 0.0, 0.0, 0.0, 0.01017311500656513, 0.048763470183996814, 0.009481888734741973, 0.002606167907938849, 0.010412903050242759, 0.004846831003380211, 0.020440797383905543, 0.07058441746289859, 0.0, 0.0175699387993486, 0.06382495974923517, 0.015523235489570585, 0.02993113043447708, 0.0, 0.0, 0.028818142748531093, 0.02719346795778013, 0.0, 0.0, 1.180675387288647, 0.15022645871380552, 0.08487636612376773, 0.08234640041084564, 0.0, 0.0, 0.06464969961262194, 0.0002698016324819901, 0.0, 0.8162129203468208, 0.0, 0.07646458813354658, 0.0, 0.0, 0.7752926901367718, 0.0, 0.33586785700963756, 0.0, 0.0, 0.07936674049253409, 0.0, 0.8337509075202569, 0.4042296159892161, 0.14229194644921092, 2.4585738648269255, 0.7018679171069158, 3.7615122813431983, 0.05620179634266334, 0.0, 1.423640107535391, 4.580868673014782 ] ] }, "score_data": { "peer_score": 2.7056001540313894, "coverage": 0.6398103043248516, "baseline_score": 6.137700449541161, "spot_peer_score": 5.879931508325152, "peer_archived_score": 2.7056001540313894, "baseline_archived_score": 6.137700449541161, "spot_peer_archived_score": 5.879931508325152 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1598513878.246506, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1598513878.246506, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.08264391118626035, 0.9173560888137396 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 11, "user_vote": null }, "forecasts_count": 246, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3487, "title": "Will the average world temperature in 2020 be higher than in 2019?", "short_title": "", "url_title": "", "slug": 
"will-the-average-world-temperature-in-2020-be-higher-than-in-2019", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T16:07:36.806698Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:22.044473Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 17, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-01T00:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-16T16:53:00Z", "actual_resolve_time": "2021-01-16T16:53:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 172, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3487, "title": "Will the average world temperature in 2020 be higher than in 2019?", "created_at": "2020-01-12T16:07:36.806698Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-14T23:25:40.599248Z", "spot_scoring_time": "2020-01-14T23:25:40.599248Z", "scheduled_resolve_time": "2021-01-16T16:53:00Z", "actual_resolve_time": "2021-01-16T16:53:00Z", "resolution_set_time": "2021-01-16T16:53:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2021-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, 
"nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Surface air temperature change is a primary measure of global climate change. The globally averaged combined land and ocean surface temperature data as calculated by a linear trend, show a warming of 0.85°C (90% CI: [0.65 to 1.06]), over the period 1880 to 2012 [(IPCC, 2013)](https://www.ipcc.ch/site/assets/uploads/2018/02/WG1AR5_SPM_FINAL.pdf). The effects of increased global surface temperatures, and the associated changes in climate include:\n\n- Increases in the frequency and intensity of intense precipitation [(Min et al., 2011)](https://www.nature.com/articles/nature09763) and increases in the proportion of the global land surface in extreme drought [(Burke et al., 2006)](https://journals.ametsoc.org/doi/full/10.1175/JHM544.1),\n- Global sea level rise [(Vermeer and Rahmstorf)](https://www.pnas.org/content/106/51/21527.short) which in turn may result in the erosion of beaches and coastal wetlands, increased flooding, and intrusion of saltwater into rivers, bays, and aquifers [(Titus, 2008)](https://www.tandfonline.com/doi/abs/10.1080/08920758609362000) and global ocean warming and acidification [(Pörtner, 2008)](https://www.int-res.com/abstracts/meps/v373/p203-217/),\n- Adverse effects to human health, due to thermal stress, and the increased prevalence of infectious diseases [(McMichael et al., 2006)](https://www.sciencedirect.com/science/article/pii/S0140673606680793) and increased food security risk ([Zhao et al., 2017](https://www.pnas.org/content/114/35/9326.short); [FAO, 2008](http://www.fao.org/forestry/15538-079b31d45081fe9c3dbc6ff34de4807e4.pdf)),\n- Loss of terrestrial biodiversity at all system levels, including species-level reductions in range size and abundance, especially amongst endemic species [(Warren et al., 2013)](https://www.nature.com/articles/nclimate1887).\n\nAccording to [GISS Surface Temperature Analysis data](https://data.giss.nasa.gov/gistemp/), over the 2008 to 2018 period, the mean surface air temperature was 0.76°C higher relative to the 1951 to 1980 baseline. 2016 was the hottest recorded year with the mean surface temperature being 1.02°C higher than that over the same baseline.\n\nAccording to Vox's [Sigal Samuel](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions):\n\n> Thanks to new data from the Copernicus Climate Change Service, [we now know that 2019 was the second-hottest year ever recorded](https://www.nytimes.com/2020/01/08/climate/2019-temperatures.html). Only 2016 was hotter, and by a really infinitesimal amount, due to El Niño. \n\n> Weather events such as El Niño always have the potential to produce small fluctuations in global temperature trends, so I’m not going to go above a 60 percent estimated probability here. But I will say this: Overall, temperature has clearly been trending upward. And there is a solid likelihood that 2020 will be a hotter year for the world than 2019. 
\n\n**Will the average world temperature in 2020 be higher than in 2019?**\n\n-----\n\n\nThis question resolves positively if the Global Annual Mean Surface Air Temperature in 2020 is higher than it was in 2019, according to NASA's [GISTEMP data](https://data.giss.nasa.gov/gistemp/graphs_v4/).\n\n[GISTEMP v4 data may be accessed here](https://data.giss.nasa.gov/gistemp/graphs_v4/). Data can also [be found here](https://docs.google.com/spreadsheets/d/19P3wC8jxOzuG3mmGv3l6pXJeV8_lj6sQw5Ja6EtPX4E/edit?usp=sharing). Please make a copy by clicking \"file\" and then \"make a copy\" if you wish to edit it. If you make useful additions to the dataset, please share the file in the comments.", "fine_print": "", "post_id": 3487, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609450622.002605, "end_time": null, "forecaster_count": 172, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1609450622.002605, "end_time": null, "forecaster_count": 172, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.9604652445450858 ], "histogram": [ [ 0.0, 0.0022788007749466096, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26578942313817167, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007340894389116074, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005295651988794525, 0.0, 0.0, 0.0009581913646257458, 0.00040026872397790227, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0002703217712831703, 0.0, 6.437562865804148e-05, 0.0, 0.0, 0.0, 0.000850010198038349, 0.0, 0.0, 0.0005276313674631583, 8.288318151066482e-06, 0.0011767867510077393, 0.00028818579188192103, 0.0, 0.0, 0.0001244294806539547, 0.004868855850027048, 0.00015751685255660375, 0.00024382848131307408, 0.0, 0.0, 4.047293622413298e-05, 0.05887328232470236, 0.0, 0.0003301849734243347, 0.0, 0.037996548084607625, 0.0, 0.0, 0.022707755531092586, 0.0, 0.0004395705890910453, 0.0002194335096213128, 0.07951005364725235, 0.060227643882654544, 0.0023725042485384065, 0.013427345770784197, 0.11012216550239948, 0.03815802281349675, 0.0033503898676676904, 0.013799592968693802, 0.035322023148669626, 0.036047444919572826, 0.0, 0.006868952863891298, 0.012520988789161504, 0.8107105909553165, 0.0, 0.03651777024835966, 1.4845008232328387, 0.23542389445346562, 3.006705063800844, 1.897742002388124, 1.3883541659475571, 0.7561137453699753, 14.300494512664866 ] ] }, "score_data": { "peer_score": 15.473277348867152, "coverage": 0.9990012516596115, "baseline_score": 47.31708804082351, "spot_peer_score": 0.5499248650638416, "peer_archived_score": 15.473277348867152, "baseline_archived_score": 47.31708804082351, "spot_peer_archived_score": 0.5499248650638416 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609450622.261145, "end_time": null, "forecaster_count": 172, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609450622.261145, "end_time": null, "forecaster_count": 172, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.02315871786605972, 0.9768412821339403 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 21, "user_vote": null }, "forecasts_count": 455, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3486, "title": "Will Donald 
Trump's first term end before the next supreme court appointment?", "short_title": "", "url_title": "", "slug": "will-donald-trumps-first-term-end-before-the-next-supreme-court-appointment", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:37:57.336162Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:03.364417Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 108, "status": "resolved", "resolved": true, "actual_close_time": "2020-10-27T00:45:00Z", "scheduled_close_time": "2021-01-01T23:59:00Z", "scheduled_resolve_time": "2021-01-01T23:59:00Z", "actual_resolve_time": "2020-10-27T00:45:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 217, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3486, "title": "Will Donald Trump's first term end before the next supreme court appointment?", "created_at": "2020-01-12T15:37:57.336162Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-15T06:28:09.994570Z", "spot_scoring_time": "2020-01-15T06:28:09.994570Z", "scheduled_resolve_time": "2021-01-01T23:59:00Z", "actual_resolve_time": "2020-10-27T00:45:00Z", "resolution_set_time": "2020-10-27T00:45:00Z", "scheduled_close_time": "2021-01-01T23:59:00Z", "actual_close_time": "2020-10-27T00:45:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, 
"question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "So far, Trump appointed two Justices to the Supreme Court of the United States - [Neil Gorsuch](https://en.wikipedia.org/wiki/Neil_Gorsuch) and [Brett Kavanaugh](https://en.wikipedia.org/wiki/Brett_Kavanaugh). There are currently no vacancies.\n\n**Will Donald Trump fail to appoint at least one more Justice to the Supreme Court before the end of his first term?**\n\nResolution is positive if at least one of the following two conditions are fulfilled:\n\n1. Donald Trump fails to send the nomination of at least one new Supreme Court Justice to the Senate.\n\n2. Donald Trump sends the nomination of at least one new Supreme Court Justice to the Senate *but* no nominated Justice is sworn in before the end of his first term, for whatever reason.\n\nFor the purposes of this question, any situation in which Trump is temporarily not exercising his office (e.g., Cheney was Acting President when Bush had surgery) will not preclude a positive resolution, as long as he continues to be the President in the technical constitutional sense.\n\nThis question resolves on the last day of Trump's (first) term in the office of the President. If it resolves positively, this question closes two days prior to the date when resolution is triggered.", "fine_print": "", "post_id": 3486, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1603763151.272105, "end_time": null, "forecaster_count": 218, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ] } ], "latest": { "start_time": 1603763151.272105, "end_time": null, "forecaster_count": 218, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.04623069830722543 ], "histogram": [ [ 0.0, 14.616064508599154, 4.130221176336593, 3.451229586405253, 0.28841206281614645, 2.113509998832022, 0.16319928909834483, 0.43241169380827654, 0.33222766988670016, 0.15850405903908396, 0.1951153484197115, 0.35472537687295386, 0.10182123705187877, 0.06298724331021104, 0.1261106180753574, 0.08014065799669705, 0.05554948837024749, 0.28413094119078625, 0.0, 0.0, 0.28121644199094536, 0.0, 0.0, 0.0, 0.03301627571242765, 0.05855715580405404, 0.0, 0.0, 0.0, 0.0, 0.06841892944282327, 0.0, 0.0, 0.05376420225565617, 0.03617815056334112, 0.015283086957677099, 0.0, 0.0, 0.005668225713365669, 0.004922939259529659, 0.05213213106440847, 0.0, 0.002650353653054007, 0.0, 0.005969021312681912, 0.002048499888251404, 0.0, 0.0, 0.0, 0.0, 0.04888443364331294, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.6319241708308852e-05, 0.0, 0.0, 0.002605354937713205, 0.0, 0.0, 0.0, 0.00021356669769197493, 0.00018606947593013213, 9.142786044572629e-06, 0.0, 0.00021599231277962268, 0.00024167291620290671, 0.00014247949498060613, 0.0, 0.0, 0.00045566382401749235, 3.836243546787603e-05, 0.0004015496655355992, 0.0, 0.0006434768072560551, 0.00023086026500598395, 7.687572214664489e-05, 0.0024191088232781636, 5.191807436971059e-05, 0.0005351842079376418, 0.0006037486919991518, 0.0, 0.0006998138465912857, 
0.000524156366238907, 1.424261850980679e-05, 0.0, 0.0, 0.0006013377315771554, 0.0, 0.0006843566492402157, 0.00035705690029563347, 0.0002941050682341851, 0.0, 0.0, 0.0, 0.0, 0.40513536289161184 ] ] }, "score_data": { "peer_score": 7.113689360401579, "coverage": 0.8095054071346264, "baseline_score": -96.15271010055066, "spot_peer_score": -13.628419326702533, "peer_archived_score": 7.113689360401579, "baseline_archived_score": -96.15271010055066, "spot_peer_archived_score": -13.628419326702533 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1603526644.890507, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1603526644.890507, "end_time": null, "forecaster_count": 208, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9277313276853028, 0.07226867231469725 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 33, "user_vote": null }, "forecasts_count": 826, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3485, "title": "Will Brexit (finally) happen in 2020?", "short_title": "", "url_title": "", "slug": "will-brexit-finally-happen-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:19:06.852077Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:29.063023Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 21, "status": "resolved", "resolved": true, "actual_close_time": "2020-01-31T23:50:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-01-31T23:50:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 71, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, 
"start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3485, "title": "Will Brexit (finally) happen in 2020?", "created_at": "2020-01-12T15:19:06.852077Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-15T18:17:32.110722Z", "spot_scoring_time": "2020-01-15T18:17:32.110722Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-01-31T23:50:00Z", "resolution_set_time": "2020-01-31T23:50:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2020-01-31T23:50:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[The Brexit withdrawal agreement](https://en.wikipedia.org/wiki/Brexit_withdrawal_agreement) sets the terms of the withdrawal of the UK from the EU. The original withdrawal agreement was rejected by the House of Commons on three occasions, leading to the resignation of Theresa May as Prime Minister of the United Kingdom. It was subsequently renegotiated by Boris Johnson, with a new version published on 17 October 2019.\n\nThe UK and the EU agreed a draft agreement on the withdrawal of the UK from the EU on 17 October 2019 [(IFG, 2019)](https://www.instituteforgovernment.org.uk/explainers/brexit-deal-withdrawal-agreement). MPs have given their final backing to the bill that will implement the UK government's Brexit deal [(BBC, 2020)](https://www.bbc.co.uk/news/uk-politics-51051178).\n\nBoth the UK and the EU need to approve and sign the withdrawal agreement. They will then start to negotiate new arrangements. There would be a transition period to prepare for new rules.\n\nThe UK could leave with no deal if the withdrawal agreement is not approved by 31 January 2020, or at the end of a transition period [(GOV.UK, 2020)](https://www.gov.uk/brexit).\n\n**Will Brexit (finally) happen in 2020?**\n\nThe question resolves *positive* if either of the following obtain:\n\n1. The UK leaves with a withdrawal agreement in 2020 with a transitional period (during which time the UK and EU continues to operate under Single Market rules) scheduled to end in or before 2026.\n2. The UK leaves without a withdrawal agreement in 2020 (i.e. a \"no-deal\" Brexit occurs).\n\nPositive resolution need not require ratification of the terms of UK's exit, and therefore a \"no-deal\" Brexit triggers positive resolution. 
Moreover, the UK leaving with a withdrawal agreement also triggers positive resolution, provided the transitional period is scheduled to end by the end of 2026.", "fine_print": "", "post_id": 3485, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1580518945.159794, "end_time": null, "forecaster_count": 72, "interval_lower_bounds": [ 0.96 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1580518945.159794, "end_time": null, "forecaster_count": 72, "interval_lower_bounds": [ 0.96 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.9543543027080639 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01127371494146295, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02018721735106872, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13472216263136438, 0.008707034755549023, 0.0, 0.0, 0.6323542366020294, 0.0, 0.0, 0.3671566801324653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.004878079341891922, 0.0904897364644334, 0.027700581072225252, 0.0, 0.0, 0.0019319732351326508, 0.0, 0.0, 0.0, 0.0, 0.7201658515387022, 0.0, 0.0, 0.0, 0.03383492044223947, 1.8077143517808743, 0.8303626886554896, 0.6565405660258069, 0.6410602116103061, 9.486650349227705 ] ] }, "score_data": { "peer_score": 0.4198449000945166, "coverage": 0.04895555088229632, "baseline_score": 4.5586329776813255, "spot_peer_score": 1.6974984805341546, "peer_archived_score": 0.4198449000945166, "baseline_archived_score": 4.5586329776813255, "spot_peer_archived_score": 1.6974984805341546 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1580058944.907981, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1580058944.907981, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.13932884156802428, 0.8606711584319757 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 106, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3484, "title": "Will the number of people in extreme poverty in 2020 be lower than the number in 2015?", "short_title": "Extreme Poverty Decline in 2020", "url_title": "Extreme Poverty Decline in 2020", "slug": "extreme-poverty-decline-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:16:30.663337Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:28:59.488340Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 17, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-01T00:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2023-10-03T23:16:00Z", "actual_resolve_time": "2023-10-03T23:16:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 96, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", 
"name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3484, "title": "Will the number of people in extreme poverty in 2020 be lower than the number in 2015?", "created_at": "2020-01-12T15:16:30.663337Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T08:00:00Z", "spot_scoring_time": "2020-01-16T08:00:00Z", "scheduled_resolve_time": "2023-10-03T23:16:00Z", "actual_resolve_time": "2023-10-03T23:16:00Z", "resolution_set_time": "2023-10-03T23:16:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2021-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Extreme poverty is defined as living on less than $1.90 a day, measured in 2011 Purchasing Power Parity prices [(World Bank, 2017)](https://openknowledge.worldbank.org/bitstream/handle/10986/25141/9781464809613.pdf) In 2015, 9.98% of the World's population or 734.5 million people lived in extreme poverty [(World Bank, 
2019)](http://documents.worldbank.org/curated/en/344401569259571927/pdf/September-2019-PovcalNet-Update-Whats-New.pdf).\n\nAccording to [World Bank Data](https://www.gapminder.org/tools/?from=world#$chart-type=bubbles&state$time$value=2006&delay:121.71612903225821;&entities$;&marker$select@;&opacitySelectDim:0.3&axis_x$use=indicator&which=income_per_person_gdppercapita_ppp_inflation_adjusted&scaleType=log&zoomedMin=282&zoomedMax=119849&domainMin:null&domainMax:null;&axis_y$use=indicator&which=extreme_poverty_percent_people_below_190_a_day&scaleType=linear&zoomedMin=0&zoomedMax=93&domainMin:null&domainMax:null;&size$use=indicator&which=population_total&extent@:0.022083333333333333&:0.4083333333333333;&domainMin:null&domainMax:null;&color$use=property&which=world_6region;;;&ui$chart$trails:false), extreme poverty has been declining by roughly 1% per year since the 1980's. However, [there is evidence](https://www.worldbank.org/en/news/press-release/2018/09/19/decline-of-global-extreme-poverty-continues-but-has-slowed-world-bank) that the decline in global extreme poverty has been slowing.", "resolution_criteria": "**Will we see fewer than 734.5M people in extreme poverty, worldwide in the year 2020, according to World Bank estimates?**\n\nThis question resolves positively if the first estimates of the number in extreme poverty in the year 2020, published by the World Bank, is below 734.5M. In case the numbers are not published by the World Bank before the end of 2025, figures from other data sources, such as those listed on the [data sources page](https://www.metaculus.com/help/prediction-resources/#data-sources), may be consulted.", "fine_print": "", "post_id": 3484, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609450582.400026, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": [ 0.62 ], "centers": [ 0.65 ], "interval_upper_bounds": [ 0.75 ] } ], "latest": { "start_time": 1609450582.400026, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": [ 0.62 ], "centers": [ 0.65 ], "interval_upper_bounds": [ 0.75 ], "forecast_values": [ 0.35, 0.65 ], "means": [ 0.6495385530940746 ], "histogram": [ [ 0.0, 0.20269889959141074, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09238778703307976, 0.0, 0.3205538707481383, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09880133150134848, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.039143517677046796, 0.0, 0.07019132559628703, 0.0, 0.011037504018186366, 0.0, 0.0, 0.7721573280084261, 0.0, 0.049014995944548286, 0.0, 0.0, 0.0, 0.0, 0.6496255686879837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.022416477875092735, 0.13700891322606465, 0.0, 0.4640559520481673, 0.8612940043969098, 0.49576009603886095, 0.5607867817756008, 0.044569461888073356, 3.1765334107693612, 1.8201808140668563, 0.8928130322171791, 0.017363302970072207, 0.19936734537963513, 0.25364764538129814, 1.7240445934100768, 0.2488934357492891, 0.10559708250745295, 0.031421871307371584, 0.05671277750258922, 1.6752608055192644, 0.005432350986704414, 0.17991357112094256, 0.47588748297608136, 0.902488567631141, 0.37047084145395864, 0.0, 0.0, 0.0, 0.5919456382250862, 0.015988099695086973, 0.0026718728115051014, 0.0, 0.08656618197888265, 0.010033693009389048, 0.0739925985598237, 0.0, 0.0007831332794337743, 0.0, 0.0015317656631140828, 0.0017751730710432278, 0.0, 0.0, 0.0, 0.28741874108424165 ] ] }, "score_data": { "peer_score": 0.5322402836540462, "coverage": 0.9983872612243699, "baseline_score": 46.88526754912349, "spot_peer_score": 3.5343585919917344, 
"peer_archived_score": 0.5322402836540462, "baseline_archived_score": 46.88526754912349, "spot_peer_archived_score": 3.5343585919917344 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609450582.446616, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609450582.446616, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.236862032043069, 0.763137967956931 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 20, "user_vote": null }, "forecasts_count": 235, "key_factors": [], "is_current_content_translated": false, "description": "Extreme poverty is defined as living on less than $1.90 a day, measured in 2011 Purchasing Power Parity prices [(World Bank, 2017)](https://openknowledge.worldbank.org/bitstream/handle/10986/25141/9781464809613.pdf) In 2015, 9.98% of the World's population or 734.5 million people lived in extreme poverty [(World Bank, 2019)](http://documents.worldbank.org/curated/en/344401569259571927/pdf/September-2019-PovcalNet-Update-Whats-New.pdf).\n\nAccording to [World Bank Data](https://www.gapminder.org/tools/?from=world#$chart-type=bubbles&state$time$value=2006&delay:121.71612903225821;&entities$;&marker$select@;&opacitySelectDim:0.3&axis_x$use=indicator&which=income_per_person_gdppercapita_ppp_inflation_adjusted&scaleType=log&zoomedMin=282&zoomedMax=119849&domainMin:null&domainMax:null;&axis_y$use=indicator&which=extreme_poverty_percent_people_below_190_a_day&scaleType=linear&zoomedMin=0&zoomedMax=93&domainMin:null&domainMax:null;&size$use=indicator&which=population_total&extent@:0.022083333333333333&:0.4083333333333333;&domainMin:null&domainMax:null;&color$use=property&which=world_6region;;;&ui$chart$trails:false), extreme poverty has been declining by roughly 1% per year since the 1980's. However, [there is evidence](https://www.worldbank.org/en/news/press-release/2018/09/19/decline-of-global-extreme-poverty-continues-but-has-slowed-world-bank) that the decline in global extreme poverty has been slowing." 
}, { "id": 3483, "title": "In the year 2020, will no gene drive be launched to fight malaria-carrying mosquitoes in any part of the world?", "short_title": "", "url_title": "", "slug": "in-the-year-2020-will-no-gene-drive-be-launched-to-fight-malaria-carrying-mosquitoes-in-any-part-of-the-world", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:15:21.773200Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:25.373916Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 10, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-01T00:00:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-03T16:13:00Z", "actual_resolve_time": "2021-01-03T16:13:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 105, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3483, "title": "In the year 2020, will no gene drive be launched to fight malaria-carrying mosquitoes in any part of the world?", "created_at": "2020-01-12T15:15:21.773200Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T08:00:00Z", "spot_scoring_time": "2020-01-16T08:00:00Z", 
"scheduled_resolve_time": "2021-01-03T16:13:00Z", "actual_resolve_time": "2021-01-03T16:13:00Z", "resolution_set_time": "2021-01-03T16:13:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2021-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Gene drives] are phenomena in a species' population in which one version of a gene, or allele, is probabilistically favored over other alleles that are otherwise equally favored by fitness. A gene drive in a particular allele shows up as a bias for the corresponding phenotype in the offspring. Consider two parents with different alleles for the same gene; if there exists a gene drive for one allele, it is highly likely that all of the parents' offspring will carry the driven gene's trait. \n\nWith new advances in genetic engineering using CRISPR, it is now much easier to modify an organism's genes. This makes *engineered* gene drives [tractable]: a gene coding for the CRISPR system itself can be encoded near to the gene being \"driven,\" so that if one copy of the driven allele and one \"wild\" allele are inherited, the CRISPR system modifies the wild gene so that the driven gene plus CRISPR system is inherited. This process can spread the driven gene exponentially throughout a population, at a rate far exceeding the spread of a gene that is merely favorable for survival.\n\nUses of this method include the potential to eliminate diseases like malaria or lyme disease that are spread by a fast-reproducing vector, by promoting disease-resistant traits. [Valentino Gantz et. al.] have genetically altered a primary malaria vector native to India, the [Anopheles stephensi] mosquito, to carry and pass on anti-malaria traits. Another [study] published in nature biotechnology offers a more drastic approach that would render female [Anopheles gambiae] mosquitoes, native to Africa, completely infertile, with the intent of wiping out the species in affected ecosystems. Similar studies have investigated [engineering mice] (a prime carrier) to be immune to Lyme disease. \n\nWith Malaria [afflicting hundreds of millions] of people per year, advances in gene drive research have instigated public conversations about the usefulness, feasibility, and ethics of gene drives being encouraged before testing them in wild ecosystems.\n\n**In the year 2020, will no gene drive be launched to fight malaria-carrying mosquitoes in any part of the world?**\n\nFor negative resolution, there must be credible reports that the drive was initiated. 
For the purpose of this question, the wild population can be isolated (say on an island or even in an enclosure) to control spreading but should aim to replicate natural reproduction etc., and *cannot* be a laboratory setting.\n\nIn case of ambiguity we will adopt as our resolution the resolution chosen by Dylan Matthews and/or Kelsey Piper in their [review of their 2020 predictions](https://www.metaculus.com/questions/3475/future-perfect-2020-series/).\n\nIn case negative resolution is triggered, this question retroactively closes two days prior to the day resolution is triggered, but resolves on January 1st, 2021.\n\n\n[afflicting hundreds of millions]:http://www.who.int/features/factfiles/malaria/en/\n[tractable]:http://www.nature.com/news/mosquitoes-engineered-to-pass-down-genes-that-would-wipe-out-their-species-1.18974\n[Gene drives]: http://wyss.harvard.edu/staticfiles/newsroom/pressreleases/Gene%20drives%20FAQ%20FINAL.pdf\n[Valentino Gantz et. al.]: http://www.pnas.org/content/112/49/E6736.abstract\n[study]: http://www.nature.com/nbt/journal/vaop/ncurrent/full/nbt.3439.html\n[Anopheles gambiae]: https://en.wikipedia.org/wiki/Anopheles_gambiae\n[Anopheles stephensi]: https://en.wikipedia.org/wiki/Anopheles_stephensi\n[engineering mice]:http://www.newyorker.com/magazine/2015/11/16/the-gene-hackers\"", "fine_print": "", "post_id": 3483, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609451947.396595, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.99 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1609451947.396595, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.99 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.9835579935088911 ], "histogram": [ [ 0.0, 0.026951908029936068, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007044922433575599, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00014587839364207648, 0.008482710965774019, 0.0036046240996390278, 0.0, 0.0, 0.0, 0.061740848754306035, 0.0, 0.006404218640100707, 0.001305199350988809, 0.0014955019452109978, 0.0, 0.004757793444040947, 0.0, 0.0, 0.0, 0.01201671429715702, 0.0, 0.0, 0.0, 0.0, 0.01290830348323355, 0.0011330420873293281, 0.0017053798284846226, 0.00041077660837210574, 0.0007123431816616531, 0.004891533131218557, 0.011920347210982016, 0.006011873823010705, 0.005263543730988702, 0.0, 0.08281271899645147, 0.0, 0.0021900188227187093, 0.09929134888077194, 0.05510683421304619, 0.48415280897851287, 0.07715984508049872, 0.11560803765024275, 1.2143552651938605, 16.688427522265435 ] ] }, "score_data": { "peer_score": 9.423874133346374, "coverage": 0.9983866896031584, "baseline_score": 79.00466981843955, "spot_peer_score": 10.193069610692723, "peer_archived_score": 9.423874133346374, "baseline_archived_score": 79.00466981843955, "spot_peer_archived_score": 10.193069610692723 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609451947.475086, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609451947.475086, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, 
"forecast_values": [ 0.011649238100536308, 0.9883507618994637 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 229, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3482, "title": "Will Netanyahu remain Israeli's Prime Minister till the end of 2020?", "short_title": "", "url_title": "", "slug": "will-netanyahu-remain-israelis-prime-minister-till-the-end-of-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:12:10.507022Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:22.053096Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 28, "status": "resolved", "resolved": true, "actual_close_time": "2020-12-31T22:25:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": "2020-12-31T22:25:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 140, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3482, "title": "Will Netanyahu remain Israeli's Prime Minister till the end of 2020?", "created_at": "2020-01-12T15:12:10.507022Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T08:00:00Z", "spot_scoring_time": "2020-01-16T08:00:00Z", "scheduled_resolve_time": "2021-01-01T00:00:00Z", "actual_resolve_time": 
"2020-12-31T22:25:00Z", "resolution_set_time": "2020-12-31T22:25:00Z", "scheduled_close_time": "2021-01-01T00:00:00Z", "actual_close_time": "2020-12-31T22:25:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "During Benjamin Netanyahu's fourth premiership, a number of alleged corruption scandals have been investigated involving Netanyahu and his close political circle. Police recommended indictments against Netanyahu, and on 21 November 2019, Netanyahu was officially indicted for breach of trust, accepting bribes and fraud [(BBC, 2019)](https://www.bbc.co.uk/news/world-middle-east-47409739).\n\nIn [March 2020](https://en.wikipedia.org/wiki/2020_Israeli_legislative_election), Israel will head to the polls and about 9 million Israelis will decide if he is worthy of re-election.\n\nAccording to this [Vox article](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions) by Sigal Samuel, Kelsey Piper, and Dylan Matthews:\n\n> In March, Israel heads to the polls for the third time in a year, and some observers think this election might finally be the end of Israeli Prime Minister Benjamin Netanyahu. After all, he’s recently been indicted in three corruption cases and he’s facing tough opposition from a centrist party that has a slight lead on him in the polls.\n\n> But if there’s one thing I’ve learned from my years living in and reporting on Israel, it’s that you should never underestimate Bibi’s ability to rise from the political ashes. And indeed, Bibi just blew his opponent out of the water in the Likud party primary, winning 72.5 percent of the vote. He’s already using that to fire up his base.\n\n**Will Netanyahu remain Israeli’s Prime Minister till the end of 2020?**\n\nThis question resolves positively if Netanyahu is remains Israel's Prime Minister in the year 2020. 
This resolves negatively if, for any reason, Netanyahu no longer holds his position of Prime Minister or Prime Minister-designate at any point in the year 2020.\n\nIn case negative resolution is triggered, this question retroactively closes two days prior to the day resolution is triggered, but resolves on January 1st, 2021.", "fine_print": "", "post_id": 3482, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609451907.103019, "end_time": null, "forecaster_count": 140, "interval_lower_bounds": [ 0.99 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1609451907.103019, "end_time": null, "forecaster_count": 140, "interval_lower_bounds": [ 0.99 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.9835613610149987 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0013320103498760136, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00030643583271717844, 0.0, 0.0, 0.0001870377705102628, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0009748956867557396, 0.0, 6.799396622574348e-05, 0.0, 0.0, 0.0, 0.0, 0.00017167937732653064, 0.0, 0.0, 0.0004487458165836207, 0.0, 0.00046777383710474576, 0.0, 5.3696670179678695e-05, 0.00039676770828342244, 0.003839031656384135, 8.417018276060128e-05, 0.0016877012418524454, 0.0010785264515461666, 0.0, 0.0, 0.010263999612512086, 0.004055812482600446, 0.0029814847755216707, 0.0, 0.0022708594208157877, 0.0117094927018877, 0.0, 0.0, 0.0, 0.0, 0.03481630309338764, 0.00386859459105236, 0.0, 0.0, 0.11993905733016709, 0.03013339738385378, 0.007969292454655235, 0.0, 0.0, 2.989121289631428e-05, 0.12603170040749365, 0.0006361834383069938, 0.015748442329264648, 0.0007104699743244969, 0.0, 0.0430528358737591, 0.017918709468550374, 0.031554938119118886, 0.0, 0.0008793495751820619, 0.0, 0.011291666108866073, 0.02034529428284924, 0.029436034164519582, 0.0, 0.3322505829035917, 0.059717607671636794, 0.769223499693375, 0.17014995862471433, 20.299767660546046 ] ] }, "score_data": { "peer_score": 19.044962784903404, "coverage": 0.9986742650717111, "baseline_score": 51.606258875320215, "spot_peer_score": 25.595051043972123, "peer_archived_score": 19.044962784903404, "baseline_archived_score": 51.606258875320215, "spot_peer_archived_score": 25.595051043972123 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609451907.198918, "end_time": null, "forecaster_count": 140, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609451907.198918, "end_time": null, "forecaster_count": 140, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.03954725081326116, 0.9604527491867388 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 19, "user_vote": null }, "forecasts_count": 393, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3481, "title": "Will no CRISPR-edited babies be born in the year 2020?", "short_title": "Will no CRISPR-edited babies be born in 2020?", "url_title": "Will no CRISPR-edited babies be born in 2020?", "slug": "will-no-crispr-edited-babies-be-born-in-2020", "author_id": 104761, "author_username": "Tamay", "coauthors": [], "created_at": "2020-01-12T15:06:58.039038Z", "published_at": "2020-01-14T08:00:00Z", "edited_at": "2025-09-05T17:29:19.279647Z", 
"curation_status": "approved", "curation_status_updated_at": "2020-01-14T08:00:00Z", "comment_count": 14, "status": "resolved", "resolved": true, "actual_close_time": "2021-03-01T00:00:00Z", "scheduled_close_time": "2021-03-01T00:00:00Z", "scheduled_resolve_time": "2022-01-02T02:31:00Z", "actual_resolve_time": "2022-01-02T02:31:00Z", "open_time": "2020-01-14T08:00:00Z", "nr_forecasters": 161, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ], "question_series": [ { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2991, "type": "question_series", "name": "Future Perfect 2020 Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-07-21T07:00:00Z", "close_date": "2021-03-02T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.983096Z", "edited_at": "2024-02-29T10:13:43.670649Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3481, "title": "Will no CRISPR-edited babies be born in the year 2020?", "created_at": "2020-01-12T15:06:58.039038Z", "open_time": "2020-01-14T08:00:00Z", "cp_reveal_time": "2020-01-16T05:02:54.660863Z", "spot_scoring_time": "2020-01-16T05:02:54.660863Z", "scheduled_resolve_time": "2022-01-02T02:31:00Z", "actual_resolve_time": "2022-01-02T02:31:00Z", "resolution_set_time": "2022-01-02T02:31:00Z", "scheduled_close_time": "2021-03-01T00:00:00Z", "actual_close_time": "2021-03-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Genome editing is a type of genetic engineering in which DNA is inserted, deleted, modified or replaced in the genome of a living organism ([Wikipedia](https://en.wikipedia.org/wiki/Genome_editing)) [CRISPR/Cas9](https://en.wikipedia.org/wiki/CRISPR_gene_editing) is a technique that allows for the highly specific and rapid modification of DNA in a genome.\n\nOn 25 November 2018, a Chinese scientist named He Jiankui made a startling announcement: as a result of experiments conducted at his clinic, the world’s first genetically edited babies, [Lulu and Nana](https://en.wikipedia.org/wiki/Lulu_and_Nana_controversy), had been born [(Regalado, 2018b)](https://www.technologyreview.com/s/612458/exclusive-chinese-scientists-are-creating-crispr-babies/).\n\nAfter Jiankui’s announcement, Vox asked [“Is the CRISPR baby controversy the start of a terrifying new chapter in gene editing?”](https://www.vox.com/science-and-health/2018/11/30/18119589/crispr-gene-editing-he-jiankui) and a lot of other people also had the same question. But the answer (so far) seems to be no.\n\nA Chinese court has sentenced He Jiankui, to three years in prison for “illegal medical practice”, and handed down shorter sentences to two colleagues who assisted him [(Cyranoski, 2020)](https://www.nature.com/articles/d41586-020-00001-y) No new babies edited with CRISPR were announced this past year. \n\nAccording to Vox's [Kelsey Piper](https://www.vox.com/future-perfect/2020/1/13/21055740/trump-reelection-biden-nomination-brexit-2020-predictions):\n\n> The fierce global backlash against Jiankui made it clear that the world is uncomfortable with such uses of technology — rightfully so, as there’s immense potential for misuse and Jiankui’s experiments were enormously irresponsible. I bet it won’t happen again this year — though I’m sure it’ll happen again someday. \n\n**Will no CRISPR-edited babies be born in the year 2020?**\n\nThis question resolves positively, if by the end of 2021, no credible reports have emerged that a baby was born in the year 2020 whose embryo was genetically edited by way of a CRISPR system, such as [CAS9](https://en.wikipedia.org/wiki/Cas9). Reports need to be corroborated and substantiated so as to leave little room for doubt, e.g. 
by being corroborated by statements of research organisations, independent researchers, grant-makers or government science department or agencies.\n\nIn case positive resolution is triggered, this question retroactively closes two days prior to the day resolution is triggered, but resolves on January 1st, 2021.", "fine_print": "", "post_id": 3481, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1614549768.611724, "end_time": null, "forecaster_count": 161, "interval_lower_bounds": [ 0.85 ], "centers": [ 0.89 ], "interval_upper_bounds": [ 0.93 ] } ], "latest": { "start_time": 1614549768.611724, "end_time": null, "forecaster_count": 161, "interval_lower_bounds": [ 0.85 ], "centers": [ 0.89 ], "interval_upper_bounds": [ 0.93 ], "forecast_values": [ 0.10999999999999999, 0.89 ], "means": [ 0.8700571099509479 ], "histogram": [ [ 0.0, 0.0005572898269413089, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15382745695836628, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.859638845107449e-05, 0.0001135773716221881, 0.0, 8.389098098608417e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04201554300548166, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00037344268568904036, 0.0, 0.0, 0.0, 0.00019057363267846293, 0.0002147712921800137, 0.0, 0.0, 0.00033607984892436324, 0.00014840074689743109, 0.0, 0.0, 0.00030172279921580864, 0.0014675461811634483, 0.0004580292366305874, 0.0, 1.2694217564375746e-05, 0.0, 0.0014863021216130411, 0.6980195913378975, 0.0, 0.0, 0.42153355261320574, 0.23303064927313422, 0.01494623027872809, 0.0, 0.0, 0.00013013734650132465, 1.3930842204491998, 0.0, 2.2803932918607588e-05, 0.34357920624511, 0.2714177889711808, 0.9810084001208366, 0.00513138819365678, 0.6175335223692707, 0.018858724134144866, 0.2554455422102843, 3.2419625697197905, 0.6734706002781081, 0.40108347485850576, 0.7691135263834544, 1.532483852050487, 1.6892402608820498, 1.3823148769148084, 2.645854178685537, 1.2447198127817036, 0.6990712272934303, 1.5717895184807342, 0.3090282087135248, 0.49274911910325636, 1.0034898715571658, 0.7687193834444346 ] ] }, "score_data": { "peer_score": 11.386044815262823, "coverage": 0.9988399526201353, "baseline_score": 59.69333894961052, "spot_peer_score": 14.765914044897018, "peer_archived_score": 11.386044815262823, "baseline_archived_score": 59.69333894961052, "spot_peer_archived_score": 14.765914044897018 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1614549768.655857, "end_time": null, "forecaster_count": 161, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1614549768.655857, "end_time": null, "forecaster_count": 161, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.08352542983059219, 0.9164745701694078 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 401, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3477, "title": "If human-level artificial intelligence is developed, will World GDP grow by at least 30.0% in any of the subsequent 15 years?", "short_title": "GDP growth to exceed 30% 15-years post HLAI", "url_title": "GDP growth to exceed 30% 15-years post HLAI", "slug": "gdp-growth-to-exceed-30-15-years-post-hlai", "author_id": 108770, "author_username": "Matthew_Barnett", "coauthors": [], "created_at": "2020-01-10T23:31:26.473439Z", 
"published_at": "2020-01-14T00:00:00Z", "edited_at": "2025-11-09T19:21:08.936292Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T00:00:00Z", "comment_count": 39, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2061-04-11T06:59:00Z", "scheduled_resolve_time": "2100-01-01T23:00:00Z", "actual_resolve_time": null, "open_time": "2020-01-14T00:00:00Z", "nr_forecasters": 174, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 1313, "type": "question_series", "name": "AI Progress Essay Contest", "slug": "ai-fortified-essay-contest", "header_image": "https://cdn.metaculus.com/neural_net.png", "prize_pool": "6500.00", "start_date": "2022-02-08T13:02:52Z", "close_date": "2022-04-16T15:54:55Z", "forecasting_end_date": null, "html_metadata_json": { "description": "Welcome to the AI Progress Essay Contest! Metaculus aims to support accurate forecasting of and preparation for the impacts of transformative AI.\r\n\r\nThank you for your participation!" }, "is_ongoing": false, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T21:35:01.738302Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T21:35:01.571893Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ], "default_project": { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": 
"2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T21:35:01.571893Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 3477, "title": "If human-level artificial intelligence is developed, will World GDP grow by at least 30.0% in any of the subsequent 15 years?", "created_at": "2020-01-10T23:31:26.473439Z", "open_time": "2020-01-14T00:00:00Z", "cp_reveal_time": "2020-01-14T15:50:15.155660Z", "spot_scoring_time": "2020-01-14T15:50:15.155660Z", "scheduled_resolve_time": "2100-01-01T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2061-04-11T06:59:00Z", "actual_close_time": "2061-04-11T06:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In economist Robin Hanson's 2001 paper [Economic Growth Given Machine Intelligence](http://mason.gmu.edu/~rhanson/aigrow.pdf), he writes\n\n> A simple exogenous growth model gives conservative estimates of the economic implications of machine intelligence. [...] Without machine intelligence, world product grows at a familiar rate of 4.3% per year, doubling every 16 years, with about 40% of technological progress coming from ordinary computers. With machine intelligence, the (instantaneous) annual growth rate would be 45%, ten times higher, making world product double every 18 months! 
If the product shares are raised by 20%, and general technology growth is lowered to preserve the 4.4% figure, the new doubling time falls to less than 6 months.", "resolution_criteria": "If a machine passes the test in [this Metaculus question](https://www.metaculus.com/questions/384/human-machine-intelligence-parity-by-2040/), this question resolves as **YES** if world GDP grows by at least 30.0% for any single year in the fifteen years following the date of the test, according to a reputable organization such as [The World Bank](https://data.worldbank.org/indicator/NY.GDP.MKTP.KD.ZG), and **NO** if it doesn't.\n\nIf no machine passes that test by 2100-01-01, then this question resolves ambiguously.", "fine_print": "", "post_id": 3477, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1762716058.187062, "end_time": 1774868650.426563, "forecaster_count": 163, "interval_lower_bounds": [ 0.33 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.75 ] } ], "latest": { "start_time": 1762716058.187062, "end_time": 1774868650.426563, "forecaster_count": 163, "interval_lower_bounds": [ 0.33 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.75 ], "forecast_values": [ 0.42000000000000004, 0.58 ], "means": [ 0.5137362795498832 ], "histogram": [ [ 0.11660113476162957, 0.7609843477820939, 0.0, 0.0, 0.0, 0.3277043302481415, 0.0, 0.0, 0.00042342026629729534, 0.0, 1.8442431866139228, 0.0, 0.0, 0.46978901734992834, 0.0, 0.010022894005064682, 0.0, 0.0, 0.0, 0.08864949863662812, 0.02879363631585469, 0.0, 0.0, 0.0, 0.0, 1.043856175723401, 0.0, 0.0, 0.0, 0.0, 0.7872218024484798, 0.0, 0.0, 1.0, 0.0, 0.018459411181464352, 0.7997271545205159, 0.0, 0.0, 0.0, 0.31390715766528304, 0.0, 0.6717772558256114, 0.0, 0.0, 2.414430035980386, 0.0, 0.0, 0.010236530215535066, 0.0, 0.6201875556534224, 0.0, 0.044003127246264914, 0.0, 0.0025166783760162815, 0.02786162057762982, 0.5993534174326363, 0.1126039972771161, 0.0, 0.11671459546839864, 3.2778641126448607, 0.0007470481089898709, 0.2909561413636738, 0.6097497180664087, 0.002708146749221846, 0.7449801724794775, 0.01648555463969269, 0.005460619532337802, 0.0006418572968043303, 0.46433670397222204, 0.02726929473145356, 0.005421889546161894, 0.0, 0.013035914176482605, 0.048784092628293134, 1.5058278113089831, 0.0, 0.0016060738269252572, 0.0, 0.3396355129260222, 1.041207514313016, 0.10736816222626319, 0.06102548717729304, 0.13754657999397538, 0.0, 2.1861707528772483, 0.0397836592094299, 0.0, 0.009627738400141271, 7.864861451472225e-05, 0.4872118294627502, 0.0, 0.06953613013584573, 0.0031286761010948703, 0.007987388636403895, 0.07589029509727829, 0.0, 0.001722426722456613, 0.0, 0.2236938105468443 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289446.706457, "end_time": null, "forecaster_count": 166, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289446.706457, "end_time": null, "forecaster_count": 166, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5985472981443127, 0.4014527018556873 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 27, "user_vote": null }, "forecasts_count": 431, "key_factors": [], "is_current_content_translated": false, "description": "In economist Robin Hanson's 2001 paper [Economic Growth Given Machine Intelligence](http://mason.gmu.edu/~rhanson/aigrow.pdf), he writes\n\n> A 
simple exogenous growth model gives conservative estimates of the economic implications of machine intelligence. [...] Without machine intelligence, world product grows at a familiar rate of 4.3% per year, doubling every 16 years, with about 40% of technological progress coming from ordinary computers. With machine intelligence, the (instantaneous) annual growth rate would be 45%, ten times higher, making world product double every 18 months! If the product shares are raised by 20%, and general technology growth is lowered to preserve the 4.4% figure, the new doubling time falls to less than 6 months." }, { "id": 3469, "title": "Will the United States institute a military draft by 2025?", "short_title": "", "url_title": "", "slug": "will-the-united-states-institute-a-military-draft-by-2025", "author_id": 108770, "author_username": "Matthew_Barnett", "coauthors": [], "created_at": "2020-01-08T05:17:41.716983Z", "published_at": "2020-01-14T23:00:00Z", "edited_at": "2025-09-05T17:29:03.268615Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-14T23:00:00Z", "comment_count": 11, "status": "resolved", "resolved": true, "actual_close_time": "2025-01-01T07:59:00Z", "scheduled_close_time": "2025-01-01T07:59:00Z", "scheduled_resolve_time": "2025-01-02T01:00:00Z", "actual_resolve_time": "2025-01-04T01:19:00Z", "open_time": "2020-01-14T23:00:00Z", "nr_forecasters": 143, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 3469, "title": "Will the United States institute a military draft by 2025?", "created_at": "2020-01-08T05:17:41.716983Z", "open_time": "2020-01-14T23:00:00Z", "cp_reveal_time": "2020-01-16T04:44:53.499584Z", "spot_scoring_time": "2020-01-16T04:44:53.499584Z", "scheduled_resolve_time": "2025-01-02T01:00:00Z", "actual_resolve_time": "2025-01-04T01:19:00Z", "resolution_set_time": "2025-01-04T01:20:06.467966Z", "scheduled_close_time": "2025-01-01T07:59:00Z", "actual_close_time": "2025-01-01T07:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, 
"default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The United States has employed military conscription during five separate conflicts in American history, most recently in the Vietnam war, ending in 1973. If the United States enters another large war, it might begin conscripting soldiers once again.\n\nThis question resolves positively if *any* military personnel\\* are conscripted\\(^†\\) for the military in the United States before January 1st, 2025. Otherwise, it resolves negatively.\n\n---\n\n\\* <small>\"military personnel\" here includes the Coast Guard.</small>\n\n\\(^†\\) <small>\"conscription\" here does not include the Individual Ready Reserve or Stop-Loss activation.</small>", "fine_print": "", "post_id": 3469, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1735689321.57258, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1735689321.57258, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.04264903330799198 ], "histogram": [ [ 10.88230932152699, 7.075346369821052, 0.3607173730289253, 0.31215979542524513, 0.13900509979853734, 0.36434621743955936, 1.603651698554608, 0.018534536649463734, 0.41929112857549033, 0.0, 0.15721680058625243, 0.0, 0.0, 0.016109028389612038, 0.0, 8.069351508216675e-05, 0.0, 0.0, 0.0, 2.8656549856061617e-05, 0.014142526157609709, 0.0, 0.0, 0.003312910787396866, 0.011583847594907888, 0.2160641577762787, 0.05645326862575361, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.819151260991988e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.008800791146786727, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5929493299644631 ] ] }, "score_data": { "baseline_score": 96.95411914975567, "peer_score": 7.6646496568576445, "coverage": 0.9999685638567708, "relative_legacy_score": 0.0, "weighted_coverage": 0.9999685638567708, "spot_peer_score": 8.042593404090567, "spot_baseline_score": 98.5500430304885, "baseline_archived_score": 96.95411914975567, "peer_archived_score": 7.6646496568576445, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 8.042593404090567, "spot_baseline_archived_score": 98.5500430304885 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287786.121839, "end_time": null, "forecaster_count": 139, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287786.121839, "end_time": null, "forecaster_count": 139, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9977632690587988, 0.002236730941201272 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", 
"vote": { "score": 16, "user_vote": null }, "forecasts_count": 275, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 3462, "title": "Will there be a US-Iran war in 2020?", "short_title": "", "url_title": "", "slug": "will-there-be-a-us-iran-war-in-2020", "author_id": 103798, "author_username": "CredibleHulk", "coauthors": [], "created_at": "2020-01-07T00:10:35.832356Z", "published_at": "2020-01-09T18:00:00Z", "edited_at": "2025-09-05T17:29:01.116492Z", "curation_status": "approved", "curation_status_updated_at": "2020-01-09T18:00:00Z", "comment_count": 67, "status": "resolved", "resolved": true, "actual_close_time": "2020-12-01T08:00:00Z", "scheduled_close_time": "2020-12-01T08:00:00Z", "scheduled_resolve_time": "2020-12-31T22:27:00Z", "actual_resolve_time": "2020-12-31T22:27:00Z", "open_time": "2020-01-09T18:00:00Z", "nr_forecasters": 265, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 3462, "title": "Will there be a US-Iran war in 2020?", "created_at": "2020-01-07T00:10:35.832356Z", "open_time": "2020-01-09T18:00:00Z", "cp_reveal_time": "2020-01-09T21:13:39.012637Z", "spot_scoring_time": "2020-01-09T21:13:39.012637Z", "scheduled_resolve_time": "2020-12-31T22:27:00Z", "actual_resolve_time": "2020-12-31T22:27:00Z", "resolution_set_time": "2020-12-31T22:27:00Z", "scheduled_close_time": "2020-12-01T08:00:00Z", "actual_close_time": "2020-12-01T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": 
"", "resolution_criteria": "Recent escalation between US and Iran, culminating (so far) in killing of Qasem Soleimani, led to a lot of talk about war between US and Iran being possible.\n\nSo the question is: **will there be a war between the United States of America and Iran in 2020?**\n\nFor the purpose of this questions, the war between US and Iran is defined as:\n\n- US President (whether it is Donald Trump or any other person) requests and receives Congressional authorization for military action (even if the word \"war\" is not used) against Iran\n- There is a significant military action involving US and Iranian military, and lasting longer than 2 days, as described by major news sources. \n\nBoth conditions should be fulfilled for the question to resolve positively. \n\nIsolated ad-hoc military engagements, as well as targeted strikes, terrorist attacks against civilians or attacks against military targets not resulting in prolonged engagement with Congressional authorization, and actions via proxies do not count as \"war\" for the purpose of this question.\n\nIf the Congress does authorize the military action against Iran, the question should be retroactively closed 24 hours before that.", "fine_print": "", "post_id": 3462, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1606799665.753471, "end_time": null, "forecaster_count": 265, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ] } ], "latest": { "start_time": 1606799665.753471, "end_time": null, "forecaster_count": 265, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.03526100990079605 ], "histogram": [ [ 0.0, 19.831249504613403, 2.077254169725739, 2.129073859996711, 0.58390006189263, 2.3256606266547797, 0.0032092528872134764, 0.5636487221478892, 0.449470828289104, 0.0009441929912776517, 0.0020308034108104858, 0.6679995180850222, 0.588131422693189, 0.0006899988713494745, 0.17931686948678097, 0.0008611102913578079, 0.049190007670441245, 0.0, 0.006106016340097308, 2.9010036511328867e-05, 4.95361707706015e-05, 0.0, 0.9753055733979284, 0.001246821770062179, 0.005584707320798289, 0.0, 0.0, 0.1458277237799702, 0.0030685633583535242, 0.0, 0.4701910282430473, 2.6609082593834504e-05, 0.0, 6.471153969610305e-05, 9.272988593010688e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 7.4545599342556755e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.284973125868515e-05 ] ] }, "score_data": { "peer_score": 11.151430944644948, "coverage": 0.9999964226924064, "baseline_score": 94.64139925685267, "spot_peer_score": -6.200005388691079, "peer_archived_score": 11.151430944644948, "baseline_archived_score": 94.64139925685267, "spot_peer_archived_score": -6.200005388691079 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1606799665.863495, "end_time": null, "forecaster_count": 265, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1606799665.863495, "end_time": null, "forecaster_count": 265, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.992625382703423, 0.007374617296576925 ], "means": null, "histogram": null }, "score_data": {}, 
"movement": null } } }, "user_permission": "forecaster", "vote": { "score": 35, "user_vote": null }, "forecasts_count": 560, "key_factors": [], "is_current_content_translated": false, "description": "" } ] }