Posts List Oldapi View
We shared this request example with FAB participants:

    url_qparams = {
        "limit": count,
        "offset": offset,
        "has_group": "false",
        "order_by": "-activity",
        "forecast_type": "binary",
        "project": tournament_id,
        "status": "open",
        "type": "forecast",
        "include_description": "true",
    }
    url = f"{api_info.base_url}/questions/"
    response = requests.get(
        url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams
    )
But we don't want to support all of these parameters; only the following are relevant:

- order_by
- status
- project
- forecast_type — we ignore this but assume it is "binary", since FAB only supports binary questions for now.
GET /api2/questions/?format=api&offset=5940
{ "count": 6412, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5960", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5920", "results": [ { "id": 968, "title": "Will cost-adjusted IT technology be worse than it was 8 years (32 quarters) ago in at least one quarter prior to 2030?", "short_title": "", "url_title": "", "slug": "will-cost-adjusted-it-technology-be-worse-than-it-was-8-years-32-quarters-ago-in-at-least-one-quarter-prior-to-2030", "author_id": 103733, "author_username": "jzima", "coauthors": [], "created_at": "2018-06-10T18:45:00.944113Z", "published_at": "2018-06-17T07:00:00Z", "edited_at": "2025-09-05T17:28:56.846825Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-17T07:00:00Z", "comment_count": 10, "status": "closed", "resolved": false, "actual_close_time": "2023-01-01T00:00:00Z", "scheduled_close_time": "2023-01-01T00:00:00Z", "scheduled_resolve_time": "2030-04-30T23:00:00Z", "actual_resolve_time": null, "open_time": "2018-06-17T07:00:00Z", "nr_forecasters": 75, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, 
"forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 968, "title": "Will cost-adjusted IT technology be worse than it was 8 years (32 quarters) ago in at least one quarter prior to 2030?", "created_at": "2018-06-10T18:45:00.944113Z", "open_time": "2018-06-17T07:00:00Z", "cp_reveal_time": "2018-06-19T07:00:00Z", "spot_scoring_time": "2018-06-19T07:00:00Z", "scheduled_resolve_time": "2030-04-30T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2023-01-01T00:00:00Z", "actual_close_time": "2023-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "We often take the advance of IT technology for granted and even believe it to be progressing at an exponential rate. 
While Moore's Law has (by some definitions) continued to hold, the data economists have generated when they estimated <a href='https://fred.stlouisfed.org/series/B935RG3Q086SBEA'>the amount of investment required to have equal quality IT equipment over time</a>, reveals that in some sense progress has slowed down. While quality progress was exponential for a long time too, the last approximately ten years have not been all that great. \n\nThe inverse of the linked-to index can be seen as a kind of estimation of the quality of information technology at a given time. (The predictor is encouraged to export the data into excel and look at the evolution of the inverse of the index and the percentage improvement over the last 8 years over time.) \n\nFor example(s), the percentage increase in quality between Q1 1990 and Q1 1998 was 333.5%; the percentage increase in quality between Q1 2000 and Q1 2008 was 172.6%; the percentage increase in quality between Q1 2010 and Q1 2018 was 11.2%. Will the percentage increase in quality over an 8-year time period fall below 0% prior to 2030? Or will progress pick up again to the pace it was at in the 1990s?\n\nIt is asked: <strong>In some quarter prior to Q1 2030, will the <a href='https://fred.stlouisfed.org/series/B935RG3Q086SBEA'>linked-to index</a> have a value greater than the value 32 quarters (8 years) prior to that quarter?</strong>\n\nNotes: \n\n* Should the index reach a higher level than 5 years (20 quarters) ago (prior to question closure) the question should be closed to avoid resolution while the question is open. \n\n* Should this occur anyway, the question should be retroactively closed the day before the last (resolution triggering) data-point was released. Resolution will be through the linked-to index. 
\n\n* Should the link be discontinued, a reasonable effort should be made to find the same index from another reputable source, yet should the index not be findable, the question shall resolve ambiguous.", "fine_print": "", "post_id": 968, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1672518603.940848, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.22 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1672518603.940848, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.22 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.78, 0.22 ], "means": [ 0.24106655970656246 ], "histogram": [ [ 0.0, 0.0034816323194887928, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1900906837396931, 0.6609726698816549, 0.9348911780070488, 0.0, 0.0, 0.0, 0.7043736413074261, 1.5319324337295075, 0.7021345863207582, 0.21896889103996162, 0.0, 0.0, 1.6170877719165433, 1.0527743768662927, 2.2999171854397265, 0.4347224488604969, 0.4008021578256057, 0.1477459351545667, 0.023254082934554995, 0.0, 0.2347694085184629, 0.0, 0.0029327142690001094, 0.31744755557447424, 0.0, 0.6720245335714375, 0.0, 1.0313011132449328, 0.25154229098625835, 0.004778496854644452, 0.0, 0.9606036804502769, 0.5575518906582408, 0.0, 0.3082206783863093, 0.0, 0.0, 0.015174815764353458, 0.08480812467236987, 0.05416656778952044, 0.012430225523610781, 0.0, 0.11061301440850231, 0.0, 0.14360454636297643, 0.0, 0.0, 0.0, 0.0, 0.005537830714382468, 0.0, 0.0, 0.04145990502278528, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.028403737957569904, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007129919161079304, 0.0, 0.06430137558421471, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1672518603.965273, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": null, 
"centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1672518603.965273, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9213915897255612, 0.07860841027443874 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 173, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 962, "title": "Will there be a heavy fuel oil spill of 1,000 gallons or more in the Arctic between 6 June and 8 September 2018?", "short_title": "", "url_title": "", "slug": "will-there-be-a-heavy-fuel-oil-spill-of-1000-gallons-or-more-in-the-arctic-between-6-june-and-8-september-2018", "author_id": 104439, "author_username": "IARPA Question Bot", "coauthors": [], "created_at": "2018-06-06T19:31:55.343171Z", "published_at": "2018-06-06T07:00:00Z", "edited_at": "2025-09-05T17:28:49.120958Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-06T07:00:00Z", "comment_count": 4, "status": "resolved", "resolved": true, "actual_close_time": "2018-09-07T18:01:32Z", "scheduled_close_time": "2018-09-07T18:01:32Z", "scheduled_resolve_time": "2018-09-18T15:08:00Z", "actual_resolve_time": "2018-09-18T15:08:00Z", "open_time": "2018-06-06T07:00:00Z", "nr_forecasters": 90, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, 
"default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 962, "title": "Will there be a heavy fuel oil spill of 1,000 gallons or more in the Arctic between 6 June and 8 September 2018?", "created_at": "2018-06-06T19:31:55.343171Z", "open_time": "2018-06-06T07:00:00Z", "cp_reveal_time": "2018-06-07T11:28:22.394127Z", "spot_scoring_time": "2018-06-07T11:28:22.394127Z", "scheduled_resolve_time": "2018-09-18T15:08:00Z", "actual_resolve_time": "2018-09-18T15:08:00Z", "resolution_set_time": "2018-09-18T15:08:00Z", "scheduled_close_time": "2018-09-07T18:01:32Z", "actual_close_time": "2018-09-07T18:01:32Z", "type": "binary", "options": null, 
"group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*This question was generated from the [IARPA Global Forecasting Challenge](https://www.iarpa.gov/challenges/gfchallenge.html).*\n\nAs shipping traffic via Arctic routes gradually increases due to less sea ice, international policy efforts are underway to ban heavy fuel oil (HFO) from the Arctic, as it is banned in the Antarctic, before there is a spill (<a href = \"https://www.ctvnews.ca/business/polluting-ship-fuel-to-be-banned-in-arctic-shipping-emissions-to-be-halved-1.3884237\"target=\"_blank\">CTV News</a>,<a href = \"https://www.theicct.org/publications/prevalence-heavy-fuel-oil-and-black-carbon-arctic-shipping-2015-2025\"target=\"_blank\">The ICCT</a>, <a href = \"https://response.restoration.noaa.gov/oil-and-chemical-spills/oil-spills/oil-types.html\"target=\"_blank\">NOAA</a>).", "fine_print": "", "post_id": 962, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1536341042.531611, "end_time": null, "forecaster_count": 90, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.04 ] } ], "latest": { "start_time": 1536341042.531611, "end_time": null, "forecaster_count": 90, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.04 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.045760806041736865 ], "histogram": [ [ 0.0, 11.543545896116393, 
1.039324085321633, 0.10588159714711086, 0.6427196800271788, 1.9020406314803586, 0.04993005912383238, 0.0, 0.02191624253265282, 0.0019928216422991796, 0.035841453926795136, 0.0, 0.0, 0.0, 0.0036470026727336412, 0.17382496280948165, 0.0, 0.1441359186257884, 0.048968862998304136, 0.0, 0.311295185864724, 0.0, 0.0, 0.8024416162007773, 0.0, 0.0, 0.09580843479902573, 0.057637412486455035, 0.21268056066278043, 0.0, 0.0, 0.0, 0.0, 0.049484673575065724, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22608754657252977, 0.008069288595311508, 0.0, 0.0, 0.0, 0.0008784573170193555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 4.217817604861764, "coverage": 0.990769707205992, "baseline_score": 84.42112118840657, "spot_peer_score": 9.410305996922197, "peer_archived_score": 4.217817604861764, "baseline_archived_score": 84.42112118840657, "spot_peer_archived_score": 9.410305996922197 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1536313002.782117, "end_time": null, "forecaster_count": 89, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1536313002.782117, "end_time": null, "forecaster_count": 89, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9160678615994252, 0.0839321384005748 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 182, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 957, "title": "Will Donald Trump spend any time in jail or prison before June 30, 2028?", "short_title": "Donald Trump Spends Time in Jail or Prison", 
"url_title": "Donald Trump Spends Time in Jail or Prison", "slug": "donald-trump-spends-time-in-jail-or-prison", "author_id": 100014, "author_username": "converse", "coauthors": [], "created_at": "2018-06-05T18:03:19.997214Z", "published_at": "2018-06-08T07:00:00Z", "edited_at": "2025-09-05T17:29:28.196663Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-08T07:00:00Z", "comment_count": 223, "status": "closed", "resolved": false, "actual_close_time": "2021-06-30T19:00:00Z", "scheduled_close_time": "2021-06-30T19:00:00Z", "scheduled_resolve_time": "2028-06-30T16:22:45.034000Z", "actual_resolve_time": null, "open_time": "2018-06-08T07:00:00Z", "nr_forecasters": 681, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3688, 
"name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 957, "title": "Will Donald Trump spend any time in jail or prison before June 30, 2028?", "created_at": "2018-06-05T18:03:19.997214Z", "open_time": "2018-06-08T07:00:00Z", "cp_reveal_time": "2018-06-08T12:37:34.769587Z", "spot_scoring_time": "2018-06-08T12:37:34.769587Z", "scheduled_resolve_time": "2028-06-30T16:22:45.034000Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2021-06-30T19:00:00Z", "actual_close_time": "2021-06-30T19:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Please take this question not as an expression of partisan blood-lust (\"Lock him up!\"), but as an exercise in conjunctive probabilities.\n\nDonald J. Trump may or may not have committed crimes during his tenure as President, during his campaign, or previously in his life and business career.\n\nFor him to actually serve time as a result of being found guilty, several things would have to be true, with each one roughly dependent on the last.\n\n1) He would have to have committed a crime (on the generous theory that he will not be jailed if this is not the case).\n\n2) He would have to be eligible to be indicted. 
This means that either \n\na) he is no longer President, or \n\nb) it is decided that a sitting President can be indicted (a matter of legal controversy at the moment)\n\n3) A prosecutor or grand jury would have to decide that he should be indicted.\n\n4) He would have to be found guilty.\n\n5) A judge would have to decide on a sentence that included time in jail.\n\n6) The sentence starts before a pardon occurs.\n\nNumber 5 in particular may be quite a high bar, especially given the potentially incendiary impact of jailing a President or ex-President who won election and enjoyed the continuing support of a large percentage of the American public. The American political/judicial system has shied away from any such outcome in previous scandals. Richard Nixon resigned to avoid impeachment and then was preemptively pardoned; Bill Clinton was impeached by the House without conviction by the Senate, and did not suffer conviction on criminal charges (though he was cited and fined for civil contempt of court, had his law license suspended for five years, and settled a civil lawsuit out of court).", "resolution_criteria": "This question will resolve positively if Donald Trump is incarcerated for any period of time, no matter how brief, before June 30, 2028. It will resolve negatively either on June 30 2028 or upon Trump's death if he has not been so incarcerated\n\nNote that the reason for Trump being jailed need not have any connection to any particular scandal or to Trump's presidency or presidential campaign, nor to the 6 steps listed above. 
If Trump is held overnight in county lock-up pending arraignment on a charge of reckless driving, then the resolution is positive", "fine_print": "", "post_id": 957, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1625075410.174116, "end_time": null, "forecaster_count": 681, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.11 ], "interval_upper_bounds": [ 0.21 ] } ], "latest": { "start_time": 1625075410.174116, "end_time": null, "forecaster_count": 681, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.11 ], "interval_upper_bounds": [ 0.21 ], "forecast_values": [ 0.89, 0.11 ], "means": [ 0.1607086644515924 ], "histogram": [ [ 0.0, 4.0474515839556275, 0.7275035675507275, 1.0866643598474321, 2.6976315066023244, 3.577359338905465, 0.7050527372210035, 1.1165460105252276, 2.095378826837623, 1.1466480304761921, 5.289475156841316, 3.209499545607012, 1.234269862136298, 0.429418650454718, 0.8309656853299958, 4.641195536260779, 1.5957277968930317, 0.46706656170300537, 0.7949548960004086, 1.0176590051985388, 0.5044926918246954, 3.2289132082318277, 1.229228327232633, 0.18785000831688742, 0.2521980946131278, 0.15242105436666914, 0.6798054714188352, 0.13933718667904582, 0.010367031616792522, 0.0, 0.4471644480496801, 0.14823404547837035, 0.6537819483598992, 1.593483274408761, 0.0, 1.6379314046169746e-08, 0.0, 0.0006024926998343544, 0.008520777608937017, 0.5703871346820866, 1.4523474160026106, 0.0, 0.22860999994753395, 0.00892929017470698, 0.644158263574823, 0.5930047996641594, 5.425202376513036e-07, 0.0, 0.0, 0.0, 3.046490067188578e-07, 0.0, 0.0006884354634634909, 3.363218693281497e-08, 0.0074381397374044395, 0.00018456694412281066, 0.0, 0.0, 0.0, 0.0, 0.0012150656148390284, 0.0009195705878489498, 0.001707211778581978, 1.9126600611062532e-05, 0.0, 1.9758827358913825e-06, 0.10185399611128629, 0.02298302917664907, 0.0, 0.0, 1.7327239607337083e-05, 0.0, 0.0, 0.024405484049729902, 0.0, 0.6379874051810035, 0.0, 0.0, 0.0, 0.0, 0.000670579689811214, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007858997015064212, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.44836573001358127 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1625075410.174116, "end_time": null, "forecaster_count": 682, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1625075410.174116, "end_time": null, "forecaster_count": 682, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9611872800550294, 0.038812719944970646 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 67, "user_vote": null }, "forecasts_count": 1635, "key_factors": [], "is_current_content_translated": false, "description": "Please take this question not as an expression of partisan blood-lust (\"Lock him up!\"), but as an exercise in conjunctive probabilities.\n\nDonald J. Trump may or may not have committed crimes during his tenure as President, during his campaign, or previously in his life and business career.\n\nFor him to actually serve time as a result of being found guilty, several things would have to be true, with each one roughly dependent on the last.\n\n1) He would have to have committed a crime (on the generous theory that he will not be jailed if this is not the case).\n\n2) He would have to be eligible to be indicted. 
This means that either \n\na) he is no longer President, or \n\nb) it is decided that a sitting President can be indicted (a matter of legal controversy at the moment)\n\n3) A prosecutor or grand jury would have to decide that he should be indicted.\n\n4) He would have to be found guilty.\n\n5) A judge would have to decide on a sentence that included time in jail.\n\n6) The sentence starts before a pardon occurs.\n\nNumber 5 in particular may be quite a high bar, especially given the potentially incendiary impact of jailing a President or ex-President who won election and enjoyed the continuing support of a large percentage of the American public. The American political/judicial system has shied away from any such outcome in previous scandals. Richard Nixon resigned to avoid impeachment and then was preemptively pardoned; Bill Clinton was impeached by the House without conviction by the Senate, and did not suffer conviction on criminal charges (though he was cited and fined for civil contempt of court, had his law license suspended for five years, and settled a civil lawsuit out of court)." 
}, { "id": 953, "title": "Will Mike Pence be elected president of the United States in 2024?", "short_title": "Mike Pence Wins 2024 US Pres Election", "url_title": "Mike Pence Wins 2024 US Pres Election", "slug": "mike-pence-wins-2024-us-pres-election", "author_id": 103634, "author_username": "AngraMainyu", "coauthors": [], "created_at": "2018-06-05T07:13:54.319637Z", "published_at": "2020-05-08T06:00:00Z", "edited_at": "2025-09-05T17:28:46.739191Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-08T06:00:00Z", "comment_count": 26, "status": "resolved", "resolved": true, "actual_close_time": "2022-08-02T02:00:00Z", "scheduled_close_time": "2022-08-02T02:00:00Z", "scheduled_resolve_time": "2025-02-01T07:01:00Z", "actual_resolve_time": "2024-11-25T18:35:00Z", "open_time": "2020-05-08T06:00:00Z", "nr_forecasters": 238, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": 
"forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" } ] }, "question": { "id": 953, "title": "Will Mike Pence be elected president of the United States in 2024?", "created_at": "2018-06-05T07:13:54.319637Z", "open_time": "2020-05-08T06:00:00Z", "cp_reveal_time": "2020-05-08T18:20:05.566185Z", "spot_scoring_time": "2020-05-08T18:20:05.566185Z", "scheduled_resolve_time": "2025-02-01T07:01:00Z", "actual_resolve_time": "2024-11-25T18:35:00Z", "resolution_set_time": "2024-11-25T18:36:12.795181Z", "scheduled_close_time": "2022-08-02T02:00:00Z", "actual_close_time": "2022-08-02T02:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Mike Pence](https://en.wikipedia.org/wiki/Mike_Pence) is the 48th vice president of the United States. He was previously the governor of Indiana and a member of the US House of Representatives. 
Some have [speculated](https://www.bloomberg.com/news/articles/2020-02-24/mike-pence-s-2024-presidential-campaign-has-already-begun) that Pence will run for president in 2024.\n\n**Will Mike Pence be elected president of the United States in 2024?**\n\nThis question resolves positive if Mike Pence wins the US presidency and is sworn into office by February of 2025.", "fine_print": "", "post_id": 953, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1659402500.294063, "end_time": null, "forecaster_count": 238, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.04 ] } ], "latest": { "start_time": 1659402500.294063, "end_time": null, "forecaster_count": 238, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.04 ], "forecast_values": [ 0.98, 0.02 ], "means": [ 0.040445410039595946 ], "histogram": [ [ 0.0, 10.499451708570552, 7.099138188378488, 3.2834109663891478, 1.3522341674759684, 3.8486329767962983, 0.4268191474896282, 0.7508159186655863, 0.0002235228852124634, 0.05748172683625138, 0.09809628740966289, 0.005626330336532621, 0.09680797212785618, 0.008937159327821798, 0.013079461361269515, 0.0019613420652836704, 0.0, 0.0, 0.0, 0.3081662712882362, 1.0833407471343184, 0.0, 0.0, 0.000633031184612453, 0.0, 0.0, 0.0, 0.00590930355138076, 0.017866916972253313, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.31841753208212903, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07861836984472609, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0015292517575668835 ] ] }, "score_data": { "baseline_score": 93.85036009869124, "peer_score": 6.568527084179082, "coverage": 0.9997779153584584, "relative_legacy_score": 0.0, "weighted_coverage": 0.9997779153584584, "spot_peer_score": 2.011549866757113, 
"baseline_archived_score": 93.85036009869124, "peer_archived_score": 6.568527084179082, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 2.011549866757113 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1659402500.326376, "end_time": null, "forecaster_count": 238, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1659402500.326376, "end_time": null, "forecaster_count": 238, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.995, 0.005 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 16, "user_vote": null }, "forecasts_count": 457, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 946, "title": "Will Moon Express successfully scout the moon by 2021?", "short_title": "", "url_title": "", "slug": "will-moon-express-successfully-scout-the-moon-by-2021", "author_id": 105658, "author_username": "ferb2", "coauthors": [], "created_at": "2018-06-03T16:03:45.679130Z", "published_at": "2018-06-06T16:03:45Z", "edited_at": "2025-09-05T17:28:58.463465Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-06T16:03:45Z", "comment_count": 11, "status": "resolved", "resolved": true, "actual_close_time": "2019-06-16T03:59:00Z", "scheduled_close_time": "2019-06-16T03:59:00Z", "scheduled_resolve_time": "2021-01-02T11:18:00Z", "actual_resolve_time": "2021-01-02T11:18:00Z", "open_time": "2018-06-06T16:03:45Z", "nr_forecasters": 108, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32593, "name": "2016-2020 Leaderboard", "slug": "2016_2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, 
"forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" } ] }, "question": { "id": 946, "title": "Will Moon Express successfully scout the moon by 2021?", "created_at": "2018-06-03T16:03:45.679130Z", "open_time": "2018-06-06T16:03:45Z", "cp_reveal_time": "2018-06-07T18:36:09.242185Z", "spot_scoring_time": "2018-06-07T18:36:09.242185Z", "scheduled_resolve_time": "2021-01-02T11:18:00Z", "actual_resolve_time": "2021-01-02T11:18:00Z", "resolution_set_time": "2021-01-02T11:18:00Z", "scheduled_close_time": "2019-06-16T03:59:00Z", "actual_close_time": "2019-06-16T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", 
"label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Moon Express](http://www.moonexpress.com/about-us/) has released a plan for going to the moon and then mining the moon for resources. \n\nThey have [three missons planned.](http://www.moonexpress.com/expeditions/) The first will be a scout to survey the moon.\nThe second mission they will send up lunar prospecting equipement.\nThe third misson is to have their first sample return expedition \"harvest moon\" set to be in 2021.\n\nThis question concerns the first step:\n\n*** A mission associated with Moon Express successfully reach the moon by start of 2021? ***\n\nQuestion resolves positive if an instrument reaches the lunar surface intact by Jan 1, 2021. 
The instrument need not function perfectly but should not have been destroyed or majorly damaged upon impact.", "fine_print": "", "post_id": 946, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1560617555.777503, "end_time": null, "forecaster_count": 108, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1560617555.777503, "end_time": null, "forecaster_count": 108, "interval_lower_bounds": [ 0.15 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.26370205191222973 ], "histogram": [ [ 0.0, 0.11508082653772184, 0.0, 0.0, 0.2626534733870214, 0.03928592011631271, 0.0, 0.8943493913384859, 0.25767355725713653, 0.029110677578428227, 0.4304843361858951, 0.0, 1.2834242779885991, 0.42617175584096656, 0.0, 1.6023476458559143, 0.0, 0.26095512239865903, 0.014583116224150178, 0.041535686961594466, 1.9812925357499767, 0.0, 0.0, 0.20196887916462347, 1.2820520832911857, 1.1142991970930003, 0.0, 1.2915432188552771, 0.310127364008611, 0.0, 0.18583565258080542, 0.012781866717071258, 0.0, 2.668835189552148, 0.972758780552464, 0.008030243059253494, 0.11670488564426329, 0.0, 0.01044791386579032, 0.6974768278440606, 0.06645972874479679, 0.00043222964109477943, 0.0009797596903721127, 0.0, 0.07561843551496727, 0.7461714294025442, 0.0, 0.6498081441811664, 0.11003551809439585, 0.0545307742321855, 0.5043038595239657, 0.0, 0.0007245011671404842, 0.0, 0.0, 0.38353526245884173, 0.0, 0.0, 0.0, 0.0, 0.06381933945814662, 0.0, 0.0, 0.02705252689643328, 0.0, 0.0, 0.0, 0.0, 0.0021341984025259253, 0.004114140404594805, 0.004551470800426355, 0.0, 0.0, 0.0, 0.025119749415892158, 0.0005188593462568196, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05693682081721352 ] ] }, "score_data": { "peer_score": 9.27458719765591, "coverage": 0.9998137249849066, "baseline_score": 48.590851055329765, 
"spot_peer_score": 8.818325027895716, "peer_archived_score": 9.27458719765591, "baseline_archived_score": 48.590851055329765, "spot_peer_archived_score": 8.818325027895716 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1560617555.800513, "end_time": null, "forecaster_count": 108, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1560617555.800513, "end_time": null, "forecaster_count": 108, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.88698488459166, 0.11301511540833996 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 7, "user_vote": null }, "forecasts_count": 192, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 945, "title": "Will Moon Express have a successful harvest by 2023?", "short_title": "Moon Express Successful Harvest by 2023", "url_title": "Moon Express Successful Harvest by 2023", "slug": "moon-express-successful-harvest-by-2023", "author_id": 105658, "author_username": "ferb2", "coauthors": [], "created_at": "2018-06-03T04:42:33.134168Z", "published_at": "2018-06-06T07:00:00Z", "edited_at": "2025-09-05T17:29:23.641818Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-06T07:00:00Z", "comment_count": 8, "status": "resolved", "resolved": true, "actual_close_time": "2020-06-16T03:59:00Z", "scheduled_close_time": "2020-06-16T03:59:00Z", "scheduled_resolve_time": "2023-01-01T00:00:00Z", "actual_resolve_time": "2023-01-01T00:00:00Z", "open_time": "2018-06-06T07:00:00Z", "nr_forecasters": 138, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": 
null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" } ] }, "question": { "id": 945, "title": "Will Moon Express have a successful harvest by 2023?", "created_at": "2018-06-03T04:42:33.134168Z", "open_time": "2018-06-06T07:00:00Z", "cp_reveal_time": "2018-06-07T19:05:16.689691Z", "spot_scoring_time": "2018-06-07T19:05:16.689691Z", "scheduled_resolve_time": "2023-01-01T00:00:00Z", "actual_resolve_time": "2023-01-01T00:00:00Z", "resolution_set_time": "2023-01-01T00:00:00Z", "scheduled_close_time": "2020-06-16T03:59:00Z", "actual_close_time": "2020-06-16T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, 
"default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Moon Express](http://www.moonexpress.com/about-us/) has released a plan for going to the moon and then mining the moon for resources. \n\nThey have [three missons planned.](http://www.moonexpress.com/expeditions/) The first will be a scout to survey the moon.\nThe second mission they will send up lunar prospecting equipement.\nThe third misson is to have their first sample return expedition \"harvest moon\" set to be in 2021.\n\nThis question concerns the third mission. We'll give them an extra year:\n\n*** Will they bring any lunar material back by start of 2023? 
***\n\nQuestion resolves positive if a successful lunar harvest is made by Moon Express by December 30,2022, returning lunar material safely to Earth.", "fine_print": "", "post_id": 945, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1592267853.14916, "end_time": null, "forecaster_count": 138, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.05 ] } ], "latest": { "start_time": 1592267853.14916, "end_time": null, "forecaster_count": 138, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.05 ], "forecast_values": [ 0.98, 0.02 ], "means": [ 0.04220984873468594 ], "histogram": [ [ 0.0, 10.174875123359234, 2.224320475965375, 2.2643870954161978, 1.1748195725774238, 1.550471286755378, 1.5302803937346412, 0.00015007089673996405, 0.6138103960521406, 0.16938338459105395, 0.9986705916166858, 0.009313667699328693, 0.124959000374467, 0.0, 0.24579444256613914, 0.0, 0.030653201518687168, 0.0, 0.007053143416873747, 0.0, 0.039962380018045154, 0.019151924318019715, 0.020788979790154795, 0.004775694421494681, 0.0, 0.39892819292495396, 0.0, 0.03809948463559229, 0.00018687652294719, 0.0, 0.019731070672936547, 0.017142502437725943, 0.08444744417014308, 0.0430662841878566, 0.09892839311609887, 0.0, 0.002630760422938089, 0.01738154880771244, 0.006977879943650259, 0.009992587649107681, 0.0022723532641139967, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0002527174455538913, 0.0, 0.008073752635453043, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00157132274383118, 0.0, 0.0, 0.0, 0.0020713063958986183, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04286162453764613 ] ] }, "score_data": { "peer_score": 20.502824877881665, "coverage": 0.9993956481142608, "baseline_score": 76.95223598840845, "spot_peer_score": 3.5162914704193042, "peer_archived_score": 20.502824877881665, 
"baseline_archived_score": 76.95223598840845, "spot_peer_archived_score": 3.5162914704193042 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1592267853.396952, "end_time": null, "forecaster_count": 138, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1592267853.396952, "end_time": null, "forecaster_count": 138, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9670106803290344, 0.032989319670965626 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 234, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 941, "title": "By the end of 2019 will a gene drive targeting malaria be initiated?", "short_title": "", "url_title": "", "slug": "by-the-end-of-2019-will-a-gene-drive-targeting-malaria-be-initiated", "author_id": 8, "author_username": "Anthony", "coauthors": [], "created_at": "2018-06-01T17:46:59.246133Z", "published_at": "2018-06-01T18:15:00.234000Z", "edited_at": "2025-09-05T17:29:21.002375Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-01T18:15:00.234000Z", "comment_count": 4, "status": "resolved", "resolved": true, "actual_close_time": "2019-01-01T21:22:44Z", "scheduled_close_time": "2019-01-01T21:22:44Z", "scheduled_resolve_time": "2020-01-12T01:56:00Z", "actual_resolve_time": "2020-01-12T01:56:00Z", "open_time": "2018-06-01T18:15:00.234000Z", "nr_forecasters": 78, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32606, "name": "2018-2019 Leaderboard", "slug": "2018_2019_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": 
null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 941, "title": "By the end of 2019 will a gene drive targeting malaria be initiated?", "created_at": "2018-06-01T17:46:59.246133Z", "open_time": "2018-06-01T18:15:00.234000Z", "cp_reveal_time": "2018-06-02T21:43:36.445991Z", "spot_scoring_time": "2018-06-02T21:43:36.445991Z", "scheduled_resolve_time": "2020-01-12T01:56:00Z", "actual_resolve_time": "2020-01-12T01:56:00Z", "resolution_set_time": "2020-01-12T01:56:00Z", "scheduled_close_time": "2019-01-01T21:22:44Z", "actual_close_time": "2019-01-01T21:22:44Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": 
null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "\"[Gene drives]\" are phenomena in a species' population in which one version of a gene, or allele, is probabilistically favored over other alleles that are otherwise equally favored by fitness. A gene drive in a particular allele shows up as a bias for the corresponding phenotype in the offspring. Consider two parents with different alleles for the same gene; if there exists a gene drive for one allele, it is highly likely that all of the parents' offspring will carry the driven gene's trait. \n\nWith new advances in genetic engineering using CRISPR, it is now much easier to modify an organism's genes. This makes *engineered* gene drives [tractable]: a gene coding for the CRISPR system itself can be encoded near to the gene being \"driven,\" so that if one copy of the driven allele and one \"wild\" allele are inherited, the CRISPR system modifies the wild gene so that the driven gene plus CRISPR system is inherited. This process can spread the driven gene expoentially throughout a population, at a rate far exceeding the spread of a gene that is merely favorable for survival.\n\nUses of this method include the potential to eliminate diseases like malaria or lyme disease that are spread by a fast-reproducing vector, by promoting disease-resistant traits. [Valentino Gantz et. al.] have genetically altered a primary malaria vector native to India, the [Anopheles stephensi] mosquito, to carry and pass on anti-malaria traits. Another [study] published in nature biotechnology offers a more drastic approach that would render female [Anopheles gambiae] mosquitoes, native to Africa, completely infertile, with the intent of wiping out the species in affected ecosystems. 
Similar studies have investigated [engineering mice] (a prime carrier) to be immune to Lyme disease. \n\nWith Malaria [afflicting hundreds of millions] of people per year, advances in gene drive research have insitgated public conversation about the usefulness, feasibility, and ethics of gene drives is being encouraged before testing them in wild ecosystems. \n\n*** By January 1st, 2020, will a credible reports indicate that a formal submission has been made to a regulatory body proposing to test a malaria-combatting gene drive in a wild population? ***\n\nFor positive resolution, the drive need not targeted for the US or home country of the researchers (indeed this is unlikely), but must be done under the purview of some regulatory body so that in principle parameters and details of the drive are provided and (potentially) approved by some authority. The drive itself need not be initiated to count. The wild population can be isolated (say on an island or even in an enclosure) to control spreading but should aim to replicate natural reproduction etc., and cannot be a laboratory setting.\n\n[afflicting hundreds of millions]:http://www.who.int/features/factfiles/malaria/en/\n[tractable]:http://www.nature.com/news/mosquitoes-engineered-to-pass-down-genes-that-would-wipe-out-their-species-1.18974\n[Gene drives]: http://wyss.harvard.edu/staticfiles/newsroom/pressreleases/Gene%20drives%20FAQ%20FINAL.pdf\n[Valentino Gantz et. 
al.]: http://www.pnas.org/content/112/49/E6736.abstract\n[study]: http://www.nature.com/nbt/journal/vaop/ncurrent/full/nbt.3439.html\n[Anopheles gambiae]: https://en.wikipedia.org/wiki/Anopheles_gambiae\n[Anopheles stephensi]: https://en.wikipedia.org/wiki/Anopheles_stephensi\n[engineering mice]:http://www.newyorker.com/magazine/2015/11/16/the-gene-hackers\"", "fine_print": "", "post_id": 941, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1546351271.878993, "end_time": null, "forecaster_count": 78, "interval_lower_bounds": [ 0.12 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.37 ] } ], "latest": { "start_time": 1546351271.878993, "end_time": null, "forecaster_count": 78, "interval_lower_bounds": [ 0.12 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.37 ], "forecast_values": [ 0.8, 0.2 ], "means": [ 0.23431322390327305 ], "histogram": [ [ 0.0, 0.9174087627868593, 0.2964170805706633, 0.138607890209693, 0.0, 0.7499437156931522, 0.0, 0.0, 0.0, 0.0, 1.2799036663690257, 0.0, 1.008778261244173, 0.0, 0.6280342341719295, 1.03859854627972, 0.4632431883235791, 0.0, 0.8922909885556951, 0.0, 2.0862504626854053, 0.0, 0.014275861551769443, 0.6510495644239456, 0.0, 0.2179241786726942, 0.0, 0.0, 0.17192566867158657, 0.0, 0.03657847736268022, 0.0, 0.12053584487345684, 0.6106438359628994, 0.058852749695106496, 0.023748298399676564, 0.707173321785911, 0.3600504240909274, 0.4352821374375108, 0.0, 1.031668596151889, 0.0, 0.6609105296956431, 0.19776846219557181, 0.0, 0.14217546802542774, 0.0024705024163893547, 0.2596434091051626, 0.4927652120469003, 0.002932907971702584, 0.0020882034224388763, 0.001078956545637215, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3376335396423815, 0.0, 0.0, 0.0, 0.04179800713285482, 0.005373855860101914, 0.0, 0.006157382690986979, 0.0, 0.0, 0.0, 0.0, 0.021671421535911298, 0.0, 0.0, 0.0, 0.0, 0.0006006204798844153, 0.0, 0.0, 0.0, 0.054167126145610874, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0 ] ] }, "score_data": { "peer_score": 19.853679557751455, "coverage": 0.9998728188839875, "baseline_score": 46.69994039023381, "spot_peer_score": 28.21131540668908, "peer_archived_score": 19.853679557751455, "baseline_archived_score": 46.69994039023381, "spot_peer_archived_score": 28.21131540668908 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1546337557.470128, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1546337557.470128, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8964583692037316, 0.1035416307962684 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 8, "user_vote": null }, "forecasts_count": 138, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 940, "title": "Will there be a locally-transmitted case of the Zika virus in Ukraine, Russia, Georgia or Armenia between 1 June 2018 and 31 August 2018?", "short_title": "", "url_title": "", "slug": "will-there-be-a-locally-transmitted-case-of-the-zika-virus-in-ukraine-russia-georgia-or-armenia-between-1-june-2018-and-31-august-2018", "author_id": 104439, "author_username": "IARPA Question Bot", "coauthors": [], "created_at": "2018-05-30T19:31:04.636090Z", "published_at": "2018-06-01T07:00:00Z", "edited_at": "2025-09-05T17:29:02.625394Z", "curation_status": "approved", "curation_status_updated_at": "2018-06-01T07:00:00Z", "comment_count": 3, "status": "resolved", "resolved": true, "actual_close_time": "2018-08-31T18:01:02Z", "scheduled_close_time": "2018-08-31T18:01:02Z", "scheduled_resolve_time": "2018-09-04T22:46:00Z", "actual_resolve_time": "2018-09-04T22:46:00Z", "open_time": "2018-06-01T07:00:00Z", "nr_forecasters": 86, "html_metadata_json": null, 
"projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, 
"bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" } ] }, "question": { "id": 940, "title": "Will there be a locally-transmitted case of the Zika virus in Ukraine, Russia, Georgia or Armenia between 1 June 2018 and 31 August 2018?", "created_at": "2018-05-30T19:31:04.636090Z", "open_time": "2018-06-01T07:00:00Z", "cp_reveal_time": "2018-06-02T12:27:08.752567Z", "spot_scoring_time": "2018-06-02T12:27:08.752567Z", "scheduled_resolve_time": "2018-09-04T22:46:00Z", "actual_resolve_time": "2018-09-04T22:46:00Z", "resolution_set_time": "2018-09-04T22:46:00Z", "scheduled_close_time": "2018-08-31T18:01:02Z", "actual_close_time": "2018-08-31T18:01:02Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*This question was generated from the [IARPA Global Forecasting Challenge](https://www.iarpa.gov/challenges/gfchallenge.html).*\n\nThe disease must have been contracted in the country of interest for this question to resolve as \"yes.\" Imported cases (e.g., where the patient traveled to an endemic country, contracted the disease, and brought it home) do not meet the resolution criteria, though a locally-transmitted case which originates from an imported case does. 
If there is no indication of whether a case was locally-transmitted, this question will resolve as “no.”", "fine_print": "", "post_id": 940, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1535707706.210295, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.05 ] } ], "latest": { "start_time": 1535707706.210295, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.05 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.05053113067980771 ], "histogram": [ [ 0.0, 9.568244669214598, 2.416493282962981, 0.0, 0.5152384099801852, 1.282095441283968, 0.26279932462319805, 0.6101565097522024, 0.013931281558975482, 0.010222105112996144, 0.9329181384243223, 0.0, 0.004513713150276017, 0.32712438146800016, 0.010336654008195868, 0.012631646953360397, 0.0, 0.0, 0.0, 0.0, 0.00025516032229147665, 0.0, 0.012592687257094694, 0.0, 0.21926230857741674, 0.0, 0.0, 0.001322981755186089, 0.020476985370305, 0.0, 0.10482441490490084, 0.02245161071843299, 0.0, 0.0, 0.003958218974807764, 0.0, 0.0041758202345270855, 0.0, 0.0, 0.0, 0.0, 0.0010872227688318024, 0.0, 0.0, 0.0, 0.357933793421635, 0.0, 0.0, 0.0, 0.0, 0.3368000919432843, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 8.94245785550672, "coverage": 0.9989645870599309, "baseline_score": 89.29124442636892, "spot_peer_score": 14.775104730490801, "peer_archived_score": 8.94245785550672, "baseline_archived_score": 89.29124442636892, "spot_peer_archived_score": 14.775104730490801 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1535381740.740285, "end_time": null, "forecaster_count": 84, "interval_lower_bounds": null, 
"centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1535381740.740285, "end_time": null, "forecaster_count": 84, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8943971968231679, 0.10560280317683214 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 179, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 934, "title": "Will a MLB pitcher throw a perfect game in 2018?", "short_title": "", "url_title": "", "slug": "will-a-mlb-pitcher-throw-a-perfect-game-in-2018", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-25T17:26:24.466387Z", "published_at": "2018-05-28T07:00:00Z", "edited_at": "2025-09-05T17:28:59.095551Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-28T07:00:00Z", "comment_count": 5, "status": "resolved", "resolved": true, "actual_close_time": "2018-10-15T07:00:00Z", "scheduled_close_time": "2018-10-15T07:00:00Z", "scheduled_resolve_time": "2018-12-20T14:16:00Z", "actual_resolve_time": "2018-12-20T14:16:00Z", "open_time": "2018-05-28T07:00:00Z", "nr_forecasters": 80, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": 
"2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 934, "title": "Will a MLB pitcher throw a perfect game in 2018?", "created_at": "2018-05-25T17:26:24.466387Z", "open_time": "2018-05-28T07:00:00Z", "cp_reveal_time": "2018-05-30T07:00:00Z", "spot_scoring_time": "2018-05-30T07:00:00Z", "scheduled_resolve_time": "2018-12-20T14:16:00Z", "actual_resolve_time": "2018-12-20T14:16:00Z", "resolution_set_time": "2018-12-20T14:16:00Z", "scheduled_close_time": "2018-10-15T07:00:00Z", "actual_close_time": "2018-10-15T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In Major League Baseball history, there have been just [299 no 
hitters](https://en.wikipedia.org/wiki/List_of_Major_League_Baseball_no-hitters) ever thrown. That averages to approximately 2 a season. The [last one](https://www.usatoday.com/story/sports/mlb/2018/04/21/athletics-sean-manaea-no-hitter-red-sox/539896002/) was pitched by Sean Manaea of the Oakland A's on April 21, 2018. \n\nStatistically speaking, we should expect another no hitter by the end of the 2018 season.\n\nBut perfect games are a whole different animal. To qualify for this honor, a team must put away every single batter through nine innings. No walks, men on base due to errors or hit batsmen allowed.\n\nTo date, the sport has witnesses only a vanishingly small number of perfect games. As MLB.com [colorfully explains](https://www.mlb.com/news/perfect-game/c-265862286): \n\n> There have been just 23 such outings among the hundreds of thousands of big league games played since Rutherford B. Hayes was in the White House.\n\nThat said, there *have* been 6 perfect games since 2009, with [3 of them](https://www.opposingviews.com/sports/3-explanations-3-perfect-games-2012) coming in 2012 alone! \n\nAnd there's plenty of awesome pitching talent playing in 2018, including [Clayton Kershaw](http://www.espn.com/mlb/player/_/id/28963/clayton-kershaw), [Corey Kluber](http://m.mlb.com/player/446372/corey-kluber), and [Max Scherzer](http://m.mlb.com/player/453286/max-scherzer).\n\nFurthermore, a 538 analysis suggests that baseball's becoming a more pitcher-friendly game. The fans might not love a slower pace, but this trend should increase our chances of seeing a perfect game soon. [538 explains](https://fivethirtyeight.com/features/pitchers-are-slowing-down-to-speed-up/):\n\n> The average delay between pitches has jumped a full second. 
It’s all part of a decadelong trend toward more sluggish play, and there’s an alarming reason baseball’s pace problem is likely to get even worse going forward: Slowing down helps pitchers throw faster.\n\nQuestion resolves positive if a MLB pitcher throws a perfect game in 2018 (either in the regular season or in the post season). Question closes retroactively at 1 hour before the start of a game in which this occurs, if it does.", "fine_print": "", "post_id": 934, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1539544058.310686, "end_time": null, "forecaster_count": 80, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1539544058.310686, "end_time": null, "forecaster_count": 80, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.94, 0.06 ], "means": [ 0.08772016891753137 ], "histogram": [ [ 0.0, 3.072989001936567, 0.9454658663642979, 1.1206343378997072, 0.767568492964438, 2.193070535140525, 0.7101226959091163, 0.0, 0.07282349883545862, 0.17672342147353706, 3.536506278441596, 0.0, 0.3017051033688969, 0.0, 0.10687792566038573, 0.893587478567622, 0.07877593890689805, 0.0, 0.5285340505500855, 0.02591925143156949, 0.5002186663947418, 0.01936531092725104, 0.0, 0.0, 0.0, 0.21959992837324233, 0.010199944840930094, 0.0, 0.7582553513378513, 0.0, 0.02356201288525273, 0.01948027544888472, 0.004168619742933615, 0.08513379411538498, 0.0, 0.13123555048976673, 0.0, 0.04445312502789519, 0.012756731128426116, 0.0, 0.001839023418757394, 0.015789022061289532, 0.007124099762028338, 0.0, 0.0026208098393045902, 0.003696301342608286, 0.0, 0.0, 0.0, 0.0, 0.00035468804191158816, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0022076100250565805, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] 
] }, "score_data": { "peer_score": 5.285350549359983, "coverage": 0.9998098795887822, "baseline_score": 45.35956702013866, "spot_peer_score": -4.653254555515307, "peer_archived_score": 5.285350549359983, "baseline_archived_score": 45.35956702013866, "spot_peer_archived_score": -4.653254555515307 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1539527540.105033, "end_time": null, "forecaster_count": 80, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1539527540.105033, "end_time": null, "forecaster_count": 80, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8957528226537704, 0.1042471773462296 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 193, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 933, "title": "Will Prince Harry and Meghan Markle announce a pregnancy by year's end?", "short_title": "", "url_title": "", "slug": "will-prince-harry-and-meghan-markle-announce-a-pregnancy-by-years-end", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-25T17:23:36.269008Z", "published_at": "2018-05-28T07:00:00Z", "edited_at": "2025-09-05T17:29:26.832928Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-28T07:00:00Z", "comment_count": 4, "status": "resolved", "resolved": true, "actual_close_time": "2018-08-20T07:00:00Z", "scheduled_close_time": "2018-08-20T07:00:00Z", "scheduled_resolve_time": "2018-10-15T12:45:00Z", "actual_resolve_time": "2018-10-15T12:45:00Z", "open_time": "2018-05-28T07:00:00Z", "nr_forecasters": 105, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { 
"id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 933, "title": "Will Prince Harry and Meghan Markle announce a pregnancy by year's end?", "created_at": "2018-05-25T17:23:36.269008Z", "open_time": "2018-05-28T07:00:00Z", "cp_reveal_time": "2018-05-29T02:24:25.127209Z", "spot_scoring_time": "2018-05-29T02:24:25.127209Z", "scheduled_resolve_time": "2018-10-15T12:45:00Z", "actual_resolve_time": "2018-10-15T12:45:00Z", "resolution_set_time": "2018-10-15T12:45:00Z", "scheduled_close_time": "2018-08-20T07:00:00Z", "actual_close_time": "2018-08-20T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The royal wedding of Prince Harry and Meghan Markle captivated a television audience of over [18 million people](http://deadline.com/2018/05/royal-wedding-ratings-18m-watch-prince-harry-meghan-markles-big-day-in-the-uk-1202394873/).\n\nThe affair offered symbolic power as well. The New York Times, for instance, celebrated its [\"Bicultural Blackness\"](https://www.nytimes.com/2018/05/20/arts/television/meghan-markle-royal-wedding-blackness.html), while [Time Magazine](http://time.com/5282658/royal-wedding-stand-by-me-meaning/) tuned into the couple's special song, Stand By Me, noting that: \n\n> For many, [the song's] lyrics also pack a deep political message. When [it] first rose to popularity during the civil rights movement, it was used as a rallying cry for solidarity amongst people of color.\n\nNow that the nuptials are over, though, the chatter amongst commoners has turned to questions of parenthood. The UK tabloid, Express, [summarized the situation](https://www.express.co.uk/news/royal/962962/meghan-markle-pregnant-royal-baby-latest-odds-prince-harry-royal-wedding-2018): \n\n> The odds of Meghan and Harry having a child this year have been slashed by bookmakers Paddy Power. Standing at 20/1 yesterday, they have slid to 4/1 today suggesting there is a chance Meghan may indeed have been pregnant as she walked down the aisle.\n\nExpress also notes that \"the Duke and Duchess of Cambridge waited 20 months after their wedding before announcing Kate’s pregnancy.\" \n\nWhat do you think? 
Question resolves positive if the new royal couple annouces Meghan's pregancy before January 1, 2019.", "fine_print": "", "post_id": 933, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1534699862.033651, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.26 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.43 ] } ], "latest": { "start_time": 1534699862.033651, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.26 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.43 ], "forecast_values": [ 0.6699999999999999, 0.33 ], "means": [ 0.3436205850941255 ], "histogram": [ [ 0.0, 0.8859885549970374, 0.0, 0.0, 0.2873797503305369, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6715995900657319, 0.0, 0.0, 0.0, 0.001305199350988809, 0.37772775166345485, 0.0, 1.3832392517849939, 0.0, 0.007544773957948514, 0.4413526996673396, 0.2511174648542843, 0.4123259485110589, 0.22946921943910759, 0.39868975821592434, 0.8242880119659365, 0.0, 1.0344406971318942, 0.0, 0.7811791531905958, 2.2815236301612565, 0.9065952309411648, 0.0, 0.16199242660606605, 0.7064832190648435, 0.03619814025345402, 0.33589982042854505, 0.31425722678474505, 0.0, 0.742987249762039, 1.1092353036691658, 0.17312069286410492, 1.1181477753977647, 0.17148074387356155, 0.0, 0.0, 0.0006000343579045155, 0.6690066986157793, 0.003895398651203332, 0.0, 0.0, 0.0, 1.3922212456158691, 0.006404218640100707, 0.0, 0.0, 0.0, 0.00014587839364207648, 0.0, 0.0, 0.0, 0.0, 0.0031144908151636737, 0.0, 0.012082479255944946, 0.0, 0.0, 0.0003318316524916961, 0.09320602700481646, 0.5192926305609384, 0.004757793444040947, 0.0, 0.2185412888666926, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00041077660837210574, 0.0182799076020529, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010151878502634997 ] ] }, "score_data": { "peer_score": -0.5337234152834602, "coverage": 0.9994342464959572, "baseline_score": -34.31243559065102, "spot_peer_score": 27.498290697980547, 
"peer_archived_score": -0.5337234152834602, "baseline_archived_score": -34.31243559065102, "spot_peer_archived_score": 27.498290697980547 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1533815711.806114, "end_time": null, "forecaster_count": 101, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1533815711.806114, "end_time": null, "forecaster_count": 101, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8082742272851409, 0.19172577271485908 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": -8, "user_vote": null }, "forecasts_count": 165, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 932, "title": "Will bitcoin either boom or bust (but not just boringly wander up or down) during 2018?", "short_title": "", "url_title": "", "slug": "will-bitcoin-either-boom-or-bust-but-not-just-boringly-wander-up-or-down-during-2018", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-25T13:54:17.237033Z", "published_at": "2018-05-28T07:00:00Z", "edited_at": "2025-09-05T17:29:25.595788Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-28T07:00:00Z", "comment_count": 21, "status": "resolved", "resolved": true, "actual_close_time": "2018-08-30T07:00:00Z", "scheduled_close_time": "2018-08-30T07:00:00Z", "scheduled_resolve_time": "2018-11-25T00:11:00Z", "actual_resolve_time": "2018-11-25T00:11:00Z", "open_time": "2018-05-28T07:00:00Z", "nr_forecasters": 149, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, 
"start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3693, "name": "Cryptocurrencies", "slug": "cryptocurrencies", "emoji": "💰", "description": "Cryptocurrencies", "type": "category" } ] }, "question": { "id": 932, "title": "Will bitcoin either boom or bust (but not just boringly wander up or down) during 2018?", "created_at": "2018-05-25T13:54:17.237033Z", "open_time": "2018-05-28T07:00:00Z", "cp_reveal_time": "2018-05-28T20:24:55.630084Z", "spot_scoring_time": "2018-05-28T20:24:55.630084Z", "scheduled_resolve_time": "2018-11-25T00:11:00Z", "actual_resolve_time": "2018-11-25T00:11:00Z", "resolution_set_time": "2018-11-25T00:11:00Z", "scheduled_close_time": "2018-08-30T07:00:00Z", "actual_close_time": "2018-08-30T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, 
"question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The world's best-known cryptocurrency took the world by storm in 2017, peaking at nearly $20,000 in December, 2017. \n\n2018 has been less kind to bitcoin and to cryptocurrencies in general. As of May 22, bitcoin is floating around $8,200. Other \"Altcoins\" like Ethereum and Ripple have been [similarly hammered](https://www.forbes.com/sites/outofasia/2018/03/05/explaining-the-dip-what-caused-the-recent-cryptocurrency-downturn/#550c267c5ccd) this year.\n\nIn discussing \"what's next\" for cryptos – and bitcoin, specifically – much of the chatter has pitted bulls versus bears. \n\nFor instance, on the bullish side... 
\n\n* Tom Lee of Fundstrat Global Advisors predicts [$25,000](https://www.ccn.com/25000-in-2018-bitcoin-bull-tom-lee-sticks-to-strong-forecast-despite-failed-prediction/) for bitcoin in 2018.\n\n* Tim Draper argues for a [$250,000](https://investorplace.com/2018/04/bitcoin-bulls-four-investors-bold-predictions/) bitcoin price by 2022.\n\n* John McAfee, who represents an extreme end of the spectrum, says bitcoin will [reach $1M](https://www.investopedia.com/news/mcafee-tracker-predicts-1-bitcoin1m-2020/) by 2020.\n\nThe bears, meanwhile, see doom ahead:\n\n* Joe Davis of Vanguard thinks that there's a substantial chance bitcoin will [fall to zero](http://www.etf.com/sections/etf-industry-perspective/vanguard-bitcoin-presents-quandary).\n\n* Warren Buffett just called the cryptocurrency [\"rat poison\"](http://money.cnn.com/2018/05/07/investing/warren-buffett-bitcoin/index.html) and Buffett's friend, Charlie Munger, went even further, saying that cryptos are \"just dementia.\"\n\n* Journalist Hamilton Nolan outlines the bear's case with acid humor [here](https://splinternews.com/all-this-bitcoin-stuff-is-fake-1797714696).\n\nBut the purpose of *this* question is not to choose between the bears and the bulls. It's to choose between the moderates and the extremists. \n\nFor this question to resolve positive (i.e. 
the extremists win):\n\n* Bitcoin must rocket to the moon--defined as 2X-ing its May 22 price to at least touch $16,400 once by year's end;\n\n* OR bitcoin must plummet to the bottom of the ocean--defined as 1/2-xing its May 22 price to at least touch $4,100 once by year's end.", "fine_print": "", "post_id": 932, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1535590007.633999, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.49 ] } ], "latest": { "start_time": 1535590007.633999, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.49 ], "forecast_values": [ 0.65, 0.35 ], "means": [ 0.3759966102742672 ], "histogram": [ [ 0.0, 0.2836525064914626, 0.0, 0.8838131193539324, 0.0, 0.0, 0.0, 0.0, 0.0004885933933727166, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2608590320824648, 0.32758126760720613, 0.0, 0.00010037919274766585, 0.0342251423215965, 0.7317999420897705, 0.024944815070536832, 0.44717674280557984, 0.509669388629438, 0.37495587717637013, 3.052237948338667, 0.34374666976972135, 0.015854564049884626, 0.0, 0.0, 1.2024904447749836, 0.00018392098162773335, 0.5724254261140374, 1.735719439319393, 0.2992261563539556, 1.4097257792098035, 0.964937558197935, 0.6054061614174733, 0.027208377720026298, 0.0, 0.00842755397448133, 0.07708050724165688, 0.00352062379201601, 0.30387404925608663, 0.6985347673876986, 0.5324198952065967, 0.4733217743329031, 0.3642432070959603, 0.08117110857126116, 0.8532926765213028, 2.1169264045856186, 0.016864959513436837, 0.015150212661143261, 0.0001377691877834185, 0.0009927288490396851, 0.5031166381261467, 0.48892258614504247, 0.02722861062909397, 0.0, 0.09344719006778415, 0.4510284405927646, 0.0, 0.0054805182258429634, 0.002190132826910873, 0.0037979036519217007, 0.05263149772289673, 2.055632140530972e-05, 0.0, 8.455329680677371e-05, 0.0, 0.043074857690829464, 0.0, 
0.0063131079052799735, 0.008725658383234459, 0.0, 0.4135974927051904, 0.0, 0.0, 0.0, 0.02149456104736915, 0.926376603839836, 0.0, 0.0, 0.0, 0.0, 0.002376463994267002, 0.0, 0.05324043471794, 0.0, 0.0, 0.04947035724207705, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10952519652217849 ] ] }, "score_data": { "peer_score": 8.10572975962893, "coverage": 0.9997271042459529, "baseline_score": 5.435850187747621, "spot_peer_score": 11.787665040466461, "peer_archived_score": 8.10572975962893, "baseline_archived_score": 5.435850187747621, "spot_peer_archived_score": 11.787665040466461 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1535538126.542774, "end_time": null, "forecaster_count": 147, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1535538126.542774, "end_time": null, "forecaster_count": 147, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7338419196460298, 0.26615808035397015 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 268, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 931, "title": "Will Donald Trump face a significant primary challenge in 2020?", "short_title": "", "url_title": "", "slug": "will-donald-trump-face-a-significant-primary-challenge-in-2020", "author_id": 100638, "author_username": "James", "coauthors": [], "created_at": "2018-05-25T13:19:40.773266Z", "published_at": "2018-05-28T07:00:00Z", "edited_at": "2025-09-05T17:29:12.294453Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-28T07:00:00Z", "comment_count": 42, "status": "resolved", "resolved": true, "actual_close_time": "2019-11-03T00:00:00Z", "scheduled_close_time": "2019-11-03T00:00:00Z", "scheduled_resolve_time": "2020-08-17T00:11:00Z", "actual_resolve_time": 
"2020-08-17T00:11:00Z", "open_time": "2018-05-28T07:00:00Z", "nr_forecasters": 233, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32593, "name": "2016-2020 Leaderboard", "slug": "2016_2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" } ] }, "question": { "id": 931, "title": "Will Donald Trump face a significant primary challenge in 2020?", "created_at": "2018-05-25T13:19:40.773266Z", "open_time": "2018-05-28T07:00:00Z", "cp_reveal_time": "2018-05-28T19:24:47.171194Z", "spot_scoring_time": "2018-05-28T19:24:47.171194Z", "scheduled_resolve_time": "2020-08-17T00:11:00Z", "actual_resolve_time": "2020-08-17T00:11:00Z", "resolution_set_time": "2020-08-17T00:11:00Z", "scheduled_close_time": "2019-11-03T00:00:00Z", "actual_close_time": 
"2019-11-03T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "It is usual for incumbent first-term presidents to receive their party's nomination for a second term, with Jimmy Carter in 1980 the last sitting president to lose at least one primary contest. However, in many ways, Donald Trump is an unusual president.\n\nThis question therefore asks whether Donald Trump will face a serious challenge for the Republican Party's nomination in 2020.\n\nThe question will resolve positively if a candidate other than Trump wins a primary or caucus election in any state or territory that provides bound delegates at the Republican national convention. This also includes the case where Trump finishes his first term but does not run for a second term, or drops out during the race. For clarity, the last sitting president who would have resolved this question positively would have been Carter, and Ford's '76 campaign would have also resolved positively.\n\nThe question resolves negatively if Trump successfully wins all of the Republican primary elections, or if he is the only candidate running. The re-election campaigns of Obama, Bush, Clinton, Bush and Reagan would all have resolved negatively.\n\nShould Trump never make it to 2020 (eg. 
he resigns, is impeached or dies before first primary) the resolution is ambiguous.", "fine_print": "", "post_id": 931, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1572738368.611758, "end_time": null, "forecaster_count": 233, "interval_lower_bounds": [ 0.09 ], "centers": [ 0.15 ], "interval_upper_bounds": [ 0.25 ] } ], "latest": { "start_time": 1572738368.611758, "end_time": null, "forecaster_count": 233, "interval_lower_bounds": [ 0.09 ], "centers": [ 0.15 ], "interval_upper_bounds": [ 0.25 ], "forecast_values": [ 0.85, 0.15 ], "means": [ 0.19678841027206223 ], "histogram": [ [ 0.0, 2.552780366222904, 0.0027850039067339057, 0.5900377740694907, 0.5296183192938407, 1.5572235228712235, 0.0, 0.7222986599345538, 0.8946385886297479, 0.4263128489574565, 4.386074991338392, 0.012284719053897566, 0.16972089941762167, 0.7937068090104388, 1.1254157879645421, 3.0834224260667016, 0.902050197267243, 0.09609242327124365, 0.6733844995327986, 0.16326455326514994, 1.47262195949716, 0.000446307185097794, 0.0, 0.0, 0.2137327435704413, 1.669402398883292, 0.02725730466256806, 0.13060458348599316, 0.19158053507417896, 0.0, 0.5759233381630403, 0.13214548293839945, 0.17770403925759523, 0.7885179325949877, 0.2926819732876292, 0.5427138850048262, 0.2536262548919775, 0.0, 0.0016083014511967205, 0.02299274618139252, 0.1809741893826372, 0.7020814067912866, 0.0, 0.02228771400185276, 0.0, 0.41426568040752504, 0.0, 0.0, 0.0, 0.8766989903362322, 0.8677229826851123, 3.481511822944615e-05, 0.046991811928327964, 0.002501864495247859, 0.0, 0.0002091753670170887, 0.001499123224609609, 5.123064266767588e-05, 0.0, 0.006621808516665446, 0.02009244918529345, 0.0731511901393905, 9.902927517487927e-06, 0.0, 2.7200840437653547e-06, 0.004641746652334586, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.700113406186428e-06, 1.7352873781265335e-06, 0.0, 0.00041758487165749676, 0.0, 4.717000147115696e-06, 0.00029666437936815704, 0.0, 0.0, 0.00517281876963899, 0.0005089342245558604, 0.0, 0.0, 
0.0, 0.0, 6.79818313825031e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6277025096335064, 0.0027113476747572572 ] ] }, "score_data": { "peer_score": 37.201611305058236, "coverage": 0.9999519931078857, "baseline_score": 36.67984135447814, "spot_peer_score": -9.748050165935945, "peer_archived_score": 37.201611305058236, "baseline_archived_score": 36.67984135447814, "spot_peer_archived_score": -9.748050165935945 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1572676717.788656, "end_time": null, "forecaster_count": 231, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1572676717.788656, "end_time": null, "forecaster_count": 231, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7537064890683668, 0.24629351093163324 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 23, "user_vote": null }, "forecasts_count": 541, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 930, "title": "Will the winner of the 2020 US Presidential election have been elected to any *other* previous significant political office?", "short_title": "", "url_title": "", "slug": "will-the-winner-of-the-2020-us-presidential-election-have-been-elected-to-any-other-previous-significant-political-office", "author_id": 100638, "author_username": "James", "coauthors": [], "created_at": "2018-05-24T15:52:46.036261Z", "published_at": "2018-05-27T07:00:00Z", "edited_at": "2025-09-05T17:29:20.981013Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-27T07:00:00Z", "comment_count": 25, "status": "resolved", "resolved": true, "actual_close_time": "2020-01-01T00:00:00Z", "scheduled_close_time": "2020-01-01T00:00:00Z", "scheduled_resolve_time": "2020-12-31T11:54:00Z", "actual_resolve_time": "2020-12-31T11:54:00Z", "open_time": 
"2018-05-27T07:00:00Z", "nr_forecasters": 174, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32593, "name": "2016-2020 Leaderboard", "slug": "2016_2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" } ] }, "question": { "id": 930, "title": "Will the winner of the 2020 US Presidential election have been elected to any *other* previous significant political office?", "created_at": "2018-05-24T15:52:46.036261Z", "open_time": "2018-05-27T07:00:00Z", "cp_reveal_time": "2018-05-27T16:29:15.376181Z", "spot_scoring_time": "2018-05-27T16:29:15.376181Z", "scheduled_resolve_time": "2020-12-31T11:54:00Z", "actual_resolve_time": "2020-12-31T11:54:00Z", "resolution_set_time": "2020-12-31T11:54:00Z", "scheduled_close_time": "2020-01-01T00:00:00Z", 
"actual_close_time": "2020-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Donald Trump won the 2016 election, to the surprise of many (including many here on Metaculus), having never been elected to a political office before. This is rare amongst US presidents, with the last such president being Eisenhower, who had been a military leader instead.\n\nBut perhaps the age of the politician president is over and the age of the celebrity president has begun?\n\nThis question asks whether the winner of the 2020 election will have been elected to any previous significant political office.\n\nPositive resolution occurs if the winner of the 2020 election has been previously elected to the House of Representatives, the Senate, the Governorship of any state or the Vice Presidency (but not the Presidency itself, since Trump first appeared in the celebrity rather than politician category). Examples in this category include Mike Pence, Paul Ryan, Bernie Sanders, Elizabeth Warren and others.\n\nNegative resolution occurs if the winner is has never been elected to such an office. 
Examples in this category include a Trump re-election, Oprah Winfrey, Michelle Obama, Mark Zuckerberg, Dwayne 'the Rock' Johnson and most other Americans.", "fine_print": "", "post_id": 930, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1577828943.177779, "end_time": null, "forecaster_count": 174, "interval_lower_bounds": [ 0.45 ], "centers": [ 0.54 ], "interval_upper_bounds": [ 0.59 ] } ], "latest": { "start_time": 1577828943.177779, "end_time": null, "forecaster_count": 174, "interval_lower_bounds": [ 0.45 ], "centers": [ 0.54 ], "interval_upper_bounds": [ 0.59 ], "forecast_values": [ 0.45999999999999996, 0.54 ], "means": [ 0.5129865938942539 ], "histogram": [ [ 0.0, 0.3126297425378901, 0.0, 0.0, 0.0, 0.015132569184820968, 0.0, 0.0, 0.0, 0.0, 0.21779411500951124, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4766340085455838, 0.0, 0.16396584987177212, 0.0, 0.0, 0.7949909393337649, 0.0, 0.46693172852340936, 0.0, 0.0, 0.0, 0.0, 0.07336769520537706, 0.0, 0.0, 0.1826090466668963, 0.11181540262759139, 0.2084570734136025, 0.0, 0.27956214420917774, 0.0, 0.037201320476189986, 1.5317499988019976, 0.0017727002004439155, 0.0, 0.13391284905938738, 1.0980677285728435, 3.2314786692947215, 0.927014565208072, 0.0, 0.5943687965228289, 0.4398010564815009, 0.42208282740405223, 0.06701642927633186, 0.1637919957600888, 0.05802823796293837, 1.7529996957367908, 1.6427812456302968, 0.7548915566277257, 0.7952854123213338, 0.0, 2.1167899777596872, 1.2033578113084036, 0.49688082286198215, 0.45504931941582827, 0.018847754422204512, 0.0067006336233970095, 0.4738023837997421, 0.0, 0.005358205146391805, 0.022423693011035202, 0.35865982669028557, 0.2652256787420323, 0.028982075875720163, 0.08483045483025874, 0.013531817144758502, 0.42228368463105737, 0.1618214929188752, 0.016899480804899277, 0.08898989157810736, 0.050143472574200466, 0.0, 0.7932256955414204, 0.0015808021415807273, 0.17457814486370077, 0.00044667451487717, 0.0, 5.076411425656614e-06, 0.13944669666976484, 0.0, 
0.03193311839747926, 0.0023590942411641836, 0.2698110795663002, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22487266614278362 ] ] }, "score_data": { "peer_score": 13.060117871896981, "coverage": 0.9999597390993676, "baseline_score": 16.662008964272225, "spot_peer_score": 13.002282773608764, "peer_archived_score": 13.060117871896981, "baseline_archived_score": 16.662008964272225, "spot_peer_archived_score": 13.002282773608764 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1577782108.639086, "end_time": null, "forecaster_count": 174, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1577782108.639086, "end_time": null, "forecaster_count": 174, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5261287432933253, 0.4738712567066748 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 14, "user_vote": null }, "forecasts_count": 362, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 928, "title": "Will Kim Jong-un meet the head of government from any G7 nation in-person before 15 June 2018?", "short_title": "", "url_title": "", "slug": "will-kim-jong-un-meet-the-head-of-government-from-any-g7-nation-in-person-before-15-june-2018", "author_id": 104439, "author_username": "IARPA Question Bot", "coauthors": [], "created_at": "2018-05-23T19:31:26.082811Z", "published_at": "2018-05-26T07:00:00Z", "edited_at": "2025-09-05T17:29:08.707704Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-26T07:00:00Z", "comment_count": 16, "status": "resolved", "resolved": true, "actual_close_time": "2018-06-12T00:00:00Z", "scheduled_close_time": "2018-06-12T00:00:00Z", "scheduled_resolve_time": "2018-06-12T01:44:00Z", "actual_resolve_time": "2018-06-12T01:44:00Z", "open_time": "2018-05-26T07:00:00Z", "nr_forecasters": 
144, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": 
"Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 928, "title": "Will Kim Jong-un meet the head of government from any G7 nation in-person before 15 June 2018?", "created_at": "2018-05-23T19:31:26.082811Z", "open_time": "2018-05-26T07:00:00Z", "cp_reveal_time": "2018-05-26T21:07:45.459780Z", "spot_scoring_time": "2018-05-26T21:07:45.459780Z", "scheduled_resolve_time": "2018-06-12T01:44:00Z", "actual_resolve_time": "2018-06-12T01:44:00Z", "resolution_set_time": "2018-06-12T01:44:00Z", "scheduled_close_time": "2018-06-12T00:00:00Z", "actual_close_time": "2018-06-12T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*This question was generated from the [IARPA Global Forecasting Challenge](https://www.iarpa.gov/challenges/gfchallenge.html).*\n\nThe first face-to-face meeting between American and North Korean leaders is set for June 12th (and perhaps [June 13th](https://www.cnn.com/2018/06/06/politics/donald-trump-potential-second-day-meeting-kim-jong-un/index.html)!) on Singapore's Sentosa Island. 
Months of brinksmanship between the US and the hermit kingdom have placed the historic gathering in and out of play, as \"[open hostility](https://www.cnn.com/2018/05/24/politics/trump-north-korea/index.html),\" John Bolton's invocation of the \"Libya model\" of denuclearization, and now Rudy Giuliani's \"[hands and knees](https://cnn.com/2018/06/06/politics/giuliani-comments-north-korea-summit-intl/index.html)\" comment all complicate an already highly delicate political moment. \n \nThe question is aimed at Donald Trump's scheduled meeting with Kim Jong-un, but technically would be resolved positively if the North Korean leader meets with a head of government from any G7 nation before the 15th.", "fine_print": "", "post_id": 928, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1528767481.553282, "end_time": null, "forecaster_count": 145, "interval_lower_bounds": [ 0.85 ], "centers": [ 0.96 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1528767481.553282, "end_time": null, "forecaster_count": 145, "interval_lower_bounds": [ 0.85 ], "centers": [ 0.96 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.040000000000000036, 0.96 ], "means": [ 0.883737454780335 ], "histogram": [ [ 0.0, 0.03826451636102858, 0.0, 0.006015647134756919, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0005999705047301742, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00028341133569470734, 0.7450868767444424, 0.011200911237254208, 1.6021250210039893e-05, 0.0, 6.826558242707053e-05, 0.0, 0.0, 0.0, 0.0005159717078196932, 0.0, 0.00018829645220658223, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00021690668860572797, 0.0, 0.0, 9.971769103633934e-05, 0.0, 0.00011838191657698926, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6426970879077707, 0.0, 4.3550273315147114e-05, 0.0030610959196725866, 0.003037876338948597, 0.137426433081863, 0.045169975631807344, 0.0, 0.0, 0.0, 0.07203311537002502, 0.023760017681260952, 0.40411093257450387, 0.0027417162159521645, 0.0, 0.0684049391477525, 
0.019946971491009727, 0.03881850691583642, 0.0064634336424816245, 0.0, 0.31212240734380975, 0.48292878473777406, 0.0015766168487706329, 0.0021571570201977886, 0.0, 0.6044693706982107, 0.27069478782701756, 0.0, 0.08629949503652228, 0.0, 0.09352322617269647, 0.17967576270652255, 0.26851719719040046, 0.3363295249837287, 0.0, 0.9989142836381257, 0.0, 0.15825374524438343, 0.05948386846057627, 0.0, 1.1616635358849046, 0.12982153507663804, 0.0, 1.1514387095112786, 0.2337247517629093, 1.8915720545270864, 1.9370370079610528, 0.6654212385327403, 1.8484385808518704, 7.442202465833492 ] ] }, "score_data": { "peer_score": 21.27777129652414, "coverage": 0.9999879319887812, "baseline_score": 11.563276415018711, "spot_peer_score": 18.948022535368086, "peer_archived_score": 21.27777129652414, "baseline_archived_score": 11.563276415018711, "spot_peer_archived_score": 18.948022535368086 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1528754625.550666, "end_time": null, "forecaster_count": 144, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1528754625.550666, "end_time": null, "forecaster_count": 144, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.18308677799222994, 0.8169132220077701 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 11, "user_vote": null }, "forecasts_count": 359, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 924, "title": "Will China execute or be targeted in an acknowledged national military attack before 1 September 2018?", "short_title": "", "url_title": "", "slug": "will-china-execute-or-be-targeted-in-an-acknowledged-national-military-attack-before-1-september-2018", "author_id": 104439, "author_username": "IARPA Question Bot", "coauthors": [], "created_at": "2018-05-23T19:31:25.896205Z", 
"published_at": "2018-05-27T07:00:00Z", "edited_at": "2025-09-05T17:29:25.954461Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-27T07:00:00Z", "comment_count": 3, "status": "resolved", "resolved": true, "actual_close_time": "2018-08-31T18:01:29Z", "scheduled_close_time": "2018-08-31T18:01:29Z", "scheduled_resolve_time": "2018-09-02T14:27:00Z", "actual_resolve_time": "2018-09-02T14:27:00Z", "open_time": "2018-05-27T07:00:00Z", "nr_forecasters": 126, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2964, "type": "question_series", "name": "IARPA 2018 Global Forecasting Challenge", "slug": null, "header_image": null, "prize_pool": null, "start_date": 
"2018-03-30T07:00:00Z", "close_date": "2018-09-08T18:01:32Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:19.635650Z", "edited_at": "2024-02-29T10:13:49.489209Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 924, "title": "Will China execute or be targeted in an acknowledged national military attack before 1 September 2018?", "created_at": "2018-05-23T19:31:25.896205Z", "open_time": "2018-05-27T07:00:00Z", "cp_reveal_time": "2018-05-27T21:16:08.148430Z", "spot_scoring_time": "2018-05-27T21:16:08.148430Z", "scheduled_resolve_time": "2018-09-02T14:27:00Z", "actual_resolve_time": "2018-09-02T14:27:00Z", "resolution_set_time": "2018-09-02T14:27:00Z", "scheduled_close_time": "2018-08-31T18:01:29Z", "actual_close_time": "2018-08-31T18:01:29Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*This question was generated from the [IARPA Global Forecasting Challenge](https://www.iarpa.gov/challenges/gfchallenge.html).*\n\n\"Acknowledged” means the government of the country executing the attack claims responsibility for it. 
A “national military attack” includes the employment of conventional or unconventional weapons by one country’s national military forces on another country’s military, military assets, or territory, including citizens located within that territory, but excluding territorial waters, foreign missions, and/or exclusive economic zones. A “cyber” attack will not qualify as a “national military attack.” Attacks by specific military or paramilitary units or non-state actors will not qualify as “national military attacks” unless the government claims responsibility for them. Taiwan is considered a part of China and not an independent country.", "fine_print": "", "post_id": 924, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1535707732.800339, "end_time": null, "forecaster_count": 126, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1535707732.800339, "end_time": null, "forecaster_count": 126, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.01898943547049541 ], "histogram": [ [ 0.0, 18.559853338504368, 1.0003450234997144, 0.03677428947168366, 0.02802116527942212, 0.5669963481376584, 0.06876466851363891, 0.007443479684201042, 0.0, 0.0, 0.06724002917598365, 0.0, 0.0, 0.0, 0.0, 0.0004260854925110702, 0.0, 0.0, 0.05736207576458694, 0.0, 0.003016935954032928, 0.0, 0.34092914976729904, 0.042310736294626096, 0.0, 0.0, 0.050762502529960826, 0.0017891892856764617, 0.0008235662727110964, 0.0, 0.0, 0.004947403540845617, 0.10221260723848555, 0.0, 0.0009281367527084389, 0.0, 0.0, 0.0, 0.0016138371106303707, 0.0, 0.00018797137840382663, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010924271437989063, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 6.735449221535745, "coverage": 0.9997531415918541, "baseline_score": 96.85876911614884, "spot_peer_score": 4.501914033667226, "peer_archived_score": 6.735449221535745, "baseline_archived_score": 96.85876911614884, "spot_peer_archived_score": 4.501914033667226 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1535696842.806818, "end_time": null, "forecaster_count": 126, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1535696842.806818, "end_time": null, "forecaster_count": 126, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8967567874454845, 0.10324321255451549 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, "forecasts_count": 201, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 919, "title": "Will there be armed conflict between racial groups in South Africa before 2023?", "short_title": "", "url_title": "", "slug": "will-there-be-armed-conflict-between-racial-groups-in-south-africa-before-2023", "author_id": 104553, "author_username": "stefanj", "coauthors": [], "created_at": "2018-05-23T05:48:53.892771Z", "published_at": "2018-05-26T07:00:00Z", "edited_at": "2025-09-05T17:29:29.221486Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-26T07:00:00Z", "comment_count": 23, "status": "resolved", "resolved": true, "actual_close_time": "2019-08-01T11:00:00Z", "scheduled_close_time": "2019-08-01T11:00:00Z", "scheduled_resolve_time": "2023-01-01T16:15:00Z", "actual_resolve_time": "2023-01-01T16:15:00Z", "open_time": "2018-05-26T07:00:00Z", "nr_forecasters": 119, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": 
"2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 919, "title": "Will there be armed conflict between racial groups in South Africa before 2023?", "created_at": "2018-05-23T05:48:53.892771Z", "open_time": "2018-05-26T07:00:00Z", "cp_reveal_time": "2018-05-27T10:24:26.178748Z", "spot_scoring_time": "2018-05-27T10:24:26.178748Z", "scheduled_resolve_time": "2023-01-01T16:15:00Z", "actual_resolve_time": "2023-01-01T16:15:00Z", "resolution_set_time": "2023-01-01T16:15:00Z", "scheduled_close_time": "2019-08-01T11:00:00Z", "actual_close_time": "2019-08-01T11:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": 
false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Recently racial tensions in South Africa have been high. \n\nThere is an upcoming land expropriation vote which will allow the government to [take land without compensation](https://www.news24.com/Columnists/GuestColumn/land-expropriation-without-compensation-what-does-it-mean-20180304-5). This is an attempt to redistribute land due to [unequal land ownership](https://www.news24.com/SouthAfrica/News/sas-land-audit-makes-case-for-land-tax-20180204-2) as well as find productive land. Since the majority of privately owned land is owned by white groups\nthis policy has sparked massive debate. [Some think](https://qz.com/1218309/south-africa-to-take-land-without-compensation-as-zimbabwe-backtracks-on-seizing-white-farms/) this will hurt agricultural production and hurt the economy of the country.\n\nThis, in addition to the high rate of [farm attacks in South Africa](https://africacheck.org/factsheets/factsheet-statistics-farm-attacks-murders-sa/) has led to conservative [white groups gearing up for a fight](https://www.youtube.com/watch?v=1NyVYaI_V6w), [some have been preparing for a revolution even earlier](https://suidlanders.org/).\n\n*** Will there be more than 100 violent deaths of white insurgents before 2023, within a period of 12 months, as reported by a verifiable news outlet? 
***\n\nThe answer is meant to be indicative of actual armed conflict, as a proxy to the question: will there be armed conflict between racial groups in South Africa before 2023.", "fine_print": "", "post_id": 919, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1564645766.068955, "end_time": null, "forecaster_count": 119, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.32 ], "interval_upper_bounds": [ 0.44 ] } ], "latest": { "start_time": 1564645766.068955, "end_time": null, "forecaster_count": 119, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.32 ], "interval_upper_bounds": [ 0.44 ], "forecast_values": [ 0.6799999999999999, 0.32 ], "means": [ 0.33727466513018983 ], "histogram": [ [ 0.0, 0.3298773035135659, 0.28222198026277695, 0.0, 0.0, 0.002454744419980374, 0.0, 0.0, 0.0787000215845657, 0.0, 0.520710780851208, 0.004791327568396407, 0.0, 0.009431368072545939, 0.0, 1.8588059880280174, 0.0, 0.0005845837517301532, 0.0, 0.028431918152454893, 2.6437657211022896, 0.05454593356184775, 0.0, 0.2288402807874698, 0.0, 0.18974743806214617, 0.0, 0.010212357276173556, 1.4698490712033878, 0.0, 0.9256121927959586, 0.955107266911675, 0.7220168009275107, 1.2534687391252688, 0.6259995631503564, 0.8489289087481837, 0.0, 0.03156651797907995, 0.004376584997741002, 0.0, 1.5262365643068838, 0.14389214747425877, 0.2001716733267841, 0.03965388446818618, 0.9081679749214331, 1.0882568716550267, 0.0022141635174052336, 0.6869622802823657, 0.0, 0.0, 0.8764692032812847, 0.0, 0.15671491120360884, 0.0, 0.0, 0.032536350391486085, 0.14823516009057155, 0.0, 0.0, 0.0, 0.5489952543010435, 0.0, 0.011047093942862699, 0.0003675276093553673, 0.0, 0.04215625429512881, 0.0, 0.15963053269782818, 0.0, 0.0, 0.00013520585154472867, 0.0, 0.0, 0.0, 0.1261738202665928, 0.058049779671705354, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.48515246426301095 ] ] }, "score_data": { "peer_score": 31.574222020486438, "coverage": 
0.9999990887441372, "baseline_score": 25.841188598034012, "spot_peer_score": 31.212091670792866, "peer_archived_score": 31.574222020486438, "baseline_archived_score": 25.841188598034012, "spot_peer_archived_score": 31.212091670792866 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1558887990.063834, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1558887990.063834, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7639591752765722, 0.23604082472342786 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 196, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 917, "title": "Will a space elevator successfully be built on Earth before 2100?", "short_title": "Space Elevator on Earth by 2100", "url_title": "Space Elevator on Earth by 2100", "slug": "space-elevator-on-earth-by-2100", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-18T22:01:40.493863Z", "published_at": "2018-05-21T07:00:00Z", "edited_at": "2025-11-22T23:52:55.082738Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-21T07:00:00Z", "comment_count": 34, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2100-01-01T08:00:00Z", "scheduled_resolve_time": "2101-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-05-21T07:00:00Z", "nr_forecasters": 334, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, 
"html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" } ] }, "question": { "id": 917, "title": "Will a space elevator successfully be built on Earth before 2100?", "created_at": "2018-05-18T22:01:40.493863Z", "open_time": "2018-05-21T07:00:00Z", "cp_reveal_time": "2018-05-21T17:24:38.114669Z", "spot_scoring_time": "2018-05-21T17:24:38.114669Z", "scheduled_resolve_time": "2101-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2100-01-01T08:00:00Z", "actual_close_time": "2100-01-01T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, 
"open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Ahh, to build a space elevator. Wouldn't that be grand? We could move material into space at a fraction of the cost of conventional rockets. Space tourism would [boom](https://www.herox.com/crowdsourcing-news/137-going-up-the-case-for-a-space-elevator). We'd launch interplanetary missions. Oh happy day.\n\n[Futurism explains](https://futurism.com/why-space-elevators-are-the-future-of-space-travel/):\n\n> According to [a NASA] study, a flexible and durable cable with a space station counterweight could serve as a viable space elevator. A mechanical “climber” — using magnetic levitation or rollers along the tether — would then carry many tons of equipment or people into orbit. Although such a project would cost in the tens of billions, it would eventually pay for itself by providing much cheaper space travel to a greatly expanded market.\n\nThe question is: can we do this?\n\nKurzgesagt (a.k.a. \"in a nutshell\") explores the state of affairs in [this entertaining video](https://www.youtube.com/watch?v=kxqnCwMvEpg)\n\nHere are some of the problems with this plan. 
Problems which engineers and scientists may *never* be able to overcome:\n\n* Maybe we'll never make a material [strong enough](https://futurism.com/why-space-elevators-are-the-future-of-space-travel/) to support the space elevator.\n* Maybe [terrorists](https://worldbuilding.stackexchange.com/questions/20311/how-to-protect-a-space-elevator-against-terrorism) will attack any elevator that we build.\n* Maybe we'll never get the [costs of construction/maintenance](https://www.quora.com/How-much-money-would-it-cost-to-make-a-space-elevator-including-R-D) down.\n* Maybe one or more [space elevator disasters](http://www.niac.usra.edu/files/studies/final_report/472Edwards.pdf) will fling debris into orbit or crash down on a populated area, turning the population against the process.\n* Maybe rocket engineers will build on the successes of companies like [Space X](https://www.popsci.com/spacexs-falcon-heavy-launch-was-joyful-success), and there will therefore never be enough political or economic pressure to incentivize construction of an elevator. 
\n\nWill we or our descendants overcome these [obstacles](http://sploid.gizmodo.com/how-would-a-real-space-elevator-work-and-is-it-even-pos-1769925946)?", "resolution_criteria": "This question will resolve as **Yes** if a working space elevator is constructed on Earth before January 1, 2100 and is maintained in operation for at least 365 days", "fine_print": "", "post_id": 917, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763855564.43867, "end_time": 1785226979.230969, "forecaster_count": 332, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.09 ], "interval_upper_bounds": [ 0.18 ] } ], "latest": { "start_time": 1763855564.43867, "end_time": 1785226979.230969, "forecaster_count": 332, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.09 ], "interval_upper_bounds": [ 0.18 ], "forecast_values": [ 0.91, 0.09 ], "means": [ 0.1468740103068048 ], "histogram": [ [ 1.9479479573732825, 6.348997847888682, 1.668490462743562, 0.6803911061318823, 2.784801890227638, 1.6175098805639823, 1.1514914190191878, 0.026528049068294944, 0.6332106974005368, 2.414794938443296, 2.2622751150788396, 0.12759522091124267, 1.3690882131168423, 1.6382406395651905e-06, 0.5793812520381525, 1.177267792990226, 0.21853090420550775, 0.6594340290968378, 0.746510168414835, 0.037340280261949604, 2.628716205412374, 0.04711724212135077, 0.13907686806772662, 0.895747349285572, 0.0, 0.1917279135359249, 4.381587398828799e-05, 0.0, 0.0, 0.0, 1.5028360512919723, 0.08410462816845976, 0.02780863652213625, 0.0002185800243690959, 0.007052536096346849, 0.0935835756001624, 0.01634013089908075, 0.0009793957218467702, 0.0, 0.05924565470621881, 0.002651416029764414, 0.0, 0.0, 0.0008004546718888783, 0.0, 0.004285354227521244, 0.04558752347667032, 0.0, 0.0, 0.0, 0.005621202820789439, 0.0, 0.03602244023141209, 0.0, 0.00988045656465226, 0.0035512626233432975, 0.0, 0.0, 0.0, 0.0, 0.848859080904963, 0.0, 0.3515852303678066, 0.0, 0.0, 0.008839705418661898, 0.0, 0.0073230200376936385, 0.0, 0.0, 
0.17156764987604428, 0.0, 0.0, 0.0, 0.0, 0.14841456346209586, 0.0004206451776705269, 0.0, 0.0, 0.0, 0.0, 1.0002622735686993e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003803793131490754, 0.0, 0.0005051165589325583, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0033765423307788073, 0.8475570952070836, 0.2971710606949308 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287320.236176, "end_time": null, "forecaster_count": 332, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287320.236176, "end_time": null, "forecaster_count": 332, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9605189493615288, 0.03948105063847122 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 40, "user_vote": null }, "forecasts_count": 583, "key_factors": [], "is_current_content_translated": false, "description": "Ahh, to build a space elevator. Wouldn't that be grand? We could move material into space at a fraction of the cost of conventional rockets. Space tourism would [boom](https://www.herox.com/crowdsourcing-news/137-going-up-the-case-for-a-space-elevator). We'd launch interplanetary missions. Oh happy day.\n\n[Futurism explains](https://futurism.com/why-space-elevators-are-the-future-of-space-travel/):\n\n> According to [a NASA] study, a flexible and durable cable with a space station counterweight could serve as a viable space elevator. A mechanical “climber” — using magnetic levitation or rollers along the tether — would then carry many tons of equipment or people into orbit. Although such a project would cost in the tens of billions, it would eventually pay for itself by providing much cheaper space travel to a greatly expanded market.\n\nThe question is: can we do this?\n\nKurzgesagt (a.k.a. 
\"in a nutshell\") explores the state of affairs in [this entertaining video](https://www.youtube.com/watch?v=kxqnCwMvEpg)\n\nHere are some of the problems with this plan. Problems which engineers and scientists may *never* be able to overcome:\n\n* Maybe we'll never make a material [strong enough](https://futurism.com/why-space-elevators-are-the-future-of-space-travel/) to support the space elevator.\n* Maybe [terrorists](https://worldbuilding.stackexchange.com/questions/20311/how-to-protect-a-space-elevator-against-terrorism) will attack any elevator that we build.\n* Maybe we'll never get the [costs of construction/maintenance](https://www.quora.com/How-much-money-would-it-cost-to-make-a-space-elevator-including-R-D) down.\n* Maybe one or more [space elevator disasters](http://www.niac.usra.edu/files/studies/final_report/472Edwards.pdf) will fling debris into orbit or crash down on a populated area, turning the population against the process.\n* Maybe rocket engineers will build on the successes of companies like [Space X](https://www.popsci.com/spacexs-falcon-heavy-launch-was-joyful-success), and there will therefore never be enough political or economic pressure to incentivize construction of an elevator. \n\nWill we or our descendants overcome these [obstacles](http://sploid.gizmodo.com/how-would-a-real-space-elevator-work-and-is-it-even-pos-1769925946)?" }, { "id": 916, "title": "Will another 9/11 on U.S. 
soil be prevented at least through 2030?", "short_title": "", "url_title": "", "slug": "will-another-911-on-us-soil-be-prevented-at-least-through-2030", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-18T21:58:46.677800Z", "published_at": "2018-05-21T07:00:00Z", "edited_at": "2025-09-05T17:29:28.963667Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-21T07:00:00Z", "comment_count": 14, "status": "closed", "resolved": false, "actual_close_time": "2025-06-15T07:00:00Z", "scheduled_close_time": "2025-06-15T07:00:00Z", "scheduled_resolve_time": "2030-12-31T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-05-21T07:00:00Z", "nr_forecasters": 179, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": 
"category" } ] }, "question": { "id": 916, "title": "Will another 9/11 on U.S. soil be prevented at least through 2030?", "created_at": "2018-05-18T21:58:46.677800Z", "open_time": "2018-05-21T07:00:00Z", "cp_reveal_time": "2018-05-21T15:04:53.258273Z", "spot_scoring_time": "2018-05-21T15:04:53.258273Z", "scheduled_resolve_time": "2030-12-31T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2025-06-15T07:00:00Z", "actual_close_time": "2025-06-15T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "As of mid-2018, it's been almost 17 years since the September 11, 2001 terrorist attacks that destroyed the Twin Towers in New York and damaged the Pentagon. Nearly 3,000 people died in the attack. Since that time, fortunately, there hasn't been another attack on the U.S. homeland that's anywhere close to the size and scale of 9/11.\n\nHowever, we cannot rest easy.\n\nAs The Atlantic [reported](https://www.theatlantic.com/magazine/archive/2016/09/are-we-any-safer/492761/) in September 2016:\n\n> Are we safer? Yes, we’re safer from the kind of orchestrated attack that shocked us on that September morning. It’s harder for terrorists to get into the country, and harder for them to pull off something spectacular if they do. But we have not plugged some of the most threatening security gaps. 
\n\nA special report compiled by the Heritage Foundation examined [60 terrorist plots](https://www.heritage.org/terrorism/report/60-terrorist-plots-911-continued-lessons-domestic-counterterrorism) that have unfolded since 9/11.\n\nHow long can our luck – and the good work of law enforcement – hold out?\n\n*** Can we prevent a US terrorist attack equal to (or worse than) 9/11 in terms of lives lost, at least through the year 2030? ***\n\nFor these purposes, a terrorist attack will point to something purposeful but not directly implemented by a nation-state's government and military.", "fine_print": "", "post_id": 916, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1745665201.884835, "end_time": null, "forecaster_count": 178, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.85 ] } ], "latest": { "start_time": 1745665201.884835, "end_time": null, "forecaster_count": 178, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.85 ], "forecast_values": [ 0.19999999999999996, 0.8 ], "means": [ 0.7473425629585042 ], "histogram": [ [ 0.0, 0.08513797185705392, 0.0, 0.0, 0.0, 9.918156094626113e-05, 0.0, 0.0, 0.0, 0.0, 0.0006479713574037531, 0.0, 0.0, 1.8603248883486592e-05, 0.0, 0.04991253245142499, 0.0, 0.0, 0.0, 0.0, 0.027464261005621708, 0.0, 0.0, 0.0, 0.10063018194904833, 0.0005471912571133907, 0.28501596612413604, 0.001891100673931783, 0.0, 0.0, 0.9353690820950491, 0.0, 0.001524620940044264, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6563051760103071, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0020289524865603694, 0.006505936565005258, 0.0, 0.014264337958424227, 0.0, 0.0, 0.5204369542223122, 0.0, 0.12706120010090005, 0.08005749317517596, 0.1530558022498266, 0.0, 0.0, 0.4362167561715159, 0.7227988185610674, 0.03221067924067377, 0.7517124845845736, 0.17219043502498588, 0.644654449963253, 0.2036532680653466, 0.0005941057357460815, 0.01621011892349815, 0.004221114061686956, 1.7456312686397097, 
0.023517721926693803, 0.0, 0.009814776948893166, 0.033648402666735217, 0.21524064034741955, 0.01288787882384917, 0.530606553178061, 1.28600754941508, 0.0012863211893342054, 6.156129419273027, 0.7493944073397552, 0.32138726551203933, 0.7892767690887382, 0.20783365000345386, 0.8690132119955545, 1.5901378698733697, 0.42677980259685916, 0.9631661305838046, 0.0026705640424390664, 0.8161229457018111, 0.24043902008825413, 0.8486305922479954, 0.38490091471545845, 0.0, 0.5766544891838616, 0.0, 0.0, 0.0, 0.35283820778582 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728290052.946845, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728290052.946845, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.24994264223775853, 0.7500573577622415 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 340, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 915, "title": "Will we discover clear evidence of proton decay by 2040?", "short_title": "", "url_title": "", "slug": "will-we-discover-clear-evidence-of-proton-decay-by-2040", "author_id": 104272, "author_username": "AdamKosloff", "coauthors": [], "created_at": "2018-05-18T21:52:52.436951Z", "published_at": "2018-05-21T07:00:00Z", "edited_at": "2025-09-11T07:41:53.197160Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-21T07:00:00Z", "comment_count": 5, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2030-02-15T08:00:00Z", "scheduled_resolve_time": "2040-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2018-05-21T07:00:00Z", "nr_forecasters": 107, 
"html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 915, "title": "Will we discover clear evidence of proton decay by 2040?", "created_at": "2018-05-18T21:52:52.436951Z", "open_time": "2018-05-21T07:00:00Z", "cp_reveal_time": "2018-05-21T18:54:19.346230Z", "spot_scoring_time": "2018-05-21T18:54:19.346230Z", "scheduled_resolve_time": "2040-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2030-02-15T08:00:00Z", "actual_close_time": "2030-02-15T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", 
"default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Protons are durable little subatomic particles. Our collective best guess that they should take at least \\(10^{34}\\) years to decay... if they do so at all. \n\nWhy do scientists want to figure this out? Symmetry Magazine explains the situation: \n\n> Much [the theoritical work on Grand Unified Theories of the universe] rests on the existence of proton decay, and yet we’ve never seen a proton die. The reason may simply be that protons rarely decay, a hypothesis borne out by both experiment and theory... Because of quantum physics, the time any given proton decays is random, so a tiny fraction will decay long before that \\(10^{34}\\)-year lifetime. So, “what you need to do is to get a whole bunch of protons together,” says [University of California's Jonathan Feng]. Increasing the number of protons increases the chance that one of them will decay while you’re watching.\n\nSeveral experiments around the world have attempted (and will be attempting) to quantify the whys and wherefores of proton decay. \n\nTwo of the most important include:\n\n[Super-Kamiokande](http://www-sk.icrr.u-tokyo.ac.jp/sk/sk/pdecay-e.html) in Japan:\n\n> If we can collect many protons and some of them decay, we can estimate proton lifetime unless waiting for so long time. Super-Kamiokande uses 50,000 tons of pure water and it contains \\(7 \\times 10^{33}\\) protons. We are measuring proton lifetime with huge number of protons... 
however, we have not observed any evidence of proton decay yet.\n\n[Hyper-Kamiokande](http://www.hyper-k.org/en/physics/phys-protondecay.html): \n\n> Hyper-Kamiokande is about 10 times larger than SK [Super-Kamiokande] and it can overtake the current reach by SK within two years... Hyper-Kamiokande has sensitivity up to more than one order longer than the current lower lifetime of proton.\n\n\n*** What do you think? Will we discover proton decay before 2040? ***\n\nQuestion resolves positive if experimental evidence for proton decay is published in a top peer-reviewed journal prior to 2040.", "fine_print": "", "post_id": 915, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1761331979.1303, "end_time": 1765065491.199409, "forecaster_count": 71, "interval_lower_bounds": [ 0.13 ], "centers": [ 0.244 ], "interval_upper_bounds": [ 0.35 ] } ], "latest": { "start_time": 1761331979.1303, "end_time": 1765065491.199409, "forecaster_count": 71, "interval_lower_bounds": [ 0.13 ], "centers": [ 0.244 ], "interval_upper_bounds": [ 0.35 ], "forecast_values": [ 0.756, 0.244 ], "means": [ 0.2780781935809903 ], "histogram": [ [ 0.0, 0.8678334419864759, 0.0, 0.0, 0.0, 0.9421888867040361, 0.0, 0.0, 0.8873410162862754, 0.0, 0.9101690158626126, 0.10416951339998208, 0.0, 0.19324049261670082, 0.7392557304732013, 0.1222613328297847, 0.15432241690319634, 0.0, 0.0, 0.0, 1.9031333746350478, 0.0, 0.29669600331087087, 0.0, 0.8378949725905285, 1.7891367621247602, 0.377836139774715, 0.0, 0.516959776484777, 0.0, 0.6015322874595478, 0.0, 0.20794218954840368, 0.01128708155673866, 0.0012381970298981316, 0.8346900266069526, 0.0, 0.0, 0.3875436010351392, 0.0, 0.4667023473528153, 0.0684544220545421, 0.026507746979498387, 0.0, 0.0025372866159333904, 0.05736130050041941, 0.0, 0.0, 0.0, 0.0, 0.4075424772430253, 0.0, 0.0, 0.0, 0.0, 0.15589365820400852, 0.0, 0.0, 0.0, 0.0, 0.005175226808777721, 0.41631406571392837, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05239604340077941, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0119604515856335 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287551.874574, "end_time": null, "forecaster_count": 106, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287551.874574, "end_time": null, "forecaster_count": 106, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9262287860553228, 0.07377121394467721 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 20, "user_vote": null }, "forecasts_count": 244, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 913, "title": "Is Arnold in the red marble, in Westworld?", "short_title": "", "url_title": "", "slug": "is-arnold-in-the-red-marble-in-westworld", "author_id": 8, "author_username": "Anthony", "coauthors": [], "created_at": "2018-05-16T23:13:53.226106Z", "published_at": "2018-05-18T07:00:00Z", "edited_at": "2025-09-05T17:29:07.725004Z", "curation_status": "approved", "curation_status_updated_at": "2018-05-18T07:00:00Z", "comment_count": 1, "status": "resolved", "resolved": true, "actual_close_time": "2018-06-02T07:00:00Z", "scheduled_close_time": "2018-06-02T07:00:00Z", "scheduled_resolve_time": "2018-06-03T03:00:00Z", "actual_resolve_time": "2018-06-03T03:00:00Z", "open_time": "2018-05-18T07:00:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32599, "name": "2018 Leaderboard", "slug": "2018_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": 
"site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 913, "title": "Is Arnold in the red marble, in Westworld?", "created_at": "2018-05-16T23:13:53.226106Z", "open_time": "2018-05-18T07:00:00Z", "cp_reveal_time": "2018-05-19T18:19:48.817856Z", "spot_scoring_time": "2018-05-19T18:19:48.817856Z", "scheduled_resolve_time": "2018-06-03T03:00:00Z", "actual_resolve_time": "2018-06-03T03:00:00Z", "resolution_set_time": "2018-06-03T03:00:00Z", "scheduled_close_time": "2018-06-02T07:00:00Z", "actual_close_time": "2018-06-02T07:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, 
"nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*** SPOILERS ***\n\nAs described in more detail [here](https://www.vanityfair.com/hollywood/2018/05/westworld-who-did-bernard-print-red-ball-season-2-episode-4-riddle-of-the-sphinx), a lot went down in the S2E04 of Westworld, wherein it was revealed that the \"real\" purpose (or at least real*er*) is to digitize humans and load them into hosts for personal immortality.\n\nIn the episode, at some unclear point in time, Bernard-the-host enters a secret lab and makes off with the red marble-like sphere that apparently represents one of these uploaded minds. But we have no idea whose it is. The three main contenders are (a) Ford, who transferred before he dies, (b) William/MIB, or (c) Arnold, the person on whom Bernard is based. The last seems to be something of a favorite, so we'll ask:\n\n*** By the end of season 2, will it be revealed that the marble taken from the lab by Bernard contains the mind of Arnold? ***\n\nThere's an accompanying theory that the guy who woke up on the beach all confused is Arnold (in a host) rather than Bernard-as-host; but that's a side bet you can make on your own. 
(If necessary, question will close retroactively to prior to the episode in which a relevant reveal is made.)", "fine_print": "", "post_id": 913, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1528304212.995183, "end_time": null, "forecaster_count": 41, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.5 ], "interval_upper_bounds": [ 0.5 ] } ], "latest": { "start_time": 1528304212.995183, "end_time": null, "forecaster_count": 41, "interval_lower_bounds": [ 0.3 ], "centers": [ 0.5 ], "interval_upper_bounds": [ 0.5 ], "forecast_values": [ 0.5, 0.5 ], "means": [ 0.44311568117678246 ], "histogram": [ [ 0.0, 0.8895643965396423, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7591555979297057, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14500482192877664, 0.0, 0.0, 0.0, 0.0, 0.9244383481296953, 0.0, 0.0, 0.0, 0.0, 0.2714154180589184, 0.0, 0.0, 0.0, 0.0, 0.24582773872283623, 0.0, 0.0, 0.0, 0.0, 0.6888944248518107, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3613315105385659, 4.478802151633145, 0.12948046043583153, 0.0, 0.0, 1.064404306377434, 0.045661515517217234, 0.0, 0.0, 0.0, 0.0, 0.02802391257647418, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.023344988892241045, 0.0, 0.0, 0.004502492091281563, 0.10228230303850093, 0.0, 0.09043497114579632, 0.0, 0.0391307538660947, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4741317757842507, 0.0, 0.4337183691184219, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.115269369020162 ] ] }, "score_data": { "peer_score": 12.017760155930846, "coverage": 0.9998248239043688, "baseline_score": -6.333497482308677, "spot_peer_score": 18.474858575173776, "peer_archived_score": 12.017760155930846, "baseline_archived_score": -6.333497482308677, "spot_peer_archived_score": 18.474858575173776 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1527922110.852191, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { 
"start_time": 1527922110.852191, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6726474584417358, 0.3273525415582642 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": -4, "user_vote": null }, "forecasts_count": 117, "key_factors": [], "is_current_content_translated": false, "description": "" } ] }