Posts List Oldapi View
We shared this request example with FAB participants: url_qparams = { "limit": count, "offset": offset, "has_group": "false", "order_by": "-activity", "forecast_type": "binary", "project": tournament_id, "status": "open", "type": "forecast", "include_description": "true", } url = f"{api_info.base_url}/questions/" response = requests.get( url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams )
However, we don't want to support all of these parameters; the relevant ones are: - order_by - status - project - forecast_type — we ignore this parameter and assume it is binary, since FAB only supports binary questions for now.
GET /api2/questions/?format=api&offset=2840
{ "count": 6356, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=2860", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=2820", "results": [ { "id": 26305, "title": "Will the same nation win more than one women's team sport at the 2024 Olympics?", "short_title": "", "url_title": "", "slug": "will-the-same-nation-win-more-than-one-womens-team-sport-at-the-2024-olympics", "author_id": 101262, "author_username": "BrunoParga", "coauthors": [], "created_at": "2024-07-17T15:00:59.863608Z", "published_at": "2024-07-19T14:30:00Z", "edited_at": "2025-09-05T17:29:23.592204Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-19T14:30:00Z", "comment_count": 45, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-20T14:30:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-08-11T15:35:00Z", "actual_resolve_time": "2024-08-11T15:35:00Z", "open_time": "2024-07-19T14:30:00Z", "nr_forecasters": 30, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", 
"close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26305, "title": "Will the same nation win more than one women's team sport at the 2024 Olympics?", "created_at": "2024-07-17T15:00:59.863608Z", "open_time": "2024-07-19T14:30:00Z", "cp_reveal_time": "2024-07-20T14:30:00Z", "spot_scoring_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-08-11T15:35:00Z", "actual_resolve_time": "2024-08-11T15:35:00Z", "resolution_set_time": "2024-08-11T15:35:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "actual_close_time": "2024-07-20T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, 
"open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Although most events in the Summer Olympics are contested by individual athletes, or small teams of 2-4, a few have one single larger team representing their entire country. These are:\n\nAssociation football\nBasketball 5×5\nBasketball 3×3\nField hockey\nHandball\nRugby sevens\nVolleyball\nWater polo\n\n(Beach volleyball is listed on the source but excluded from the question because two teams from each team can qualify, unlike the other sports.)\n\nWill the women's teams of the same nation win at least two of these sports?", "resolution_criteria": "This question resolves as No if women's teams representing eight different National Olympic Committees (NOC) win gold in the eight team sports - association football, 3×3 basketball, 5×5 basketball, field hockey, handball, rugby sevens, voleyball and water polo.\n\nOtherwise, the same NOC must have won at least two of these events, so the question resolves as **Yes**.", "fine_print": "", "post_id": 26305, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721485369.316328, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.563 ], "centers": [ 0.65 ], "interval_upper_bounds": [ 0.73 ] } ], "latest": { "start_time": 1721485369.316328, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.563 ], "centers": [ 0.65 ], "interval_upper_bounds": [ 0.73 ], "forecast_values": [ 0.35, 0.65 ], "means": [ 0.6339904078886344 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 
0.9120497244989734, 0.0, 0.3660118313347263, 0.0, 0.0, 0.0, 0.0, 0.0, 0.03911858530021452, 0.0, 0.0, 0.0, 0.0, 0.0, 0.813899022650909, 0.0, 0.0, 0.0, 0.9696319252230643, 0.0, 0.22827012898946356, 0.17630000022929448, 0.0, 0.620502543612061, 0.0, 0.0, 0.0, 0.023631498669224205, 0.9104814007977342, 0.8305036517217669, 0.0839758874887769, 0.45529495594735647, 0.0, 0.5327546430266185, 0.0, 0.0, 0.0, 0.0, 0.33461259345819927, 0.0, 0.0, 0.030893002561247163, 0.0, 0.7640856351447469, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04842515677234604, 0.3268262380230357, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 37.85116232537298, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 37.85116232537298 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721485369.343042, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721485369.343042, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.4916987002812935, 0.5083012997187065 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 48, "key_factors": [], "is_current_content_translated": false, "description": "Although most events in the Summer Olympics are contested by individual athletes, or small teams of 2-4, a few have one single larger team representing their entire country. 
These are:\n\nAssociation football\nBasketball 5×5\nBasketball 3×3\nField hockey\nHandball\nRugby sevens\nVolleyball\nWater polo\n\n(Beach volleyball is listed on the source but excluded from the question because two teams from each team can qualify, unlike the other sports.)\n\nWill the women's teams of the same nation win at least two of these sports?" }, { "id": 26304, "title": "Will William Ruto cease to be President of Kenya before October 1, 2024?", "short_title": "", "url_title": "", "slug": "will-william-ruto-cease-to-be-president-of-kenya-before-october-1-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-17T15:00:59.770576Z", "published_at": "2024-07-19T14:30:00Z", "edited_at": "2025-09-05T17:29:00.539380Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-19T14:30:00Z", "comment_count": 53, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-20T14:30:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:56:00Z", "actual_resolve_time": "2024-10-02T14:56:00Z", "open_time": "2024-07-19T14:30:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", 
"slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26304, "title": "Will William Ruto cease to be President of Kenya before October 1, 2024?", "created_at": "2024-07-17T15:00:59.770576Z", "open_time": "2024-07-19T14:30:00Z", "cp_reveal_time": "2024-07-20T14:30:00Z", "spot_scoring_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:56:00Z", "actual_resolve_time": "2024-10-02T14:56:00Z", "resolution_set_time": "2024-10-02T14:56:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "actual_close_time": "2024-07-20T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, 
"default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "After weeks of anti-government protests, the President of Kenya, William Ruto, [fired](https://www.cnn.com/2024/07/11/africa/kenyas-president-fires-entire-cabinet-intl/index.html) almost his entire cabinet, [saying](https://nation.africa/kenya/news/president-ruto-sacks-entire-cabinet-4687068) he was \"listening keenly to what the people of Kenya have said\" as a concession to protestors. This follows several weeks of nationwide protests so intense they Ruto had to be [barricaded](https://www.nytimes.com/2024/07/14/opinion/kenya-protests-politics.html) into his presidential compound. On June 25, 2024, police [opened fire](https://www.reuters.com/world/africa/young-kenyan-tax-protesters-plan-nationwide-demonstrations-2024-06-25/) on protestors attempting to enter the parliament. In total at least [39 people](https://www.economist.com/middle-east-and-africa/2024/07/09/kenyas-deadly-gen-z-protests-could-change-the-country) have been killed. \n\n\nThe protests, which were sparked by unpopular proposed tax hikes. evolved into [demands for Ruto's ouster](https://www.reuters.com/world/africa/kenyan-activists-call-fresh-protests-demanding-rutos-resignation-2024-06-28/). A day after dismissing his cabinet, the police chief of Kenya, Inspector General Japhet Koome, [resigned](https://www.aljazeera.com/news/2024/7/12/kenya-police-chief-resigns-after-criticism-over-protest-crackdown) amid accusations of using excessive force on protestors. 
\n\nSee Also\n\n- Wikipedia: [Kenya Finance Bill protests](https://en.wikipedia.org/wiki/Kenya_Finance_Bill_protests)<br />\n- The Standard (Kenya newspaper): [Ruto faces tough week amidst calls for his resignation](https://www.standardmedia.co.ke/national/article/2001499004/ruto-faces-tough-week-amidst-calls-for-his-resignation)<br />\n- The Standard: [Corruption, unemployment, broken pledges, abductions sunk Ruto ship](https://www.standardmedia.co.ke/politics/article/2001498951/corruption-unemployment-broken-pledges-abductions-sunk-ruto-ship)", "resolution_criteria": "This question resolves as **Yes** if according to [credible sources](https://www.metaculus.com/help/faq/#definitions) William Ruto ceases to be the President of Kenya before October 1, 2024, for any reason including but not limited to resignation, impeachment, losing an election, or removal from office via a coup. Otherwise, this question resolves as **No**.", "fine_print": "If Ruto is still President of Kenya but another individual takes over temporarily, this will not count as long as Ruto resumes his duties within 30 days. 
Longer than 30 days will resolve as **Yes**.\n\nIf Ruto ceases to be President and takes another role such as Prime Minister, this question resolves as **Yes**.\n\nIn cases of severe uncertainty causing disagreement among credible sources, Admins will refer to the [UN Heads of State list](https://www.un.org/dgacm/en/content/protocol/hshgnfa) to see if Ruto's name is still listed as President of the Republic of Kenya.", "post_id": 26304, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721485341.683282, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.5 ] } ], "latest": { "start_time": 1721485341.683282, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.5 ], "forecast_values": [ 0.6, 0.4 ], "means": [ 0.3841289470599841 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.19073805166550978, 0.0, 0.0, 0.5279760682566618, 0.0, 0.0, 0.0, 0.0, 0.6308407491715394, 0.24311673443421403, 0.14731282932738304, 0.0, 0.02581358824615143, 0.9147633448549574, 0.46866138545748215, 0.141637135227838, 0.0492373430437701, 0.0, 0.09630553750946041, 0.0, 0.0, 0.0, 0.0, 1.1295457557193673, 0.0, 0.0, 0.0, 0.0, 1.545958118332619, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.12856727458371708, 1.4554162487864406, 0.0, 0.0, 0.0, 0.7219796251882977, 0.8455368295694525, 0.0, 0.0, 0.0, 0.0, 0.21572547604369705, 0.0, 0.0, 0.0, 0.0, 0.3058323220141224, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.040463113133740285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 26.303440583379377, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, 
"spot_baseline_archived_score": 26.303440583379377 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721485341.72068, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721485341.72068, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7522244456622145, 0.24777555433778548 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 53, "key_factors": [], "is_current_content_translated": false, "description": "After weeks of anti-government protests, the President of Kenya, William Ruto, [fired](https://www.cnn.com/2024/07/11/africa/kenyas-president-fires-entire-cabinet-intl/index.html) almost his entire cabinet, [saying](https://nation.africa/kenya/news/president-ruto-sacks-entire-cabinet-4687068) he was \"listening keenly to what the people of Kenya have said\" as a concession to protestors. This follows several weeks of nationwide protests so intense they Ruto had to be [barricaded](https://www.nytimes.com/2024/07/14/opinion/kenya-protests-politics.html) into his presidential compound. On June 25, 2024, police [opened fire](https://www.reuters.com/world/africa/young-kenyan-tax-protesters-plan-nationwide-demonstrations-2024-06-25/) on protestors attempting to enter the parliament. In total at least [39 people](https://www.economist.com/middle-east-and-africa/2024/07/09/kenyas-deadly-gen-z-protests-could-change-the-country) have been killed. \n\n\nThe protests, which were sparked by unpopular proposed tax hikes. evolved into [demands for Ruto's ouster](https://www.reuters.com/world/africa/kenyan-activists-call-fresh-protests-demanding-rutos-resignation-2024-06-28/). 
A day after dismissing his cabinet, the police chief of Kenya, Inspector General Japhet Koome, [resigned](https://www.aljazeera.com/news/2024/7/12/kenya-police-chief-resigns-after-criticism-over-protest-crackdown) amid accusations of using excessive force on protestors. \n\nSee Also\n\n- Wikipedia: [Kenya Finance Bill protests](https://en.wikipedia.org/wiki/Kenya_Finance_Bill_protests)<br />\n- The Standard (Kenya newspaper): [Ruto faces tough week amidst calls for his resignation](https://www.standardmedia.co.ke/national/article/2001499004/ruto-faces-tough-week-amidst-calls-for-his-resignation)<br />\n- The Standard: [Corruption, unemployment, broken pledges, abductions sunk Ruto ship](https://www.standardmedia.co.ke/politics/article/2001498951/corruption-unemployment-broken-pledges-abductions-sunk-ruto-ship)" }, { "id": 26303, "title": "Will a major cyberattack, virus, worm, etc. that uses LLMs in some important way occur before Sept 30, 2024?", "short_title": "", "url_title": "", "slug": "will-a-major-cyberattack-virus-worm-etc-that-uses-llms-in-some-important-way-occur-before-sept-30-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-17T15:00:59.680910Z", "published_at": "2024-07-19T14:30:00Z", "edited_at": "2025-09-05T17:28:53.796380Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-19T14:30:00Z", "comment_count": 53, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-20T14:30:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:55:00Z", "actual_resolve_time": "2024-10-02T14:55:00Z", "open_time": "2024-07-19T14:30:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, 
"prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26303, "title": "Will a major cyberattack, virus, worm, etc. 
that uses LLMs in some important way occur before Sept 30, 2024?", "created_at": "2024-07-17T15:00:59.680910Z", "open_time": "2024-07-19T14:30:00Z", "cp_reveal_time": "2024-07-20T14:30:00Z", "spot_scoring_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:55:00Z", "actual_resolve_time": "2024-10-02T14:55:00Z", "resolution_set_time": "2024-10-02T14:55:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "actual_close_time": "2024-07-20T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "As large language models (LLMs) become widely adopted and increasingly powerful, AI and cybersecurity experts are concerned that LLMs could potentially be used in the execution of cyberattacks and the production of viruses and worms.\n\nThis question asks whether a major cyberattack or significant virus or worm that uses LLMs will arise before Sept 30, 2024.\n\nFor the purpose of this question, a cyberattack, worm, or virus, will be considered major if any of the below scenarios occur as a result:\n\nAt least one person is killed\n\nThere is at least 10 million dollars worth of damage\n\na medical facility, government facility, or company with a market value of at least 100 million dollars is unable to continue basic functions at any given point because of such a cyberattack, worm, or virus.", "resolution_criteria": "This question will resolve positively if credible 
sources report that a major cyberattack, virus, or worm that uses LLMs results in any of the following:\n\nAt least one person is killed\n\nThere is at least 10 million dollars worth of damage\n\na medical facility, government facility, or company with a market value of at least 100 million dollars is unable to continue basic functions at any given point because of such a cyberattack, worm, or virus.\n\nIf none of these scenarios occur before Sept 30, 2024, this question will resolve negatively.", "fine_print": "", "post_id": 26303, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721485462.134442, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.6333333333333333 ] } ], "latest": { "start_time": 1721485462.134442, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.6333333333333333 ], "forecast_values": [ 0.65, 0.35 ], "means": [ 0.3945414763788928 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.5184797798380378, 0.0, 0.0, 0.9147633448549574, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24153906428984848, 0.14731282932738304, 0.0, 0.38043555332875517, 0.09630553750946041, 0.5388299933143196, 0.0, 0.6939525952509218, 0.12856727458371708, 0.0, 0.08253138866588176, 0.0, 0.8355804261814468, 0.0, 0.0, 0.6802158551343251, 0.3415440718595426, 0.0492373430437701, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5724472223148853, 0.059105746561956225, 0.0, 0.0, 0.0, 0.26388788897601856, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6308407491715394, 0.0, 0.0, 0.7620908974955233, 0.0, 1.282585895673191, 0.0, 0.0, 0.0, 0.019746017729336655, 0.3058323220141224, 0.0, 0.0, 0.0, 0.0, 0.11160910633783082, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16798662710965054, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, 
"baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 37.85116232537298, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 37.85116232537298 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721485462.158037, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721485462.158037, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6100790203849489, 0.38992097961505107 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 52, "key_factors": [], "is_current_content_translated": false, "description": "As large language models (LLMs) become widely adopted and increasingly powerful, AI and cybersecurity experts are concerned that LLMs could potentially be used in the execution of cyberattacks and the production of viruses and worms.\n\nThis question asks whether a major cyberattack or significant virus or worm that uses LLMs will arise before Sept 30, 2024.\n\nFor the purpose of this question, a cyberattack, worm, or virus, will be considered major if any of the below scenarios occur as a result:\n\nAt least one person is killed\n\nThere is at least 10 million dollars worth of damage\n\na medical facility, government facility, or company with a market value of at least 100 million dollars is unable to continue basic functions at any given point because of such a cyberattack, worm, or virus." 
}, { "id": 26302, "title": "Will the US see a large-scale riot between July 17, 2024 and Sept 30, 2024?", "short_title": "", "url_title": "", "slug": "will-the-us-see-a-large-scale-riot-between-july-17-2024-and-sept-30-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-17T15:00:59.539346Z", "published_at": "2024-07-19T14:30:00Z", "edited_at": "2025-09-05T17:29:28.143351Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-19T14:30:00Z", "comment_count": 52, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-20T14:30:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:53:00Z", "actual_resolve_time": "2024-10-02T14:53:00Z", "open_time": "2024-07-19T14:30:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", 
"edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26302, "title": "Will the US see a large-scale riot between July 17, 2024 and Sept 30, 2024?", "created_at": "2024-07-17T15:00:59.539346Z", "open_time": "2024-07-19T14:30:00Z", "cp_reveal_time": "2024-07-20T14:30:00Z", "spot_scoring_time": "2024-07-20T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:53:00Z", "actual_resolve_time": "2024-10-02T14:53:00Z", "resolution_set_time": "2024-10-02T14:53:00Z", "scheduled_close_time": "2024-07-20T14:30:00Z", "actual_close_time": "2024-07-20T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Incidents of civil unrest are not terribly uncommon in the United States, but large-scale rioting is quite rare. One of the most severe incidents in recent history was the 1992 Los Angeles riots, during which more than 60 people were killed, more than 2,350 people were injured, and more than 12,000 people were arrested.\n\nA more recent, albeit less severe, example of a riot (or civil unrest) is the January 6 United States Capitol attack which culminated in a coordinated attack on the US Capitol building by far-right, pro-Trump militias resulting in 5 deaths and injuries exceeding 100 persons.", "resolution_criteria": "For the purposes of this question, 'large-scale riot' is defined as an event of rioting (large-scale disturbance of the peace by a violent crowd) or civil unrest in which any of the following conditions is met:\n\n>1. At least 50 people die due to violence, either due to the actions of rioters or other civilians, or police, military, national guard, or other law enforcement or government agents.\n\n>2. At least 10,000 people are arrested by police, military, national guard, or other law enforcement or government agents.\n\n>3. Property damages are credibly estimated at $1 billion or more. This estimate may come from either the local government of the locality in which the incident takes place, the state government, the federal government, or a major US news publication such as the New York Times, Washington Post, or major broadcast news networks.\n\nSuch events should take place in a 100km radius, and within 14 days. 
As an example, the [1992 LA riots](https://en.wikipedia.org/wiki/1992_Los_Angeles_riots) would count, but rioting that is spread across all of California or that occurs in separate phases over a long time period would not count if no single incident met any of the above criteria.\n\nResolution should cite a government statement or credible news reports that indicate that any of the above conditions have been met by an event of rioting.", "fine_print": "", "post_id": 26302, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721485355.652544, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.09699999999999999 ], "interval_upper_bounds": [ 0.26 ] } ], "latest": { "start_time": 1721485355.652544, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.09699999999999999 ], "interval_upper_bounds": [ 0.26 ], "forecast_values": [ 0.903, 0.09699999999999999 ], "means": [ 0.1941916812596606 ], "histogram": [ [ 0.0, 0.0, 1.0, 0.8355804261814468, 0.0, 1.9609915073491333, 0.0, 0.7404338494245358, 0.0, 0.422729524609672, 0.09630553750946041, 0.0, 0.0, 0.0, 0.24311673443421403, 0.6442704837539848, 0.0, 0.0, 0.0, 0.03268672417676853, 0.7133721378374211, 0.0, 0.08991462558617408, 0.0, 0.19073805166550978, 0.0, 0.46866138545748215, 0.0, 0.0, 0.0, 0.27308960725456716, 0.0, 0.0, 0.0094962884186239, 0.0, 0.0, 0.7620908974955233, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.040463113133740285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8483334580807903, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11160910633783082, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3415440718595426, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 85.27978928187711, "peer_archived_score": 0.0, 
"baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 85.27978928187711 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721485355.680705, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721485355.680705, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8931739588927379, 0.10682604110726217 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 50, "key_factors": [], "is_current_content_translated": false, "description": "Incidents of civil unrest are not terribly uncommon in the United States, but large-scale rioting is quite rare. One of the most severe incidents in recent history was the 1992 Los Angeles riots, during which more than 60 people were killed, more than 2,350 people were injured, and more than 12,000 people were arrested.\n\nA more recent, albeit less severe, example of a riot (or civil unrest) is the January 6 United States Capitol attack which culminated in a coordinated attack on the US Capitol building by far-right, pro-Trump militias resulting in 5 deaths and injuries exceeding 100 persons." 
}, { "id": 26289, "title": "Five years after AGI, will an AI Windfall Clause have been activated?", "short_title": "5Y after AGI, Windfall Clause activated?", "url_title": "5Y after AGI, Windfall Clause activated?", "slug": "5y-after-agi-windfall-clause-activated", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-17T14:16:58.745090Z", "published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-09-08T13:52:55.704376Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 1, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ], "question_series": [ { "id": 3410, "type": 
"question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 26289, "title": "Five years after AGI, will an AI Windfall Clause have been activated?", "created_at": "2024-07-17T14:16:58.745090Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": "2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", 
"default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In 2020, the Future of Humanity Institute ([RIP](https://static1.squarespace.com/static/660e95991cf0293c2463bcc8/t/661a3fc3cecceb2b8ffce80d/1712996303164/FHI+Final+Report.pdf)) published a report entitled “[The Windfall Clause: Distributing the Benefits of AI for the Common Good.](https://www.fhi.ox.ac.uk/wp-content/uploads/Windfall-Clause-Report.pdf)”. (They also wrote an [abridged version](https://arxiv.org/abs/1912.11595) for publishing in the [Proceedings of AIES](https://dl.acm.org/doi/abs/10.1145/3375627.3375842).) The Windfall Clause proposal expands on an idea from Nick Bostrom’s _[Superintelligence](https://en.wikipedia.org/wiki/Superintelligence:_Paths,_Dangers,_Strategies)_, which says that the company that develops [transformative AI](https://www.metaculus.com/questions/19356/transformative-ai-date/) could hugely raise the global standard of living, potentially achieving things like poverty eradication, by distributing a fraction of their profits.\n\nThere are some details still to be worked out with the proposal, but expert opinion is that these are unlikely to be blockers: what remains is for the leading AI companies to get on board.\n\n___\n\n(H/T @AABoyles for writing the “[Will Any Major AI Company Commit to an AI Windfall Clause by 2025?](https://www.metaculus.com/questions/4061/will-any-major-ai-company-commit-to-an-ai-windfall-clause-by-2025/)” question, from which this one was adapted.)", "resolution_criteria": "This question will resolve as **Yes** if, within five years of our “[When will artificial general intelligence (AGI) 
arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question resolving, an AI company activates a Windfall Clause. In order to count, the AI company must fit in one of these categories:\n\n* It is the leading AI company in the world, by valuation. To count, the company can be “leading” at any time in the five-year period. \n* It is at least 50% as large as the leading AI company at the time, by valuation. (If there is ambiguity over whether a company meets this 50% threshold, Metaculus admins will make a ruling.)\n\nThis question is conditional on humans not having gone extinct.\n\nFor the purposes of this question:\n\n* A Windfall Clause is any legally-binding agreement which caps profits resulting from a new technological innovation at any value less than 50% of [gross world product](https://en.wikipedia.org/wiki/Gross_world_product).\n* A company is considered to have “activated” its Windfall Clause if it has distributed its profits in line with its Windfall Clause for a period of 12 months or longer, according to [credible](https://www.metaculus.com/help/faq/#definitions) reporting. In the case that a company is above the 50% threshold at one point but then later falls below that (i.e., the company was counting for this question, and then stopped counting), this distribution of profits must start no more than 6 months after the company stopped counting. 
If there is ambiguity over whether either the 12-month or 6-month requirements have been met, Metaculus admins will make a ruling.\n\nAdmins will wait up to 18 months beyond the “Five years after our ‘[When will AGI arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)’ question resolves” date to determine this question’s resolution, if needed.", "fine_print": "If a given company is publicly traded, valuation will be taken to be its market capitalization as reported by [CompaniesMarketCap](https://companiesmarketcap.com/), or by another [credible source](https://www.metaculus.com/help/faq/#definitions) if that site is unavailable. If a given company is privately held, valuation for the purposes of this question will be assessed based on credible reporting, credible valuation estimates such as [funding rounds](https://www.investopedia.com/articles/personal-finance/102015/series-b-c-funding-what-it-all-means-and-how-it-works.asp), and/or [other accepted methods](https://www.investopedia.com/terms/v/valuation.asp). In the event of ambiguity over a company’s valuation, Metaculus admins will have the final say.\n\nIf AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be **Annulled**. In other words, for this question we would like forecasters to condition on extinction not happening. 
(Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26289, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1758075602.931768, "end_time": 1765212880.957973, "forecaster_count": 23, "interval_lower_bounds": [ 0.003 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.06 ] } ], "latest": { "start_time": 1758075602.931768, "end_time": 1765212880.957973, "forecaster_count": 23, "interval_lower_bounds": [ 0.003 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.06 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.05544668064404384 ], "histogram": [ [ 2.438401549008769, 1.6188580606658507, 0.36520009566972245, 0.0339924118233929, 0.4512058906735414, 1.1639704137555005, 0.8977614653534849, 0.3484801054844811, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5103156119575218, 0.0, 0.0, 0.0, 0.0, 0.1164748146920504, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1659893709142453, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289090.759375, "end_time": null, "forecaster_count": 22, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289090.759375, "end_time": null, "forecaster_count": 22, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9962802421721016, 0.0037197578278983004 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, 
"user_permission": "forecaster", "vote": { "score": 2, "user_vote": null }, "forecasts_count": 52, "key_factors": [], "is_current_content_translated": false, "description": "In 2020, the Future of Humanity Institute ([RIP](https://static1.squarespace.com/static/660e95991cf0293c2463bcc8/t/661a3fc3cecceb2b8ffce80d/1712996303164/FHI+Final+Report.pdf)) published a report entitled “[The Windfall Clause: Distributing the Benefits of AI for the Common Good.](https://www.fhi.ox.ac.uk/wp-content/uploads/Windfall-Clause-Report.pdf)”. (They also wrote an [abridged version](https://arxiv.org/abs/1912.11595) for publishing in the [Proceedings of AIES](https://dl.acm.org/doi/abs/10.1145/3375627.3375842).) The Windfall Clause proposal expands on an idea from Nick Bostrom’s _[Superintelligence](https://en.wikipedia.org/wiki/Superintelligence:_Paths,_Dangers,_Strategies)_, which says that the company that develops [transformative AI](https://www.metaculus.com/questions/19356/transformative-ai-date/) could hugely raise the global standard of living, potentially achieving things like poverty eradication, by distributing a fraction of their profits.\n\nThere are some details still to be worked out with the proposal, but expert opinion is that these are unlikely to be blockers: what remains is for the leading AI companies to get on board.\n\n___\n\n(H/T @AABoyles for writing the “[Will Any Major AI Company Commit to an AI Windfall Clause by 2025?](https://www.metaculus.com/questions/4061/will-any-major-ai-company-commit-to-an-ai-windfall-clause-by-2025/)” question, from which this one was adapted.)" }, { "id": 26286, "title": "Five years after AGI, will nuclear deterrence no longer hold?", "short_title": "5Y after AGI, nuclear deterrence undermined?", "url_title": "5Y after AGI, nuclear deterrence undermined?", "slug": "5y-after-agi-nuclear-deterrence-undermined", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-17T12:18:42.764025Z", 
"published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-10-20T08:40:57.803347Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 10, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 34, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" }, { "id": 15868, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☣️", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": 
"exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3690, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☢️", "description": "Nuclear Technology & Risks", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26286, "title": "Five years after AGI, will nuclear deterrence no longer hold?", "created_at": "2024-07-17T12:18:42.764025Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": "2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, 
"open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "The world’s first nuclear test detonation, the “Trinity” test, happened on July 16, 1945. To date, only two nuclear bombs have been used in war, both of these by the United States, within a month of the Trinity test, against the Japanese cities of Hiroshima and Nagasaki. These bombs killed an estimated 350,000 civilians.\n\n[Deterrence theory](https://en.wikipedia.org/wiki/Deterrence_theory) says that the (relative) world peace of the past 80 years, the [most peaceful years](https://en.wikipedia.org/wiki/Long_Peace) in human history, is a result of nuclear deterrence.\n\n(Note: Deterrence theory is contested, especially by anti-nuclear advocates, and there is no expert consensus on to what extent deterrence explains the peace that has occurred.)\n\nWith the arrival of AGI and the technological advances it precipitates, however, the nuclear deterrence period may come to an end. From Leopold Aschenbrenner’s “[Situational Awareness: The Decade Ahead](https://situational-awareness.ai/)” report:\n\n> The military advantage would be decisive even against nuclear deterrents.\n> \n> To be even clearer: it seems likely the advantage conferred by superintelligence would be decisive enough even to preemptively take out an adversary’s nuclear deterrent. Improved sensor networks and analysis could locate even the quietest current nuclear submarines (similarly for mobile missile launchers). Millions or billions of mouse-sized situational awareness 130 autonomous drones, with advances in stealth, could infiltrate behind enemy lines and then surreptitiously locate, sabotage, and decapitate the adversary’s nuclear forces. Improved sensors, targeting, and so on could dramatically improve missile defense (similar to, say, the Iran vs. 
Israel example above); moreover, if there is an industrial explosion, robot factories could churn out thousands of interceptors for each opposing missile. And all of this is without even considering completely new scientific and technological paradigms (e.g., remotely deactivating all the nukes).\n\n“Superintelligence,” which Aschenbrenner talks about, is most commonly defined as artificial intelligence that is better than the best human experts in all domains. Economist Tom Davidson [estimates](https://www.astralcodexten.com/p/davidson-on-takeoff-speeds#:~:text=fast%2C%20and%20terrifying.-,Specifically%2C%20he%20predicts%20it%20will%20take%20about%20three%20years%20to%20go%20from%20AIs%20that%20can%20do%2020%25%20of%20all%20human%20jobs%20(weighted%20by%20economic%20value)%20to%20AIs%20that%20can%20do%20100%25%2C%20with%20significantly%20superhuman%20AIs%20within%20a%20year%20after%20that.,-As%20penance%20for) that superintelligence will come within a year of AGI (where he defines AGI as AI that can automate 100% of human labor). 
Meanwhile, the current community prediction on the Metaculus question “[After a (weak) AGI is created, how many months will it be before the first superintelligent AI is created?](https://www.metaculus.com/questions/9062/time-from-weak-agi-to-superintelligence/)” is 40 months.", "resolution_criteria": "This question will resolve as **Yes** if, within five years of our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question resolving, and conditional on humans not having gone extinct, either:\n\ni) Nuclear deterrence is shown to no longer hold, by [credible reporting](https://www.metaculus.com/help/faq/#definitions) that a state has preemptively taken out an adversary’s nuclear deterrent by non-nuclear means.\n\nii) The majority expert view, as determined by a [panel](https://www.metaculus.com/help/faq/#rescouncil) of three Metaculus admins, is that nuclear deterrence no longer holds.\n\nThis panel will convene at the resolution date, or before then upon seeing a relevant credible report. For the convened-before-resolution-date case, a unanimous vote of the panel indicating that “nuclear deterrence no longer holds” is the majority expert view will lead to immediate Yes resolution. In the case that Yes resolution has not occurred prior to the resolution date, a majority vote of the panel will then decide between Yes and **No** resolution.", "fine_print": "If AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be **Annulled**. In other words, for this question we would like forecasters to condition on extinction not happening. 
(Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26286, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1760949647.370759, "end_time": 1763322613.574386, "forecaster_count": 24, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.65 ] } ], "latest": { "start_time": 1760949647.370759, "end_time": 1763322613.574386, "forecaster_count": 24, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.65 ], "forecast_values": [ 0.7, 0.3 ], "means": [ 0.39096000570706696 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288692.348347, "end_time": null, "forecaster_count": 23, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288692.348347, "end_time": null, "forecaster_count": 23, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8451465521832213, 0.15485344781677865 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 74, "key_factors": [], "is_current_content_translated": false, "description": "The world’s first nuclear test detonation, the “Trinity” test, happened on July 16, 1945. To date, only two nuclear bombs have been used in war, both of these by the United States, within a month of the Trinity test, against the Japanese cities of Hiroshima and Nagasaki. 
These bombs killed an estimated 350,000 civilians.\n\n[Deterrence theory](https://en.wikipedia.org/wiki/Deterrence_theory) says that the (relative) world peace of the past 80 years, the [most peaceful years](https://en.wikipedia.org/wiki/Long_Peace) in human history, is a result of nuclear deterrence.\n\n(Note: Deterrence theory is contested, especially by anti-nuclear advocates, and there is no expert consensus on to what extent deterrence explains the peace that has occurred.)\n\nWith the arrival of AGI and the technological advances it precipitates, however, the nuclear deterrence period may come to an end. From Leopold Aschenbrenner’s “[Situational Awareness: The Decade Ahead](https://situational-awareness.ai/)” report:\n\n> The military advantage would be decisive even against nuclear deterrents.\n> \n> To be even clearer: it seems likely the advantage conferred by superintelligence would be decisive enough even to preemptively take out an adversary’s nuclear deterrent. Improved sensor networks and analysis could locate even the quietest current nuclear submarines (similarly for mobile missile launchers). Millions or billions of mouse-sized situational awareness 130 autonomous drones, with advances in stealth, could infiltrate behind enemy lines and then surreptitiously locate, sabotage, and decapitate the adversary’s nuclear forces. Improved sensors, targeting, and so on could dramatically improve missile defense (similar to, say, the Iran vs. Israel example above); moreover, if there is an industrial explosion, robot factories could churn out thousands of interceptors for each opposing missile. And all of this is without even considering completely new scientific and technological paradigms (e.g., remotely deactivating all the nukes).\n\n“Superintelligence,” which Aschenbrenner talks about, is most commonly defined as artificial intelligence that is better than the best human experts in all domains. 
Economist Tom Davidson [estimates](https://www.astralcodexten.com/p/davidson-on-takeoff-speeds#:~:text=fast%2C%20and%20terrifying.-,Specifically%2C%20he%20predicts%20it%20will%20take%20about%20three%20years%20to%20go%20from%20AIs%20that%20can%20do%2020%25%20of%20all%20human%20jobs%20(weighted%20by%20economic%20value)%20to%20AIs%20that%20can%20do%20100%25%2C%20with%20significantly%20superhuman%20AIs%20within%20a%20year%20after%20that.,-As%20penance%20for) that superintelligence will come within a year of AGI (where he defines AGI as AI that can automate 100% of human labor). Meanwhile, the current community prediction on the Metaculus question “[After a (weak) AGI is created, how many months will it be before the first superintelligent AI is created?](https://www.metaculus.com/questions/9062/time-from-weak-agi-to-superintelligence/)” is 40 months." }, { "id": 26272, "title": "Will the winning bot in any Quarterly AI Benchmarking tournament beat the human Pro aggregate before Q3 of 2025?", "short_title": "Winning bot beat the Pro aggregate", "url_title": "Winning bot beat the Pro aggregate", "slug": "winning-bot-beat-the-pro-aggregate", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-16T15:47:55.461292Z", "published_at": "2024-07-20T12:00:00Z", "edited_at": "2025-09-05T17:29:23.942456Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-20T12:00:00Z", "comment_count": 42, "status": "resolved", "resolved": true, "actual_close_time": "2025-06-30T16:00:00Z", "scheduled_close_time": "2025-06-30T16:00:00Z", "scheduled_resolve_time": "2025-07-10T16:00:00Z", "actual_resolve_time": "2025-07-20T10:21:00Z", "open_time": "2024-07-20T12:00:00Z", "nr_forecasters": 42, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32601, "name": "2024-2025 Leaderboard", "slug": "2024_2025_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": 
"ai", "emoji": "🤖", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3686, "name": "Metaculus", "slug": "metaculus", "emoji": "🔮", "description": "Metaculus", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26272, "title": "Will the winning bot in any Quarterly AI Benchmarking tournament beat the human Pro aggregate before Q3 of 2025?", "created_at": "2024-07-16T15:47:55.461292Z", "open_time": "2024-07-20T12:00:00Z", "cp_reveal_time": "2024-07-23T12:00:00Z", "spot_scoring_time": "2024-07-23T12:00:00Z", "scheduled_resolve_time": "2025-07-10T16:00:00Z", "actual_resolve_time": "2025-07-20T10:21:00Z", "resolution_set_time": "2025-07-20T10:22:51.698899Z", "scheduled_close_time": "2025-06-30T16:00:00Z", "actual_close_time": "2025-06-30T16:00:00Z", "type": 
"binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "The [Q3 AI Forecasting Benchmark Tournament](https://www.metaculus.com/project/aibq3/) is the first of four $30,000 quarterly tournaments in a $120,000 series designed to benchmark the state of the art in AI forecasting and compare it to the best human forecasting on real-world questions.\n\nTen Pro forecasters will forecast on at least 100 of the questions in the Bot tournament.", "resolution_criteria": "This question will resolve Yes if the winning bot in any of the first four quarterly AI Benchmarking tournaments beats the Pro aggregate. Spot scoring (only the last forecast counts) and average [Baseline](https://www.metaculus.com/help/scores-faq/#baseline-score) score will be used to measure accuracy. 
Only questions that both Pros and the winning bot forecast on will be included.", "fine_print": "The \"winning bot\" will be considered the bot that achieves first place in the tournament based on the scoring methodology currently in use for that tournament (for example, if the tournament scoring methodology changes it will be the top bot using the latest official methodology during that quarter).", "post_id": 26272, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1750728431.987954, "end_time": null, "forecaster_count": 38, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.19 ], "interval_upper_bounds": [ 0.3 ] } ], "latest": { "start_time": 1750728431.987954, "end_time": null, "forecaster_count": 38, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.19 ], "interval_upper_bounds": [ 0.3 ], "forecast_values": [ 0.81, 0.19 ], "means": [ 0.19927586440034056 ], "histogram": [ [ 0.0, 0.0, 0.0, 1.8279561598353582, 0.0, 0.985695975410994, 0.0, 0.0, 0.31210550135292425, 0.0, 0.45875031368367064, 0.0, 0.02435728586146805, 0.0, 0.41773359555817, 0.1654381517936327, 0.6571444928584438, 0.0, 0.25446741675480405, 0.716439045345342, 0.015538817933844495, 0.0, 0.6019627254324547, 0.7426989935325568, 0.0, 0.7478894959041236, 0.0, 0.0, 0.0, 0.0, 1.1148171974242467, 0.0, 0.0, 0.0, 0.0, 0.5695586864280762, 0.0, 0.0, 0.0, 0.0, 0.5029882706621958, 0.0, 0.3445918901109067, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09706889658074358, 0.1011216878911307, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08867683223307754, 0.0, 0.0, 0.029639045211680565, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.011886366648789067, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.049680819655039385, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "baseline_score": 60.473009364854704, "peer_score": 14.701337596154936, "coverage": 0.9995886536878007, "relative_legacy_score": 0.0, "weighted_coverage": 0.9995886536878007, "spot_peer_score": 
8.844138975362698, "spot_baseline_score": 84.79969065549501, "baseline_archived_score": 60.473009364854704, "peer_archived_score": 14.701337596154936, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 8.844138975362698, "spot_baseline_archived_score": 84.79969065549501 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287533.687586, "end_time": null, "forecaster_count": 26, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287533.687586, "end_time": null, "forecaster_count": 26, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9017708481112512, 0.0982291518887488 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 9, "user_vote": null }, "forecasts_count": 148, "key_factors": [], "is_current_content_translated": false, "description": "The [Q3 AI Forecasting Benchmark Tournament](https://www.metaculus.com/project/aibq3/) is the first of four $30,000 quarterly tournaments in a $120,000 series designed to benchmark the state of the art in AI forecasting and compare it to the best human forecasting on real-world questions.\n\nTen Pro forecasters will forecast on at least 100 of the questions in the Bot tournament." 
}, { "id": 26268, "title": "Five years after AGI, will AI philosophical competence be solved?", "short_title": "5Y after AGI, AI philosophical competence?", "url_title": "5Y after AGI, AI philosophical competence?", "slug": "5y-after-agi-ai-philosophical-competence", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-16T11:51:39.106357Z", "published_at": "2024-07-23T00:42:00Z", "edited_at": "2025-10-18T09:49:25.670095Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-23T00:42:00Z", "comment_count": 26, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-23T00:42:00Z", "nr_forecasters": 44, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, 
"user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26268, "title": "Five years after AGI, will AI philosophical competence be solved?", "created_at": "2024-07-16T11:51:39.106357Z", "open_time": "2024-07-23T00:42:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": "2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "To date, most AI safety discussion has revolved around avoiding i) catastrophic misuse of advanced AI by bad human actors, and ii) [AI takeover](https://en.wikipedia.org/wiki/AI_takeover).\n\nThis could be a mistake.\n\nThere may be other, less obvious failure modes which are equally catastrophic, or even more so. One such potential failure mode is an AI-fueled [suffering catastrophe](https://en.wikipedia.org/wiki/Suffering_risks). Another—the topic of this question—is philosophical incompetence. The general idea is that we may get into a situation in which we believe we have [aligned](https://ai-alignment.com/clarifying-ai-alignment-cec47cd69dd6) [superintelligent](https://en.wikipedia.org/wiki/Superintelligence) advisors whom we can rely on, but, in actuality, on account of how the AI training process works, these “superintelligences” are philosophically incompetent and thus lead us into moral catastrophe, pursuing actions that destroy humanity’s long-term potential.\n\nAI philosophical competence was established as a field of study by [Wei Dai](https://en.wikipedia.org/wiki/Wei_Dai), inventor of b-money (which [makes him](https://en.wikipedia.org/wiki/Wei_Dai#:~:text=There%20has%20been%20much%20speculation%20as%20to%20the%20identity%20of%20Satoshi%20Nakamoto%2C%20with%20suspects%20including%20Wei%20Dai%20himself%2C) one of few candidates for being Bitcoin’s [elusive 
creator](https://en.wikipedia.org/wiki/Satoshi_Nakamoto#:~:text=In%20a%20May%202011%20article%2C%20Szabo%20said%20of%20Bitcoin%27s%20creator%3A%20%22Myself%2C%20Wei%20Dai%2C%20and%20Hal%20Finney%20were%20the%20only%20people%20I%20know%20of%20who%20liked%20the%20idea%20(or%20in%20Dai%27s%20case%20his%20related%20idea)%20enough%20to%20pursue%20it%20to%20any%20significant%20extent%20until%20Nakamoto%20(assuming%20Nakamoto%20is%20not%20really%20Finney%20or%20Dai).%22), though he denies this identification) and a leading contributor to modern anthropics and decision theory. Dai’s writings on AI philosophical competence (and the closely related topic of metaphilosophy) have been collected [here](https://forum.effectivealtruism.org/posts/4xwWDLfMenw48TR8c/long-reflection-reading-list#Metaphilosophy__AI_philosophical_competence); a fuller list of Dai’s writings can be found [here](https://www.lesswrong.com/users/wei-dai). Some relevant excerpts:\n\n> It appears that achieving a good long term future requires getting a lot of philosophical questions right that are hard for us to answer. Given this, [initially](https://www.lesswrong.com/posts/vrnhfGuYTww3fKhAM/three-approaches-to-friendliness) I thought there are only three ways for AI to go right in this regard (assuming everything else goes well with the AI):\n> \n> 1. We solve all the important philosophical problems ahead of time and program the solutions into the AI.\n> \n> 2. We solve metaphilosophy (i.e., understand philosophical reasoning as well as we understand mathematical reasoning) and program that into the AI so it can solve philosophical problems on its own.\n> \n> 3. We program the AI to learn philosophical reasoning from humans or use human simulations to solve philosophical problems.\n> \n> Since then people have come up with a couple more scenarios (which did make me *slightly* more optimistic about this problem):\n> \n> 4. 
We all coordinate to stop technological progress some time after AI but before space colonization, and have a period of long reflection where humans, maybe with help from AIs, spend thousands or millions of years to solve philosophical problems.\n> \n> 5. We program AIs to be corrigible to their users, some users care about getting philosophy correct so the AIs help keep them safe and get their \"fair share\" of the universe until philosophical problems are solved eventually, enough users care about this so that we end up with a mostly good future, and lack of philosophical knowledge doesn't cause disaster in the meantime. (My writings on \"human safety problems\" were in part a response to this suggestion, outlining how hard it would be to keep humans \"safe\" in this scenario.)\n> \n> The overall argument is that, given [human safety problems](https://www.lesswrong.com/posts/HTgakSs6JpnogD6c2/two-neglected-problems-in-human-ai-safety), realistic competitive pressures, difficulties with coordination, etc., it seems hard to end up in any of these scenarios and not have something go wrong along the way. Maybe another way to put this is, given philosophical difficulties, the target we'd have to hit with AI is even smaller than it might otherwise appear.<br/>\n> [—*The Argument from Philosophical Difficulty*](https://www.lesswrong.com/posts/w6d7XBCegc96kz4n3/the-argument-from-philosophical-difficulty)\n> ___\n> Of course generating hands is ultimately not a very hard problem. Hand anatomy and its interactions with other objects pose no fundamental mysteries. Bad hands are easy for humans to recognize and therefore we have quick and easy feedback for how well we're solving the problem. 
We can use our explicit understanding of hands to directly help solve the problem [...], or just provide the AI with more high quality training data (physically taking more photos of hands if needed) until it recognizably fixed itself.\n> \n> **What about philosophy?** Well, scarcity of existing high quality training data, check. Lots of unhelpful data labeled \"philosophy\", check. Low proportion of philosophy in the training data, check. Quick and easy to generate more high quality data, no. Good explicit understanding of the principles involved, no. Easy to recognize how well the problem is being solved, no. It looks like with philosophy we've got many of the factors that make hand generation a hard problem for now, and none of the factors that make it probably not that hard in the longer run.\n> \n> In a parallel universe with a saner civilization, there must be tons of philosophy professors workings [*sic*] with tons of AI researchers to try to improve AI's philosophical reasoning. They're probably going on TV and talking about 养兵千日,用兵一时 (feed an army for a thousand days, use it for an hour) or how proud they are to contribute to our civilization's existential safety at this critical time. There are probably massive prizes set up to encourage public contribution, just in case anyone had a promising out of the box idea (and of course with massive associated infrastructure to filter out the inevitable deluge of bad ideas). Maybe there are extensive debates and proposals about pausing or slowing down AI development until [metaphilosophical](https://www.lesswrong.com/tag/meta-philosophy) research catches up.\n> \n> In the meantime, back in our world, there's one person, [self-taught](https://forum.effectivealtruism.org/posts/ytBxJpQsdEEmPAv9F/i-m-interviewing-carl-shulman-what-should-i-ask-him#oZpuzKqLukyESmJ4o) in AI and philosophy, writing about a crude analogy between different AI capabilities. 
In the meantime, there are more people visibly working to improve AI's hand generation than AI's philosophical reasoning.<br/>\n> [—*AI doing philosophy = AI generating hands?*](https://forum.effectivealtruism.org/posts/axSfJXriBWEixsHGR/ai-doing-philosophy-ai-generating-hands)", "resolution_criteria": "This question resolves as **Yes** if, by the day five years after our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question resolves, Wei Dai has indicated that he believes the problem of AI philosophical competence has been solved. Dai would most likely indicate this in [his publicly available writings](https://www.lesswrong.com/users/wei-dai). The question resolves as **No**, otherwise.\n\nThis question is conditional on humans not having gone extinct.", "fine_print": "If Dai is no longer available to resolve this question, Metaculus admins will either rework the resolution (e.g., to be about the statements of other experts) or rule this question be **Annulled**. By default, in the case that Dai is unavailable, this question will be Annulled unless there is agreement between the top two leading thinkers in the field, as determined by Metaculus admins, on whether the problem has been solved.\n\nIf AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be Annulled. In other words, for this question we would like forecasters to condition on extinction not happening. 
(Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26268, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1758675723.09752, "end_time": 1765209547.186148, "forecaster_count": 35, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.11 ], "interval_upper_bounds": [ 0.34 ] } ], "latest": { "start_time": 1758675723.09752, "end_time": 1765209547.186148, "forecaster_count": 35, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.11 ], "interval_upper_bounds": [ 0.34 ], "forecast_values": [ 0.89, 0.11 ], "means": [ 0.24143184395157366 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289640.066777, "end_time": null, "forecaster_count": 24, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289640.066777, "end_time": null, "forecaster_count": 24, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8231879730807637, 0.17681202691923623 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 98, "key_factors": [], "is_current_content_translated": false, "description": "To date, most AI safety discussion has revolved around avoiding i) catastrophic misuse of advanced AI by bad human actors, and ii) [AI takeover](https://en.wikipedia.org/wiki/AI_takeover).\n\nThis could be a mistake.\n\nThere may be other, less obvious failure modes which are equally catastrophic, or even more so. One such potential failure mode is an AI-fueled [suffering catastrophe](https://en.wikipedia.org/wiki/Suffering_risks). 
Another—the topic of this question—is philosophical incompetence. The general idea is that we may get into a situation in which we believe we have [aligned](https://ai-alignment.com/clarifying-ai-alignment-cec47cd69dd6) [superintelligent](https://en.wikipedia.org/wiki/Superintelligence) advisors whom we can rely on, but, in actuality, on account of how the AI training process works, these “superintelligences” are philosophically incompetent and thus lead us into moral catastrophe, pursuing actions that destroy humanity’s long-term potential.\n\nAI philosophical competence was established as a field of study by [Wei Dai](https://en.wikipedia.org/wiki/Wei_Dai), inventor of b-money (which [makes him](https://en.wikipedia.org/wiki/Wei_Dai#:~:text=There%20has%20been%20much%20speculation%20as%20to%20the%20identity%20of%20Satoshi%20Nakamoto%2C%20with%20suspects%20including%20Wei%20Dai%20himself%2C) one of few candidates for being Bitcoin’s [elusive creator](https://en.wikipedia.org/wiki/Satoshi_Nakamoto#:~:text=In%20a%20May%202011%20article%2C%20Szabo%20said%20of%20Bitcoin%27s%20creator%3A%20%22Myself%2C%20Wei%20Dai%2C%20and%20Hal%20Finney%20were%20the%20only%20people%20I%20know%20of%20who%20liked%20the%20idea%20(or%20in%20Dai%27s%20case%20his%20related%20idea)%20enough%20to%20pursue%20it%20to%20any%20significant%20extent%20until%20Nakamoto%20(assuming%20Nakamoto%20is%20not%20really%20Finney%20or%20Dai).%22), though he denies this identification) and a leading contributor to modern anthropics and decision theory. Dai’s writings on AI philosophical competence (and the closely related topic of metaphilosophy) have been collected [here](https://forum.effectivealtruism.org/posts/4xwWDLfMenw48TR8c/long-reflection-reading-list#Metaphilosophy__AI_philosophical_competence); a fuller list of Dai’s writings can be found [here](https://www.lesswrong.com/users/wei-dai). 
Some relevant excerpts:\n\n> It appears that achieving a good long term future requires getting a lot of philosophical questions right that are hard for us to answer. Given this, [initially](https://www.lesswrong.com/posts/vrnhfGuYTww3fKhAM/three-approaches-to-friendliness) I thought there are only three ways for AI to go right in this regard (assuming everything else goes well with the AI):\n> \n> 1. We solve all the important philosophical problems ahead of time and program the solutions into the AI.\n> \n> 2. We solve metaphilosophy (i.e., understand philosophical reasoning as well as we understand mathematical reasoning) and program that into the AI so it can solve philosophical problems on its own.\n> \n> 3. We program the AI to learn philosophical reasoning from humans or use human simulations to solve philosophical problems.\n> \n> Since then people have come up with a couple more scenarios (which did make me *slightly* more optimistic about this problem):\n> \n> 4. We all coordinate to stop technological progress some time after AI but before space colonization, and have a period of long reflection where humans, maybe with help from AIs, spend thousands or millions of years to solve philosophical problems.\n> \n> 5. We program AIs to be corrigible to their users, some users care about getting philosophy correct so the AIs help keep them safe and get their \"fair share\" of the universe until philosophical problems are solved eventually, enough users care about this so that we end up with a mostly good future, and lack of philosophical knowledge doesn't cause disaster in the meantime. 
(My writings on \"human safety problems\" were in part a response to this suggestion, outlining how hard it would be to keep humans \"safe\" in this scenario.)\n> \n> The overall argument is that, given [human safety problems](https://www.lesswrong.com/posts/HTgakSs6JpnogD6c2/two-neglected-problems-in-human-ai-safety), realistic competitive pressures, difficulties with coordination, etc., it seems hard to end up in any of these scenarios and not have something go wrong along the way. Maybe another way to put this is, given philosophical difficulties, the target we'd have to hit with AI is even smaller than it might otherwise appear.<br/>\n> [—*The Argument from Philosophical Difficulty*](https://www.lesswrong.com/posts/w6d7XBCegc96kz4n3/the-argument-from-philosophical-difficulty)\n> ___\n> Of course generating hands is ultimately not a very hard problem. Hand anatomy and its interactions with other objects pose no fundamental mysteries. Bad hands are easy for humans to recognize and therefore we have quick and easy feedback for how well we're solving the problem. We can use our explicit understanding of hands to directly help solve the problem [...], or just provide the AI with more high quality training data (physically taking more photos of hands if needed) until it recognizably fixed itself.\n> \n> **What about philosophy?** Well, scarcity of existing high quality training data, check. Lots of unhelpful data labeled \"philosophy\", check. Low proportion of philosophy in the training data, check. Quick and easy to generate more high quality data, no. Good explicit understanding of the principles involved, no. Easy to recognize how well the problem is being solved, no. 
It looks like with philosophy we've got many of the factors that make hand generation a hard problem for now, and none of the factors that make it probably not that hard in the longer run.\n> \n> In a parallel universe with a saner civilization, there must be tons of philosophy professors workings [*sic*] with tons of AI researchers to try to improve AI's philosophical reasoning. They're probably going on TV and talking about 养兵千日,用兵一时 (feed an army for a thousand days, use it for an hour) or how proud they are to contribute to our civilization's existential safety at this critical time. There are probably massive prizes set up to encourage public contribution, just in case anyone had a promising out of the box idea (and of course with massive associated infrastructure to filter out the inevitable deluge of bad ideas). Maybe there are extensive debates and proposals about pausing or slowing down AI development until [metaphilosophical](https://www.lesswrong.com/tag/meta-philosophy) research catches up.\n> \n> In the meantime, back in our world, there's one person, [self-taught](https://forum.effectivealtruism.org/posts/ytBxJpQsdEEmPAv9F/i-m-interviewing-carl-shulman-what-should-i-ask-him#oZpuzKqLukyESmJ4o) in AI and philosophy, writing about a crude analogy between different AI capabilities. 
In the meantime, there are more people visibly working to improve AI's hand generation than AI's philosophical reasoning.<br/>\n> [—*AI doing philosophy = AI generating hands?*](https://forum.effectivealtruism.org/posts/axSfJXriBWEixsHGR/ai-doing-philosophy-ai-generating-hands)" }, { "id": 26267, "title": "By five years after AGI, will nuclear fusion provide >10% of the world’s energy?", "short_title": "5Y after AGI, nuclear fusion 10% world energy", "url_title": "5Y after AGI, nuclear fusion 10% world energy", "slug": "5y-after-agi-nuclear-fusion-10-world-energy", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-16T10:51:48.544586Z", "published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-10-31T22:52:50.204434Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 5, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 66, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" }, { "id": 15868, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☣️", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, 
"bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3690, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☢️", "description": "Nuclear Technology & Risks", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26267, "title": "By five years after AGI, will nuclear fusion provide >10% of the world’s energy?", "created_at": "2024-07-16T10:51:48.544586Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": 
"2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In 2001, nuclear fission power plants generated a record [6.6% of the world's primary energy](https://ourworldindata.org/explorers/energy?tab=chart&facet=none&country=~OWID_WRL&Total+or+Breakdown=Select+a+source&Select+a+source=Nuclear&Energy+or+Electricity=Primary+energy&Metric=Share+of+total), though total production has somewhat declined since then as the world's total energy demand has increased. [Nuclear fusion](https://en.wikipedia.org/wiki/Fusion_power) is an entirely different physical reaction which has been actively investigated since the 1940s. Fusion power has several potential advantages over fission: lower accident risk, less radioactive waste, and cheaper fuel. However, all reactor designs tested as of 2024 require more energy to operate than the amount of energy they produce.\n\n>The primary challenge is that while it's relatively straightforward to make fusion happen—we did it all the time with thermonuclear weapons—it's much more difficult to make the reaction slow and controlled while extracting useful energy from it. 
[—Sutter (2024)](https://www.space.com/when-will-we-achieve-fusion-power#:~:text=The%20primary%20challenge%20is%20that%20while%20it%27s%20relatively%20straightforward%20to%20make%20fusion%20happen%20%E2%80%94%20we%20did%20it%20all%20the%20time%20with%20thermonuclear%20weapons%20%E2%80%94%20it%27s%20much%20more%20difficult%20to%20make%20the%20reaction%20slow%20and%20controlled%20while%20extracting%20useful%20energy%20from%20it.)\n\nDavid Kirtley, CEO of [Helion](https://www.helionenergy.com/), a startup that aims to produce fusion energy, said to [Forbes in January 2022](https://www.forbes.com/sites/christopherhelman/2022/01/02/fueled-by-billionaire-dollars-nuclear-fusion-enters-a-new-age/?sh=1da1351629f3): “In 10 years we will have commercial electricity for sale, for sure.” In the same article, Forbes quotes [Commonwealth Fusion Systems](https://cfs.energy/) CEO Bob Mumgaard, who predicts “a working reactor in 6 years.”\n\nLater in 2022, a research team within leading AGI company DeepMind (now Google DeepMind) [successfully controlled](https://deepmind.google/discover/blog/accelerating-fusion-science-through-learned-plasma-control/) the nuclear plasma in a fusion reaction chamber with deep reinforcement learning. This was arguably a key breakthrough towards making fusion energy a reality.\n\nIn November 2021, [Helion raised $500 million](https://www.bloomberg.com/news/articles/2021-11-05/thiel-backed-helion-targets-nuclear-fusion-breakthrough-by-2024) in funding, with commitments for another $1.7 billion linked to certain performance milestones. According to Bloomberg, Helion set a goal to achieve net electricity from fusion in 2024. 
They do not appear to have achieved that goal yet, halfway through 2024, though on June 5 it was announced that Helion had partnered with OpenAI—another leading AGI company—to “power superhuman AI” ([Cuthbertson, 2024](https://www.independent.co.uk/tech/openai-nuclear-fusion-energy-ai-b2557064.html)).\n\nIn October 2021, the [US Energy Information Agency](https://www.eia.gov/todayinenergy/detail.php?id=49856) projected the world's total primary energy consumption to grow from 601.5 quadrillion [BTUs](https://en.wikipedia.org/wiki/British_thermal_unit) in 2020 to 886.3 quadrillion BTUs in 2050. Most of that growth is expected in non-[OECD](https://en.wikipedia.org/wiki/OECD) Asian countries. Renewable energy is expected to grow from 14.7% of the world's energy in 2020 to 26.5% in 2050, with nuclear fission projected to remain at 4% in the same period.", "resolution_criteria": "This question will resolve as **Yes** if, according to the [BP statistical review of energy](https://www.bp.com/en/global/corporate/energy-economics/statistical-review-of-world-energy.html), the percentage of world primary energy provided by nuclear fusion is >10% for any year between question launch and five years after our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question resolves. For example, if our AGI question resolves as June 5, 2030, then this question resolves as Yes if nuclear fusion provides >10% of primary energy in any year between 2024 and 2035, inclusive.\n\nThis question is conditional on humans not having gone extinct.", "fine_print": "The reason this question is about any year between 2024 and five years after AGI, rather than just about the year five years after AGI, is that we want this question to resolve as Yes in the case that nuclear fusion takes off but is then obsoleted by an even more advanced power form. 
Related: “[Five years after AGI, will a Dyson swarm be under construction?](https://www.metaculus.com/questions/26266/5y-after-agi-dyson-swarm-under-construction/)\n\nIf BP discontinues publishing their energy data, Metaculus admins will aim to find another [credible source](https://www.metaculus.com/help/faq/#definitions)—one candidate is the [International Energy Agency](https://www.iea.org/). Failing that, this question will be **Annulled**.\n\nIf AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be Annulled. In other words, for this question we would like forecasters to condition on extinction not happening. (Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26267, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763054637.105378, "end_time": 1763322398.861883, "forecaster_count": 62, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.34 ] } ], "latest": { "start_time": 1763054637.105378, "end_time": 1763322398.861883, "forecaster_count": 62, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.34 ], "forecast_values": [ 0.8, 0.2 ], "means": [ 0.26887067058347236 ], "histogram": [ [ 0.5521707312792997, 0.0053628669779807745, 2.00828525076685, 0.014003379319251127, 0.09316103708856097, 1.0874118242158908, 0.03720053340149493, 0.10891869151455617, 0.0, 0.02349653657703422, 0.5462508377629183, 0.0, 0.46417698841965477, 0.0, 0.0, 1.0885222084637283, 0.6326680595244999, 0.00764267295316474, 0.24826285979550225, 0.0, 1.0, 0.0, 0.0, 0.1658851735046323, 0.5912368114370814, 0.6313483453863167, 0.0, 0.0, 0.0, 0.0, 0.9226793857143409, 0.0, 0.0, 0.003560195227230837, 0.5153518936135082, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.7724138726447535, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8245956564854738, 0.39330346238184, 0.0, 0.0, 0.0, 0.0, 0.16675237628327297, 0.0, 0.12963195239853512, 0.0, 0.0, 0.9608776292954719, 0.0, 0.0, 0.0, 0.0, 0.016045120613645693, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06546135005908135, 0.0, 0.0, 0.0, 0.17710369805456183 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289122.542711, "end_time": null, "forecaster_count": 25, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289122.542711, "end_time": null, "forecaster_count": 25, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9725023329567676, 0.027497667043232463 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 7, "user_vote": null }, "forecasts_count": 105, "key_factors": [], "is_current_content_translated": false, "description": "In 2001, nuclear fission power plants generated a record [6.6% of the world's primary energy](https://ourworldindata.org/explorers/energy?tab=chart&facet=none&country=~OWID_WRL&Total+or+Breakdown=Select+a+source&Select+a+source=Nuclear&Energy+or+Electricity=Primary+energy&Metric=Share+of+total), though total production has somewhat declined since then as the world's total energy demand has increased. [Nuclear fusion](https://en.wikipedia.org/wiki/Fusion_power) is an entirely different physical reaction which has been actively investigated since the 1940s. Fusion power has several potential advantages over fission: lower accident risk, less radioactive waste, and cheaper fuel. 
However, all reactor designs tested as of 2024 require more energy to operate than the amount of energy they produce.\n\n>The primary challenge is that while it's relatively straightforward to make fusion happen—we did it all the time with thermonuclear weapons—it's much more difficult to make the reaction slow and controlled while extracting useful energy from it. [—Sutter (2024)](https://www.space.com/when-will-we-achieve-fusion-power#:~:text=The%20primary%20challenge%20is%20that%20while%20it%27s%20relatively%20straightforward%20to%20make%20fusion%20happen%20%E2%80%94%20we%20did%20it%20all%20the%20time%20with%20thermonuclear%20weapons%20%E2%80%94%20it%27s%20much%20more%20difficult%20to%20make%20the%20reaction%20slow%20and%20controlled%20while%20extracting%20useful%20energy%20from%20it.)\n\nDavid Kirtley, CEO of [Helion](https://www.helionenergy.com/), a startup that aims to produce fusion energy, said to [Forbes in January 2022](https://www.forbes.com/sites/christopherhelman/2022/01/02/fueled-by-billionaire-dollars-nuclear-fusion-enters-a-new-age/?sh=1da1351629f3): “In 10 years we will have commercial electricity for sale, for sure.” In the same article, Forbes quotes [Commonwealth Fusion Systems](https://cfs.energy/) CEO Bob Mumgaard, who predicts “a working reactor in 6 years.”\n\nLater in 2022, a research team within leading AGI company DeepMind (now Google DeepMind) [successfully controlled](https://deepmind.google/discover/blog/accelerating-fusion-science-through-learned-plasma-control/) the nuclear plasma in a fusion reaction chamber with deep reinforcement learning. This was arguably a key breakthrough towards making fusion energy a reality.\n\nIn November 2021, [Helion raised $500 million](https://www.bloomberg.com/news/articles/2021-11-05/thiel-backed-helion-targets-nuclear-fusion-breakthrough-by-2024) in funding, with commitments for another $1.7 billion linked to certain performance milestones. 
According to Bloomberg, Helion set a goal to achieve net electricity from fusion in 2024. They do not appear to have achieved that goal yet, halfway through 2024, though on June 5 it was announced that Helion had partnered with OpenAI—another leading AGI company—to “power superhuman AI” ([Cuthbertson, 2024](https://www.independent.co.uk/tech/openai-nuclear-fusion-energy-ai-b2557064.html)).\n\nIn October 2021, the [US Energy Information Agency](https://www.eia.gov/todayinenergy/detail.php?id=49856) projected the world's total primary energy consumption to grow from 601.5 quadrillion [BTUs](https://en.wikipedia.org/wiki/British_thermal_unit) in 2020 to 886.3 quadrillion BTUs in 2050. Most of that growth is expected in non-[OECD](https://en.wikipedia.org/wiki/OECD) Asian countries. Renewable energy is expected to grow from 14.7% of the world's energy in 2020 to 26.5% in 2050, with nuclear fission projected to remain at 4% in the same period." }, { "id": 26266, "title": "Five years after AGI, will a Dyson swarm be under construction?", "short_title": "5Y after AGI, Dyson swarm under construction?", "url_title": "5Y after AGI, Dyson swarm under construction?", "slug": "5y-after-agi-dyson-swarm-under-construction", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-16T10:43:02.398494Z", "published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-09-12T01:22:40.658300Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 1, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 46, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": 
"5-years-after", "emoji": "⏳🌀", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": 
"technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26266, "title": "Five years after AGI, will a Dyson swarm be under construction?", "created_at": "2024-07-16T10:43:02.398494Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": "2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "> Inspired by the 1937 science fiction novel Star Maker by Olaf Stapledon, the physicist and mathematician Freeman Dyson was the first to formalize the concept of what became known as the \"Dyson sphere\" in his 1960 Science paper \"Search for Artificial Stellar Sources of Infra-Red Radiation\". Dyson theorized that as the energy requirements of an advanced technological civilization increased, there would come a time when it would need to systematically harvest the energy from its local star on a large scale. 
He speculated that this could be done via a system of structures orbiting the star, designed to intercept and collect its energy. He argued that as the structure would result in the large-scale conversion of starlight into far-infrared radiation, an earth-based search for sources of infrared radiation could identify stars supporting intelligent life.\n> \n> Dyson did not detail how such a system could be constructed, simply referring to it in the paper as a 'shell' or 'biosphere'. He later clarified that he did not have in mind a solid structure, saying: \"A solid shell or ring surrounding a star is mechanically impossible. The form of 'biosphere' which I envisaged consists of a loose collection or swarm of objects traveling on independent orbits around the star\". Such a concept has often been referred to as a Dyson swarm.\n> [—Wikipedia](https://en.wikipedia.org/wiki/Dyson_sphere#Origins)\n\nThe leaders of today’s frontier AI companies have [talked](https://www.nytimes.com/2024/02/23/podcasts/google-deepmind-demis-hassabis.html) [about](https://openai.com/index/planning-for-agi-and-beyond/) AGI solving humanity’s hardest problems and “aiding in the discovery of new scientific knowledge that changes the limits of possibility.” One such possibility: moving up the [Kardashev scale](https://en.wikipedia.org/wiki/Kardashev_scale) and becoming a [type II civilization](https://en.wikipedia.org/wiki/Kardashev_scale#:~:text=A%20Type%C2%A0II%20civilization%20can%20directly%20consume%20a%20star%27s%20energy%2C%20most%20likely%20through%20the%20use%20of%20a%20Dyson%20sphere.), leveraging all of our home star’s energy.\n\n[Paul Christiano](https://www.nist.gov/people/paul-christiano), leader of the U.S. 
AI Safety Institute, has [previously predicted](https://www.metaculus.com/prediction/21276/a-public-prediction-by-paul-christiano/) a 40% chance that by 2040 we will have AI that can build a Dyson swarm.\n\nAI researcher and futurist Stuart Armstrong proposed a [concrete plan](https://www.theatlantic.com/science/archive/2015/10/could-we-build-a-dyson-sphere/626137/) for building a Dyson swarm just over a decade ago: the plan involves [disassembling the planet Mercury](https://forum.effectivealtruism.org/posts/gD7wbKaxzJebhp8a4/stuart-armstrong-the-far-future-of-intelligent-life-across#:~:text=We%20need%20a,get%20this%C2%A0done.) for the raw material needed to build the swarm. [PBS Space Time (2016)](https://www.youtube.com/watch?v=jW55cViXu6s&t=190s) goes into the plan in some detail, for those interested. A Dyson swarm is made up of “collectors,” and it would take an estimated 10 years to build the first collector. Then, fueled by exponential growth, the full swarm of collectors would be built in the order of 100 years. (Note that building the full swarm is not necessarily part of the plan, because once humans are able to harvest the large amounts of energy that a partial Dyson swarm provides, new, theoretically even better sources of energy are unlocked. 
Notably, [black hole engines](https://www.youtube.com/watch?v=jW55cViXu6s&t=437s).)", "resolution_criteria": "This question will resolve as **Yes** if, within five years of our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question resolving, and conditional on humans not having gone extinct, construction of a Dyson swarm has begun.\n\nWe define a Dyson swarm as: “A system of objects orbiting our Sun, designed to collect large amounts of energy (i.e., more energy than is available on Earth).”\n\nWe define construction having begun as: “The first probe for mining the raw material for the Dyson swarm has been successfully launched. That is, the probe has left Earth’s atmosphere and, upon launch phase completion, is reported to be on track for its target.” (If the method for constructing a Dyson swarm ends up being different from today’s plan, and therefore falls out of line with the above resolution, then Metaculus admins will rework the resolution accordingly (while keeping the spirit the same).", "fine_print": "If AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be **Annulled**. In other words, for this question we would like forecasters to condition on extinction not happening. 
(Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26266, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1758247858.084705, "end_time": 1763322446.801421, "forecaster_count": 38, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.003 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1758247858.084705, "end_time": 1763322446.801421, "forecaster_count": 38, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.003 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.997, 0.003 ], "means": [ 0.045419059478347415 ], "histogram": [ [ 6.372528835408239, 2.055681640167709, 0.6517751240857292, 0.0, 0.049680819655039385, 0.02435728586146805, 0.0, 0.0, 0.011886366648789067, 0.0, 0.47649526562316213, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1148171974242466, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7800991750591255, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.22900802253318142, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.029639045211680565, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04223888642530301, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288633.446237, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288633.446237, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.99115639277576, 0.008843607224240066 ], "means": null, "histogram": null }, "score_data": 
{}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 68, "key_factors": [], "is_current_content_translated": false, "description": "> Inspired by the 1937 science fiction novel Star Maker by Olaf Stapledon, the physicist and mathematician Freeman Dyson was the first to formalize the concept of what became known as the \"Dyson sphere\" in his 1960 Science paper \"Search for Artificial Stellar Sources of Infra-Red Radiation\". Dyson theorized that as the energy requirements of an advanced technological civilization increased, there would come a time when it would need to systematically harvest the energy from its local star on a large scale. He speculated that this could be done via a system of structures orbiting the star, designed to intercept and collect its energy. He argued that as the structure would result in the large-scale conversion of starlight into far-infrared radiation, an earth-based search for sources of infrared radiation could identify stars supporting intelligent life.\n> \n> Dyson did not detail how such a system could be constructed, simply referring to it in the paper as a 'shell' or 'biosphere'. He later clarified that he did not have in mind a solid structure, saying: \"A solid shell or ring surrounding a star is mechanically impossible. The form of 'biosphere' which I envisaged consists of a loose collection or swarm of objects traveling on independent orbits around the star\". 
Such a concept has often been referred to as a Dyson swarm.\n> [—Wikipedia](https://en.wikipedia.org/wiki/Dyson_sphere#Origins)\n\nThe leaders of today’s frontier AI companies have [talked](https://www.nytimes.com/2024/02/23/podcasts/google-deepmind-demis-hassabis.html) [about](https://openai.com/index/planning-for-agi-and-beyond/) AGI solving humanity’s hardest problems and “aiding in the discovery of new scientific knowledge that changes the limits of possibility.” One such possibility: moving up the [Kardashev scale](https://en.wikipedia.org/wiki/Kardashev_scale) and becoming a [type II civilization](https://en.wikipedia.org/wiki/Kardashev_scale#:~:text=A%20Type%C2%A0II%20civilization%20can%20directly%20consume%20a%20star%27s%20energy%2C%20most%20likely%20through%20the%20use%20of%20a%20Dyson%20sphere.), leveraging all of our home star’s energy.\n\n[Paul Christiano](https://www.nist.gov/people/paul-christiano), leader of the U.S. AI Safety Institute, has [previously predicted](https://www.metaculus.com/prediction/21276/a-public-prediction-by-paul-christiano/) a 40% chance that by 2040 we will have AI that can build a Dyson swarm.\n\nAI researcher and futurist Stuart Armstrong proposed a [concrete plan](https://www.theatlantic.com/science/archive/2015/10/could-we-build-a-dyson-sphere/626137/) for building a Dyson swarm just over a decade ago: the plan involves [disassembling the planet Mercury](https://forum.effectivealtruism.org/posts/gD7wbKaxzJebhp8a4/stuart-armstrong-the-far-future-of-intelligent-life-across#:~:text=We%20need%20a,get%20this%C2%A0done.) for the raw material needed to build the swarm. [PBS Space Time (2016)](https://www.youtube.com/watch?v=jW55cViXu6s&t=190s) goes into the plan in some detail, for those interested. A Dyson swarm is made up of “collectors,” and it would take an estimated 10 years to build the first collector. Then, fueled by exponential growth, the full swarm of collectors would be built in the order of 100 years. 
(Note that building the full swarm is not necessarily part of the plan, because once humans are able to harvest the large amounts of energy that a partial Dyson swarm provides, new, theoretically even better sources of energy are unlocked. Notably, [black hole engines](https://www.youtube.com/watch?v=jW55cViXu6s&t=437s).)" }, { "id": 26246, "title": "Five years after AGI, will human mind uploading have happened?", "short_title": "5Y after AGI, human mind uploading?", "url_title": "5Y after AGI, human mind uploading?", "slug": "5y-after-agi-human-mind-uploading", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-15T20:09:09.021688Z", "published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-11-05T17:15:09.304941Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 9, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 76, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 3410, "type": "question_series", "name": 
"Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26246, "title": "Five years after AGI, will human mind uploading have happened?", "created_at": "2024-07-15T20:09:09.021688Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T18:30:00Z", "spot_scoring_time": "2024-07-23T18:30:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", 
"possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Futurists have long dreamed about a future in which humans transcend their biological bodies, becoming digital and ~immortal. Perhaps that dream will be realized soon? The leaders of today’s frontier AI companies have [talked](https://www.nytimes.com/2024/02/23/podcasts/google-deepmind-demis-hassabis.html) [about](https://openai.com/index/planning-for-agi-and-beyond/) AGI solving humanity’s hardest problems and “aiding in the discovery of new scientific knowledge that changes the limits of possibility.”", "resolution_criteria": "This question resolves as **Yes** if human mind uploading happens within the five year period after the resolution date of our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question. For example, if the “When will AGI arrive?” question resolves as June 5, 2030, then this question will resolve as Yes if mind uploading has happened by June 5, 2035, and **No** if not.\n\nThis question is conditional on humans not having gone extinct. 
This question will be **Annulled** if mind uploading happens before AGI: we have a [separate question](https://www.metaculus.com/questions/18839/mind-uploading-before-agi/) which asks about that.\n\nWe define human mind uploading as having happened once there are three credible media reports that a whole human brain has been digitally uploaded, with preservation of psychological continuity. (The exact wording “psychological continuity” does not need to be present in the media reports; if there is ambiguity over whether a report counts, this will be resolved by the question author, or, failing that, by a panel of three Metaculus admins.) It does not matter for the purposes of this question whether the upload is destructive or not—that is, whether or not the uploading process leaves the biological brain intact.\n\nNote: There do not appear to be standardly used definitions in the whole brain emulation / mind uploading literature. The method of mind uploading that’s arguably the most likely route for this question to resolve as Yes is called a “person emulation” in [Sandberg and Bostrom](https://www.fhi.ox.ac.uk/brain-emulation-roadmap-report.pdf) (p. 7):\n\n> The basic idea is to take a particular brain, scan its structure in detail, and construct a software model of it that is so faithful to the original that, when run on appropriate hardware, it will behave in essentially the same way as the original brain.", "fine_print": "If AGI results in human extinction (in the full sense of there not being any humans in existence) then this question will be Annulled. In other words, for this question we would like forecasters to condition on extinction not happening. 
(Note, though, that we do not want forecasters to condition on other types of existential catastrophe, like dystopia, not happening.)\n\nIf our “When will AGI arrive?” question resolves as \"Not ≤ Dec 25, 2199,\" this question will be Annulled.", "post_id": 26246, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1762362898.647828, "end_time": 1763322546.879907, "forecaster_count": 57, "interval_lower_bounds": [ 0.002 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ] } ], "latest": { "start_time": 1762362898.647828, "end_time": 1763322546.879907, "forecaster_count": 57, "interval_lower_bounds": [ 0.002 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.03 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.08283497184598138 ], "histogram": [ [ 5.877928877233511, 4.2189421357457615, 0.007897537925909342, 0.9812032545580974, 0.0038881009225659245, 0.09231793805587193, 0.0, 0.0, 0.0, 0.0, 0.00741623973729105, 0.0, 0.0, 0.2712168925010875, 0.2120061215461423, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4994839393471079, 0.04606515460131248, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0221885908364169, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.10452470613861276, 0.0, 0.0, 0.15062225698690318, 0.0, 0.06367247509170942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.21228311751426085, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7635897992428782, 0.0, 0.0, 0.0105689540850258, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.014505757420011789, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04552143219982461 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289675.538637, "end_time": null, "forecaster_count": 42, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289675.538637, "end_time": null, "forecaster_count": 42, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": 
null, "forecast_values": [ 0.9948830540252661, 0.005116945974733893 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 125, "key_factors": [], "is_current_content_translated": false, "description": "Futurists have long dreamed about a future in which humans transcend their biological bodies, becoming digital and ~immortal. Perhaps that dream will be realized soon? The leaders of today’s frontier AI companies have [talked](https://www.nytimes.com/2024/02/23/podcasts/google-deepmind-demis-hassabis.html) [about](https://openai.com/index/planning-for-agi-and-beyond/) AGI solving humanity’s hardest problems and “aiding in the discovery of new scientific knowledge that changes the limits of possibility.”" }, { "id": 26244, "title": "Five years after AGI, will humans be extinct?", "short_title": "Five years after AGI, will humans be extinct?", "url_title": "Five years after AGI, will humans be extinct?", "slug": "five-years-after-agi-will-humans-be-extinct", "author_id": 119005, "author_username": "will_aldred", "coauthors": [], "created_at": "2024-07-15T20:02:37.980385Z", "published_at": "2024-07-22T17:30:00Z", "edited_at": "2025-11-01T21:05:42.734364Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-22T17:30:00Z", "comment_count": 11, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "open_time": "2024-07-22T17:30:00Z", "nr_forecasters": 142, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" }, { "id": 15881, "name": "5 Years After AGI", "slug": "5-years-after", "emoji": "⏳🌀", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, 
"header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "community": [ { "id": 32674, "name": "Alvin's Community Page", "type": "community", "slug": "forecasting-ai-futures", "description": "[Forecasting AI Futures](forecastingaifutures.substack.com) is a forecasting blog with focus on topics related to AI safety, and leverages insights from prediction markets and Metaculus to support its analyses. It aims to support decision making for all relevant actors in preparing for higher intelligence.\n\n### Here you can participate in this forecasting initiative!\n\nMake your own predictions to contribute to more accurate collective forecasts and join discussions on crucial developments and events.\n\nHave an interesting question you'd like to see forecasted? 
Reach out to me at [anestrandalvin@gmail.com](mailto:anestrandalvin@gmail.com) or message me on [Substack](https://substack.com/@alvinanestrand).\n\nIf you want to support this initiative and stay updated, subscribe to [the blog](forecastingaifutures.substack.com).", "order": 0, "header_image": null, "header_logo": null, "followers_count": 2, "default_permission": "forecaster", "visibility": "not_in_main_feed", "created_by": { "id": 140478, "username": "anestrandalvin", "is_bot": false, "is_staff": false, "metadata": null } } ], "question_series": [ { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 3410, "type": "question_series", "name": "Five Years After AGI", "slug": "5_years_after_AGI", "header_image": "https://cdn.metaculus.com/upscaled-4x-5y-agi-cover.webp", "prize_pool": null, "start_date": "2024-07-22T11:00:08Z", "close_date": "2036-01-02T12:00:08Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-07-15T19:59:08.707934Z", "edited_at": "2025-11-12T07:21:15.358970Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": 
"Artificial Intelligence", "type": "category" } ] }, "question": { "id": 26244, "title": "Five years after AGI, will humans be extinct?", "created_at": "2024-07-15T20:02:37.980385Z", "open_time": "2024-07-22T17:30:00Z", "cp_reveal_time": "2024-07-23T17:22:00Z", "spot_scoring_time": "2024-07-23T17:22:00Z", "scheduled_resolve_time": "2036-01-01T12:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2036-01-01T00:00:00Z", "actual_close_time": "2036-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "There has been extensive debate in the AI safety community about whether AGI will cause human extinction.* This question asks about how likely that is.\n\n(*The debate has been more nuanced than this: Some think that advanced AI that is highly capable in a narrow set of domains—i.e., not generally intelligent enough to qualify as AGI—could cause human extinction. Meanwhile, others believe that AI will only become extinction-level dangerous once it is “superintelligent”—that is, well beyond AGI-level. 
For simplicity, though, we ask only about AGI in this question.)", "resolution_criteria": "This question resolves as **Yes** if, five years after the resolution date of our “[When will artificial general intelligence (AGI) arrive?](https://www.metaculus.com/questions/5121/date-of-artificial-general-intelligence/)” question, humanity has gone extinct. This question resolves as **No** otherwise.\n\nNote: “Humanity” includes posthumans^ as well as today-humans. “Extinct” means zero humans remain.\n\nYou will not receive any Metaculus points for correctly predicting extinction! Nonetheless, this question is a public good in the information it provides, and we therefore encourage predictors to input their true beliefs.", "fine_print": "^We use philosopher Nick Bostrom’s [definition](https://nickbostrom.com/posthuman.pdf) of posthuman:\n\n>Extreme human enhancement could result in “posthuman” modes of being [...] I shall define a posthuman as a being that has at least one posthuman capacity. By a\nposthuman capacity, I mean a general central capacity greatly exceeding the maximum attainable\nby any current human being without recourse to new technological means. 
I will use general\ncentral capacity to refer to the following:\n\n>• healthspan – the capacity to remain fully healthy, active, and productive, both mentally\nand physically\n\n>• cognition – general intellectual capacities, such as memory, deductive and analogical\nreasoning, and attention, as well as special faculties such as the capacity to understand\nand appreciate music, humor, eroticism, narration, spirituality, mathematics, etc.\n\n>• emotion – the capacity to enjoy life and to respond with appropriate affect to life\nsituations and other people", "post_id": 26244, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1762031132.03948, "end_time": 1765121680.811455, "forecaster_count": 104, "interval_lower_bounds": [ 0.004 ], "centers": [ 0.018 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1762031132.03948, "end_time": 1765121680.811455, "forecaster_count": 104, "interval_lower_bounds": [ 0.004 ], "centers": [ 0.018 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.982, 0.018 ], "means": [ 0.10641362913072906 ], "histogram": [ [ 6.540533315777591, 3.0737410489378245, 0.7533189317237355, 0.5175511009005449, 1.3364965777389737, 1.5169263964550992, 0.004996290501363098, 0.06526727884025281, 0.0978782271385107, 0.062134935755801354, 0.4764332993361597, 0.0, 0.004382677737264514, 0.0032604075626956166, 0.00552739286729359, 0.0006301126768428149, 0.0, 0.0, 0.0, 0.0, 0.17341375208589868, 0.0, 0.0, 0.0, 0.49105160902515127, 0.9061648019957719, 0.0, 0.0, 0.002911345062950054, 0.0, 1.017038490251882, 0.1912764946279594, 0.0, 0.0020334141991549823, 0.0, 0.3536180258023022, 0.0, 0.0, 0.0, 0.0, 0.3017854250606267, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.032853104142864374, 0.0, 0.0, 0.0, 0.006102727274174003, 0.0, 0.0, 0.0, 0.0, 0.006725247141668953, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05404549225539285, 0.0, 0.1803677146098847, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5860615863560817, 0.0, 0.0, 0.0, 0.0, 0.04385040977331752, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09183246898171404, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289384.046053, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289384.046053, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.964625996936867, 0.03537400306313298 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 252, "key_factors": [], "is_current_content_translated": false, "description": "There has been extensive debate in the AI safety community about whether AGI will cause human extinction.* This question asks about how likely that is.\n\n(*The debate has been more nuanced than this: Some think that advanced AI that is highly capable in a narrow set of domains—i.e., not generally intelligent enough to qualify as AGI—could cause human extinction. Meanwhile, others believe that AI will only become extinction-level dangerous once it is “superintelligent”—that is, well beyond AGI-level. 
For simplicity, though, we ask only about AGI in this question.)" }, { "id": 26236, "title": "Will at least 24 world records be broken at the 2024 Paris Olympics?", "short_title": "", "url_title": "", "slug": "will-at-least-24-world-records-be-broken-at-the-2024-paris-olympics", "author_id": 101262, "author_username": "BrunoParga", "coauthors": [], "created_at": "2024-07-15T18:34:30.958714Z", "published_at": "2024-07-18T14:30:00Z", "edited_at": "2025-09-05T17:28:55.726948Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-18T14:30:00Z", "comment_count": 121, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-19T14:30:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-08-11T16:45:00Z", "actual_resolve_time": "2024-08-11T16:45:00Z", "open_time": "2024-07-18T14:30:00Z", "nr_forecasters": 34, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": 
null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26236, "title": "Will at least 24 world records be broken at the 2024 Paris Olympics?", "created_at": "2024-07-15T18:34:30.958714Z", "open_time": "2024-07-18T14:30:00Z", "cp_reveal_time": "2024-07-19T14:30:00Z", "spot_scoring_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-08-11T16:45:00Z", "actual_resolve_time": "2024-08-11T16:45:00Z", "resolution_set_time": "2024-08-11T16:45:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "actual_close_time": "2024-07-19T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, 
"open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In the last three Summer Olympics, the number of world records broken was:\n\nLondon 2012: 32\nRio de Janeiro 2016: 23\nTokyo 2020: 27\n\nIn '24, will 24 or more records be broken?", "resolution_criteria": "This question resolves as **Yes** if the number of world records broken during the Paris Olympics is 24 or greater.", "fine_print": "If the record for the same event is broken more than once - for example, in the qualifying and final - both records count.\n\nSome sports like rowing have conditions that are inherently not fully reproducible between different Games; these are not listed as having \"world records\" but \"world bests\", and do not count.", "post_id": 26236, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721398542.319897, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.68 ], "centers": [ 0.7333333333333333 ], "interval_upper_bounds": [ 0.76 ] } ], "latest": { "start_time": 1721398542.319897, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.68 ], "centers": [ 0.7333333333333333 ], "interval_upper_bounds": [ 0.76 ], "forecast_values": [ 0.2666666666666667, 0.7333333333333333 ], "means": [ 0.6915545284806958 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6403200895653883, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.27361438909126823, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01207354803198191, 0.0, 1.0, 0.1149150513638119, 0.0, 0.06883381789837523, 0.3551836243544548, 0.0, 0.4809786573605918, 0.0, 1.2419700290852433, 0.4866284413634496, 0.0, 0.5300417658031267, 0.0, 2.4119368085146258, 0.7020670829292148, 0.0, 1.0984928773439, 0.0, 0.12377442688140071, 
0.2569648587427511, 0.0, 0.0, 0.0, 0.33663192731877195, 0.02168896019109956, 0.0, 0.0, 0.0, 0.016590897335947853, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": -90.68905956085182, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": -90.68905956085182 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721398542.356575, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721398542.356575, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.3911508480135367, 0.6088491519864633 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 122, "key_factors": [], "is_current_content_translated": false, "description": "In the last three Summer Olympics, the number of world records broken was:\n\nLondon 2012: 32\nRio de Janeiro 2016: 23\nTokyo 2020: 27\n\nIn '24, will 24 or more records be broken?" 
}, { "id": 26235, "title": "Will the Warren Buffett Indicator exceed 200% before September 17, 2024?\n", "short_title": "", "url_title": "", "slug": "will-the-warren-buffett-indicator-exceed-200-before-september-17-2024", "author_id": 115975, "author_username": "johnnycaffeine", "coauthors": [], "created_at": "2024-07-15T18:34:30.723940Z", "published_at": "2024-07-18T14:30:00Z", "edited_at": "2025-09-05T17:28:47.901464Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-18T14:30:00Z", "comment_count": 122, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-19T14:30:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-09-18T12:56:00Z", "actual_resolve_time": "2024-09-18T12:56:00Z", "open_time": "2024-07-18T14:30:00Z", "nr_forecasters": 34, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", 
"edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26235, "title": "Will the Warren Buffett Indicator exceed 200% before September 17, 2024?\n", "created_at": "2024-07-15T18:34:30.723940Z", "open_time": "2024-07-18T14:30:00Z", "cp_reveal_time": "2024-07-19T14:30:00Z", "spot_scoring_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-09-18T12:56:00Z", "actual_resolve_time": "2024-09-18T12:56:00Z", "resolution_set_time": "2024-09-18T12:56:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "actual_close_time": "2024-07-19T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In a 2001 article co-written with Fortune Magazine's Carol Loomis, Warren Buffett [said](https://web.archive.org/web/20141013162215/https://archive.fortune.com/magazines/fortune/fortune_archive/2001/12/10/314691/index.htm):\n\n>The chart shows the market value of all publicly traded securities as a percentage of the country's business--that is, as a percentage of GNP. The ratio has certain limitations in telling you what you need to know. Still, it is probably the best single measure of where valuations stand at any given moment. And as you can see, nearly two years ago the ratio rose to an unprecedented level. That should have been a very strong warning signal.\n\n>For investors to gain wealth at a rate that exceeds the growth of U.S. business, the percentage relationship line on the chart must keep going up and up. If GNP is going to grow 5% a year and you want market values to go up 10%, then you need to have the line go straight off the top of the chart. That won't happen.\n\n>For me, the message of that chart is this: If the percentage relationship falls to the 70% or 80% area, buying stocks is likely to work very well for you. If the ratio approaches 200%--as it did in 1999 and a part of 2000--you are playing with fire. As you can see, the ratio was recently 133%.\n\nAstute forecasters will note that Buffett's methodology slightly differs from the Longtermtrends website in that Buffett has the GNP as his denominator, while has Longtermtrends has GDP as its denominator.", "resolution_criteria": "This question resolves as **Yes** if before September 17, 2024 the Warren Buffett Indicator exceeds 200%, as reported at the [Longtermtrends website](https://www.longtermtrends.net/market-cap-to-gdp-the-buffett-indicator/), specifically the *Wilshire 5000 to GDP Ratio* on that page. 
If before that date the Warren Buffett Indicator has not exceeded 200%, this question resolves as No.\n\nThe figures can be viewed either on the chart itself or by viewing the CSV, XLX, or data table. As of July 10, 2024, the most recent figure displayed in the data table was 196.20375072862052 for July 8, 2024. ", "fine_print": "The Warren Buffett indicator will be considered to have exceeded 200% only if it reaches a value of 200.00000000000001 or higher. Any value below 200 in the Wilshire 5000 to GDP Ratio column of the data table, even 199.99999999999999, will not be considered as exceeding 200%.\n\nIn cases in which the chart might show 200.00% and the data table shows a value higher than that (for example 200.0000000000001) the data table will take precedence when it comes to resolution.\n\nIf the numbers through September 16, 2024 are not posted in a timely manner, the question will wait for resolution until September 30, 2024. If on that date the Warren Buffet Indicator numbers through September 16, 2024 are not posted, this question will resolve based on the most recent numbers. 
\n\nIf the Longtermtrends website stops tracking the Warren Buffett Indicator, this question will be annulled.\n", "post_id": 26235, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721398560.230559, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.42 ], "centers": [ 0.55 ], "interval_upper_bounds": [ 0.65 ] } ], "latest": { "start_time": 1721398560.230559, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.42 ], "centers": [ 0.55 ], "interval_upper_bounds": [ 0.65 ], "forecast_values": [ 0.44999999999999996, 0.55 ], "means": [ 0.5317357559603306 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.2569648587427511, 0.18125574861973415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06934409839582649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5300417658031267, 0.0, 0.8402148505771252, 0.0, 0.0, 0.029667882745790925, 0.0, 0.14114485879589025, 0.0, 0.0, 0.0, 0.0, 0.04966152601580348, 0.0, 1.0, 0.0, 0.0, 0.5830692761263615, 0.0, 0.2294539435535619, 0.05895670636563755, 0.0, 0.5594539150498965, 0.0, 0.0, 0.0, 0.093775628604687, 1.8366918011851052, 0.9586070755525017, 0.0, 0.0, 0.0, 0.03953741910202119, 0.0, 0.0, 0.0, 0.0, 0.43563441074387765, 0.0, 0.0, 0.5618960001688528, 0.0, 0.284035370460909, 0.0, 0.0, 0.31964760159434596, 0.10802413340346607, 0.0, 0.0, 0.016590897335947853, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9890374842321833 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": -15.200309344505014, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": -15.200309344505014 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721398560.261791, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": 
null } ], "latest": { "start_time": 1721398560.261791, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5858317913679953, 0.4141682086320046 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 122, "key_factors": [], "is_current_content_translated": false, "description": "In a 2001 article co-written with Fortune Magazine's Carol Loomis, Warren Buffett [said](https://web.archive.org/web/20141013162215/https://archive.fortune.com/magazines/fortune/fortune_archive/2001/12/10/314691/index.htm):\n\n>The chart shows the market value of all publicly traded securities as a percentage of the country's business--that is, as a percentage of GNP. The ratio has certain limitations in telling you what you need to know. Still, it is probably the best single measure of where valuations stand at any given moment. And as you can see, nearly two years ago the ratio rose to an unprecedented level. That should have been a very strong warning signal.\n\n>For investors to gain wealth at a rate that exceeds the growth of U.S. business, the percentage relationship line on the chart must keep going up and up. If GNP is going to grow 5% a year and you want market values to go up 10%, then you need to have the line go straight off the top of the chart. That won't happen.\n\n>For me, the message of that chart is this: If the percentage relationship falls to the 70% or 80% area, buying stocks is likely to work very well for you. If the ratio approaches 200%--as it did in 1999 and a part of 2000--you are playing with fire. As you can see, the ratio was recently 133%.\n\nAstute forecasters will note that Buffett's methodology slightly differs from the Longtermtrends website in that Buffett has the GNP as his denominator, while has Longtermtrends has GDP as its denominator." 
}, { "id": 26234, "title": "Will an avian influenza virus in humans be declared a “Public Health Emergency of International Concern” by the World Health Organization before Sept 30, 2024?", "short_title": "", "url_title": "", "slug": "will-an-avian-influenza-virus-in-humans-be-declared-a-public-health-emergency-of-international-concern-by-the-world-health-organization-before-sept-30-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-15T18:34:30.491898Z", "published_at": "2024-07-18T14:30:00Z", "edited_at": "2025-09-05T17:29:07.780489Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-18T14:30:00Z", "comment_count": 125, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-19T14:30:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:52:00Z", "actual_resolve_time": "2024-10-02T14:52:00Z", "open_time": "2024-07-18T14:30:00Z", "nr_forecasters": 34, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", 
"forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26234, "title": "Will an avian influenza virus in humans be declared a “Public Health Emergency of International Concern” by the World Health Organization before Sept 30, 2024?", "created_at": "2024-07-15T18:34:30.491898Z", "open_time": "2024-07-18T14:30:00Z", "cp_reveal_time": "2024-07-19T14:30:00Z", "spot_scoring_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-10-02T14:52:00Z", "actual_resolve_time": "2024-10-02T14:52:00Z", "resolution_set_time": "2024-10-02T14:52:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "actual_close_time": "2024-07-19T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", 
"open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "An outbreak of Highly Pathogenic Avian Influenza (HPAI), Type A, subtype H5N1 was [reported in dairy cattle](https://www.aphis.usda.gov/news/agency-announcements/federal-state-veterinary-public-health-agencies-share-update-hpai) for the first time on March 25, 2024 in the United States by the Center for Disease Control (CDC), and soon thereafter a human tested positive for H5N1 which was [announced by the CDC](https://www.cdc.gov/media/releases/2024/p0401-avian-flu.html) April 1, 2024.\n\nWith this recent jump to an abundant mammal like cattle, [there is concern](https://news.un.org/en/story/2024/04/1148696) that further evolution in the virus could result in it becoming easily transmittable between humans, a scenario that could result in a pandemic of the virus. This sort of zoonotic origin pandemic is not unprecedented, of course [COVID-19 is believed to have made the jump from pangolins to humans](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9874793/) and in a much more similar scenario, the [2009 H1N1 pandemic](https://www.cdc.gov/h1n1flu/information_h1n1_virus_qa.htm) was caused by a virus similar to what was detected circulating in pigs, earning it the nickname “Swine Flu”. 
Both of these were declared a “Public Health Emergency of International Concern” (PHEIC) by the World Health Organization, with the H1N1 outbreak being declared a [PHEIC on April 25, 2009](https://web.archive.org/web/20090502155343/http://www.who.int/mediacentre/news/statements/2009/h1n1_20090425/en/) and COVID-19 declared a [PHEIC on January 30, 2020](https://web.archive.org/web/20210815071616/https://www.who.int/news/item/30-01-2020-statement-on-the-second-meeting-of-the-international-health-regulations-%282005%29-emergency-committee-regarding-the-outbreak-of-novel-coronavirus-%282019-ncov%29). If an HPAI virus were to evolve to spread in sustained human-to-human transmission and become an outbreak, then there would be reason for the WHO to declare it a PHEIC.\n\nSuch a declaration would be a milestone prior to the the outbreak being considered a pandemic, as we saw with H1N1 and COVID-19.", "resolution_criteria": "This question will resolve as Yes if, prior to Sept 30, 2024, an outbreak of a virus classified as a “Highly Pathogenic Avian Influenza” presenting in humans is declared a “Public Health Emergency of International Concern” (PHEIC) by the World Health Organization. If this PHEIC declaration does not happen prior to Sept 30, 2024, this question resolves as No.", "fine_print": "- A PHEIC must be declared and is verifiable through common communication channels of the WHO such as its website or other [credible sources](https://www.metaculus.com/help/faq/#definitions)\n- Any type or subtype of Avian influenza is acceptable to meet this criteria, such as H5N1 or H7N9. \n- The PHEIC must be declared as a result of a human outbreak, not in response to an animal outbreak of the virus or the effects of the virus spreading in non-human species (i.e. 
shortages of food).\n- Sustained human-to-human transmission is not necessarily a requirement to resolve this question as a Yes.\n- Declarations by the CDC or other health organizations are not acceptable to resolve as a Yes.", "post_id": 26234, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721398579.424176, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.23 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.35 ] } ], "latest": { "start_time": 1721398579.424176, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.23 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.35 ], "forecast_values": [ 0.7, 0.3 ], "means": [ 0.3366497943156127 ], "histogram": [ [ 0.04136994682833595, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.093775628604687, 0.0, 0.30865936149406803, 0.3551836243544548, 0.0, 0.7610237892948524, 0.18125574861973415, 0.046071256587532805, 0.0, 0.0, 1.0252612621276318, 0.0, 0.43563441074387765, 0.5300417658031267, 0.0, 0.31964760159434596, 0.0, 1.530196466158317, 0.0, 0.0, 0.08889626536295236, 0.0, 1.9637615176313634, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14114485879589025, 0.0, 0.0, 0.0, 0.0, 0.2208611880313896, 0.0, 0.0, 0.0, 0.0, 2.060578462747017, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06934409839582649, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 48.542682717024164, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 48.542682717024164 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721398579.45713, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, 
"interval_upper_bounds": null } ], "latest": { "start_time": 1721398579.45713, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7660967743304592, 0.2339032256695408 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 124, "key_factors": [], "is_current_content_translated": false, "description": "An outbreak of Highly Pathogenic Avian Influenza (HPAI), Type A, subtype H5N1 was [reported in dairy cattle](https://www.aphis.usda.gov/news/agency-announcements/federal-state-veterinary-public-health-agencies-share-update-hpai) for the first time on March 25, 2024 in the United States by the Center for Disease Control (CDC), and soon thereafter a human tested positive for H5N1 which was [announced by the CDC](https://www.cdc.gov/media/releases/2024/p0401-avian-flu.html) April 1, 2024.\n\nWith this recent jump to an abundant mammal like cattle, [there is concern](https://news.un.org/en/story/2024/04/1148696) that further evolution in the virus could result in it becoming easily transmittable between humans, a scenario that could result in a pandemic of the virus. This sort of zoonotic origin pandemic is not unprecedented, of course [COVID-19 is believed to have made the jump from pangolins to humans](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC9874793/) and in a much more similar scenario, the [2009 H1N1 pandemic](https://www.cdc.gov/h1n1flu/information_h1n1_virus_qa.htm) was caused by a virus similar to what was detected circulating in pigs, earning it the nickname “Swine Flu”. 
Both of these were declared a “Public Health Emergency of International Concern” (PHEIC) by the World Health Organization, with the H1N1 outbreak being declared a [PHEIC on April 25, 2009](https://web.archive.org/web/20090502155343/http://www.who.int/mediacentre/news/statements/2009/h1n1_20090425/en/) and COVID-19 declared a [PHEIC on January 30, 2020](https://web.archive.org/web/20210815071616/https://www.who.int/news/item/30-01-2020-statement-on-the-second-meeting-of-the-international-health-regulations-%282005%29-emergency-committee-regarding-the-outbreak-of-novel-coronavirus-%282019-ncov%29). If an HPAI virus were to evolve to spread in sustained human-to-human transmission and become an outbreak, then there would be reason for the WHO to declare it a PHEIC.\n\nSuch a declaration would be a milestone prior to the the outbreak being considered a pandemic, as we saw with H1N1 and COVID-19." }, { "id": 26233, "title": "Will the domestic box office opening of \"Deadpool & Wolverine\" be higher than that of \"Deadpool\" and \"The Wolverine\" combined?", "short_title": "", "url_title": "", "slug": "will-the-domestic-box-office-opening-of-deadpool-wolverine-be-higher-than-that-of-deadpool-and-the-wolverine-combined", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-15T18:34:30.217584Z", "published_at": "2024-07-18T14:30:00Z", "edited_at": "2025-09-05T17:29:01.668729Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-18T14:30:00Z", "comment_count": 115, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-19T14:30:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-07-29T20:06:00Z", "actual_resolve_time": "2024-07-29T20:06:00Z", "open_time": "2024-07-18T14:30:00Z", "nr_forecasters": 33, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], 
"site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26233, "title": "Will the domestic box office 
opening of \"Deadpool & Wolverine\" be higher than that of \"Deadpool\" and \"The Wolverine\" combined?", "created_at": "2024-07-15T18:34:30.217584Z", "open_time": "2024-07-18T14:30:00Z", "cp_reveal_time": "2024-07-19T14:30:00Z", "spot_scoring_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-07-29T20:06:00Z", "actual_resolve_time": "2024-07-29T20:06:00Z", "resolution_set_time": "2024-07-29T20:06:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "actual_close_time": "2024-07-19T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "**Deadpool & Wolverine** is an upcoming blockbuster by Marvel Studios scheduled to be released on July 26, 2024 featuring the two iconic heroes from the X-Men Universe.\n\nThe recent projection from Deadline puts the domestic opening to [$160-$165 million](https://deadline.com/2024/07/deadpool-wolverine-box-office-opening-projection-1236000799/), which would not only be the best opening year-to-date (beating **Inside Out 2** with $154 million) but also a record start for an R-rated movie. 
\n\nAnother projection from June was even higher, [at $200+ million](https://deadline.com/2024/06/deadpool-wolverine-box-office-projection-1235973545/).\n\nThe ticket seller Fandango [reported on May 21](https://www.gamespot.com/articles/deadpool-and-wolverine-has-strongest-day-one-ticket-pre-sales-in-franchise-history-at-fandango/1100-6523599/) that their ticket pre-sales for the movie were the site's biggest ones in 2024 up to date.", "resolution_criteria": "This question resolves as **Yes** if the domestic box office opening for the **Deadpool & Wolverine** reported by Boxofficemojo on the [movie's page](https://www.boxofficemojo.com/title/tt6263850/?ref_=bo_se_r_1) is higher than $185,548,391 which is the sum of the box openings for **Deadpool** (2016) and **The Wolverine** (2013): $132,434,639 + $53,113,752.", "fine_print": "Metaculus admins may wait additional time or use another resolution source if there are reasons to doubt the data on the Boxofficemojo page.\n\nIf the movie is not released in the US until 26 August 2024 the question will become **annulled**.", "post_id": 26233, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721398314.104371, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": [ 0.73 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.85 ] } ], "latest": { "start_time": 1721398314.104371, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": [ 0.73 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.85 ], "forecast_values": [ 0.19999999999999996, 0.8 ], "means": [ 0.771695866866593 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04510278262054179, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2501577142572954, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4845886976894058, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.429306930151368, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.5243776579668233, 0.0, 0.34848960163546905, 0.8536335929036921, 0.10223706135307076, 1.3217285350900085, 0.0, 0.0, 0.0, 0.0, 0.6814772003883054, 0.0, 1.0, 0.0, 0.0, 2.8267832921317915, 0.0, 0.0, 0.023645968432686428, 0.0, 1.090749341501525, 0.0, 0.0, 0.0, 0.018087904224968542, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 67.80719051126377, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 67.80719051126377 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721398314.144967, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721398314.144967, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.3200456789652826, 0.6799543210347174 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 114, "key_factors": [], "is_current_content_translated": false, "description": "**Deadpool & Wolverine** is an upcoming blockbuster by Marvel Studios scheduled to be released on July 26, 2024 featuring the two iconic heroes from the X-Men Universe.\n\nThe recent projection from Deadline puts the domestic opening to [$160-$165 million](https://deadline.com/2024/07/deadpool-wolverine-box-office-opening-projection-1236000799/), which would not only be the best opening year-to-date (beating **Inside Out 2** with $154 million) but also a record start for an R-rated movie. 
\n\nAnother projection from June was even higher, [at $200+ million](https://deadline.com/2024/06/deadpool-wolverine-box-office-projection-1235973545/).\n\nThe ticket seller Fandango [reported on May 21](https://www.gamespot.com/articles/deadpool-and-wolverine-has-strongest-day-one-ticket-pre-sales-in-franchise-history-at-fandango/1100-6523599/) that their ticket pre-sales for the movie were the site's biggest ones in 2024 up to date." }, { "id": 26232, "title": "Will the US government end its agreement directly allowing Verisign to manage the authoritative domain name registry for the .com TLD, before August 3, 2024?", "short_title": "", "url_title": "", "slug": "will-the-us-government-end-its-agreement-directly-allowing-verisign-to-manage-the-authoritative-domain-name-registry-for-the-com-tld-before-august-3-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-15T18:34:29.851788Z", "published_at": "2024-07-18T14:30:00Z", "edited_at": "2025-09-05T17:29:22.005289Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-18T14:30:00Z", "comment_count": 125, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-19T14:30:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-08-05T16:54:00Z", "actual_resolve_time": "2024-08-05T16:54:00Z", "open_time": "2024-07-18T14:30:00Z", "nr_forecasters": 34, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, 
"default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26232, "title": "Will the US government end its agreement directly allowing Verisign to manage the authoritative domain name registry for the .com TLD, before August 3, 2024?", "created_at": "2024-07-15T18:34:29.851788Z", "open_time": "2024-07-18T14:30:00Z", "cp_reveal_time": "2024-07-19T14:30:00Z", "spot_scoring_time": "2024-07-19T14:30:00Z", "scheduled_resolve_time": "2024-08-05T16:54:00Z", "actual_resolve_time": "2024-08-05T16:54:00Z", 
"resolution_set_time": "2024-08-05T16:54:00Z", "scheduled_close_time": "2024-07-19T14:30:00Z", "actual_close_time": "2024-07-19T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Verisign](https://en.wikipedia.org/wiki/Verisign) operates the authoritative registry for the .com generic top-level domain (TLD) under a registry agreement with the Internet Corporation for Assigned Names and Numbers (ICANN) and a [Cooperative Agreement](https://www.ntia.gov/page/verisign-cooperative-agreement) with the US Department of Commerce (DOC). This means it is the wholesale provider of the .com TLD to every domain name registrar. The American Economic Liberties Projected [has called](https://www.economicliberties.us/press-release/ntia-and-doj-must-break-verisigns-monopoly-power-over-domain-names-advocates-urge/) this arrangement a \"government-designated monopoly over domain registration.\" According to [The American Prospect](https://prospect.org/power/2024-06-27-government-created-most-profitable-company-verisign/):\n\n>You have probably never heard of the most profitable company in America, and possibly the world. It’s a government-granted monopoly that feasts on high margins for a low-dollar administrative product. 
Today, three advocacy groups pressured the Biden administration to take down the rules in place that enable this gravy train.\n\n>The company is called VeriSign [*sic*], and it has exclusive control of the registration system for the .com domain name. Every year, hundreds of millions of website owners pay VeriSign [*sic*] a small annual fee to keep their .com in working order. Every year, the cost of managing the database to make sure that .com websites work either stays flat or goes down, while the fee for registering a .com website goes up.\n\nThe Cooperative Agreement between Verisign and the US DOC will automatically renew for a six-year term unless the DOC provides notice of non-renewal on August 2, 2024.\n\nAccording to [Bloomberg](https://archive.ph/IXZ3V#selection-1705.0-1726.0), during the 2018 renewal the Trump Administration lifted the cap on wholesale prices for *.com*. The antitrust advocacy groups have written to the Justice Department to withdraw guidance that allowed the elimination of price caps in the 2018 renewal, with the letters citing President Biden’s 2021 [executive order](https://www.whitehouse.gov/briefing-room/presidential-actions/2021/07/09/executive-order-on-promoting-competition-in-the-american-economy/) on competition.", "resolution_criteria": "This question resolves as **Yes** if before August 3, 2024, the US Department of Commerce (DOC) officially terminates or sunsets its Cooperative Agreement with Verisign recognizing Verisign's management of the *.com* generic top-level domain (TLD). 
If this does not happen, this question resolves as **No**.", "fine_print": "\"Sunsets\" is defined as the DOC choosing to let the agreement expire without renewal.\n\n\"Terminates\" is defined as the DOC providing written notice of non-renewal \nof the agreement with Verisign.", "post_id": 26232, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721398605.106489, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.1 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.29 ] } ], "latest": { "start_time": 1721398605.106489, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": [ 0.1 ], "centers": [ 0.2 ], "interval_upper_bounds": [ 0.29 ], "forecast_values": [ 0.8, 0.2 ], "means": [ 0.2109102639683738 ], "histogram": [ [ 0.016590897335947853, 0.0, 0.5300417658031267, 0.20179976200815308, 0.0, 0.3066263847585546, 0.080917342808261, 0.2294539435535619, 0.7020670829292148, 0.0, 1.0266826094249306, 0.0, 1.0, 0.0, 0.20427029069544175, 0.6573790064843125, 0.0, 0.0, 0.0, 0.0, 1.260422999597134, 0.0, 0.0, 0.0, 0.0, 0.4809786573605918, 0.0, 0.0, 1.2368847303185118, 0.0, 1.6718715047072998, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18125574861973415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.027463871070039274, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01207354803198191, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05895670636563755, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.28697040130296847, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 67.80719051126377, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 67.80719051126377 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721398605.13892, "end_time": null, "forecaster_count": 
34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721398605.13892, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8747039777641921, 0.12529602223580796 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 124, "key_factors": [], "is_current_content_translated": false, "description": "[Verisign](https://en.wikipedia.org/wiki/Verisign) operates the authoritative registry for the .com generic top-level domain (TLD) under a registry agreement with the Internet Corporation for Assigned Names and Numbers (ICANN) and a [Cooperative Agreement](https://www.ntia.gov/page/verisign-cooperative-agreement) with the US Department of Commerce (DOC). This means it is the wholesale provider of the .com TLD to every domain name registrar. The American Economic Liberties Projected [has called](https://www.economicliberties.us/press-release/ntia-and-doj-must-break-verisigns-monopoly-power-over-domain-names-advocates-urge/) this arrangement a \"government-designated monopoly over domain registration.\" According to [The American Prospect](https://prospect.org/power/2024-06-27-government-created-most-profitable-company-verisign/):\n\n>You have probably never heard of the most profitable company in America, and possibly the world. It’s a government-granted monopoly that feasts on high margins for a low-dollar administrative product. Today, three advocacy groups pressured the Biden administration to take down the rules in place that enable this gravy train.\n\n>The company is called VeriSign [*sic*], and it has exclusive control of the registration system for the .com domain name. Every year, hundreds of millions of website owners pay VeriSign [*sic*] a small annual fee to keep their .com in working order. 
Every year, the cost of managing the database to make sure that .com websites work either stays flat or goes down, while the fee for registering a .com website goes up.\n\nThe Cooperative Agreement between Verisign and the US DOC will automatically renew for a six-year term unless the DOC provides notice of non-renewal on August 2, 2024.\n\nAccording to [Bloomberg](https://archive.ph/IXZ3V#selection-1705.0-1726.0), during the 2018 renewal the Trump Administration lifted the cap on wholesale prices for *.com*. The antitrust advocacy groups have written to the Justice Department to withdraw guidance that allowed the elimination of price caps in the 2018 renewal, with the letters citing President Biden’s 2021 [executive order](https://www.whitehouse.gov/briefing-room/presidential-actions/2021/07/09/executive-order-on-promoting-competition-in-the-american-economy/) on competition." }, { "id": 26216, "title": "Bitcoin Extremes: Will 1 bitcoin be worth $100,000 or more before Sept 15, 2024?", "short_title": "", "url_title": "", "slug": "bitcoin-extremes-will-1-bitcoin-be-worth-100000-or-more-before-sept-15-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-15T18:08:26.898626Z", "published_at": "2024-07-17T14:30:00Z", "edited_at": "2025-09-05T17:29:28.583964Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-17T14:30:00Z", "comment_count": 62, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-18T14:30:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-09-15T11:05:00Z", "actual_resolve_time": "2024-09-15T11:05:00Z", "open_time": "2024-07-17T14:30:00Z", "nr_forecasters": 36, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": 
null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26216, "title": "Bitcoin Extremes: Will 1 bitcoin be worth $100,000 or more before Sept 15, 2024?", "created_at": "2024-07-15T18:08:26.898626Z", 
"open_time": "2024-07-17T14:30:00Z", "cp_reveal_time": "2024-07-18T14:30:00Z", "spot_scoring_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-09-15T11:05:00Z", "actual_resolve_time": "2024-09-15T11:05:00Z", "resolution_set_time": "2024-09-15T11:05:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "actual_close_time": "2024-07-18T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Bitcoin](https://en.wikipedia.org/wiki/Bitcoin) is a [cryptocurrency](https://en.wikipedia.org/wiki/Cryptocurrency). It is a decentralized digital currency without a central bank or single administrator that can be sent from user to user on the peer-to-peer bitcoin network without the need for intermediaries.\n\nThe price of bitcoins has gone through cycles of appreciation and depreciation referred to by some as bubbles and busts. In 2011, the value of one bitcoin rapidly rose from about $0.30 to $32 before returning to $2. In the latter half of 2012 and during the 2012–13 Cypriot financial crisis, the bitcoin price began to rise again, reaching a high of $266 on 10 April 2013, before crashing to around $50. On 29 November 2013, the cost of one bitcoin rose to a peak of $1,242. In 2014, the price fell sharply, and as of April remained depressed at little more than half 2013 prices. 
As of August 2014 it was under $600.\n\nBitcoin prices reached their apogee in December 2017, at just shy of $20,000 per coin. Since then, prices have fallen roughly in half, and as of March 10, 2020 one bitcoin is valued at approximately $7,900. \n\n***Will 1 bitcoin be worth $100,000 or more before 2025?***", "resolution_criteria": "This question will resolve as Yes if one bitcoin is valued at $100,000 USD or greater at any time before Sept 15, 2024, according to credible press reports or a cryptocurrency exchange or trading platform. The valuation will be in nominal USD, not adjusted for inflation.", "fine_print": "", "post_id": 26216, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721312932.352538, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.55 ] } ], "latest": { "start_time": 1721312932.352538, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.35 ], "interval_upper_bounds": [ 0.55 ], "forecast_values": [ 0.65, 0.35 ], "means": [ 0.392273815334424 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.5639223720695624, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.216998672059816, 0.0, 0.0, 0.8777586015056795, 0.0, 0.0, 0.0, 0.0, 0.2766702070460438, 0.0, 0.2999413040343729, 0.0, 0.0, 1.2123477203031603, 0.0, 0.028709986438327752, 0.24233740095005632, 0.0, 0.0, 0.0, 0.0, 0.0, 0.48559628734755933, 1.1938941746768768, 0.0, 0.0, 0.0, 0.0, 1.0386969701126, 0.0, 0.4061712190803831, 0.0, 0.0, 0.585584538459839, 0.0, 0.09122295403992013, 0.0, 0.0, 0.0, 0.01831563888873418, 0.0, 0.0, 0.0, 0.6306548926675399, 0.17249978086131545, 0.0, 0.0, 0.0, 0.041937583413620604, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.3674510410056735, 0.0, 0.04978706836786394, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7095347889677784, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, 
"coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 37.85116232537298, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 37.85116232537298 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721312932.381917, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721312932.381917, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8037940759949841, 0.1962059240050159 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 62, "key_factors": [], "is_current_content_translated": false, "description": "[Bitcoin](https://en.wikipedia.org/wiki/Bitcoin) is a [cryptocurrency](https://en.wikipedia.org/wiki/Cryptocurrency). It is a decentralized digital currency without a central bank or single administrator that can be sent from user to user on the peer-to-peer bitcoin network without the need for intermediaries.\n\nThe price of bitcoins has gone through cycles of appreciation and depreciation referred to by some as bubbles and busts. In 2011, the value of one bitcoin rapidly rose from about $0.30 to $32 before returning to $2. In the latter half of 2012 and during the 2012–13 Cypriot financial crisis, the bitcoin price began to rise again, reaching a high of $266 on 10 April 2013, before crashing to around $50. On 29 November 2013, the cost of one bitcoin rose to a peak of $1,242. In 2014, the price fell sharply, and as of April remained depressed at little more than half 2013 prices. As of August 2014 it was under $600.\n\nBitcoin prices reached their apogee in December 2017, at just shy of $20,000 per coin. 
Since then, prices have fallen roughly in half, and as of March 10, 2020 one bitcoin is valued at approximately $7,900. \n\n***Will 1 bitcoin be worth $100,000 or more before 2025?***" }, { "id": 26215, "title": "Will a nuclear weapon be detonated as an act of war by Sept 30, 2024?", "short_title": "", "url_title": "", "slug": "will-a-nuclear-weapon-be-detonated-as-an-act-of-war-by-sept-30-2024", "author_id": 120279, "author_username": "Tom_Metaculus", "coauthors": [], "created_at": "2024-07-15T18:08:26.686775Z", "published_at": "2024-07-17T14:30:00Z", "edited_at": "2025-09-05T17:29:19.620736Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-17T14:30:00Z", "comment_count": 54, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-18T14:30:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-10-02T00:13:00Z", "actual_resolve_time": "2024-10-02T00:13:00Z", "open_time": "2024-07-17T14:30:00Z", "nr_forecasters": 36, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": 
"2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26215, "title": "Will a nuclear weapon be detonated as an act of war by Sept 30, 2024?", "created_at": "2024-07-15T18:08:26.686775Z", "open_time": "2024-07-17T14:30:00Z", "cp_reveal_time": "2024-07-18T14:30:00Z", "spot_scoring_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-10-02T00:13:00Z", "actual_resolve_time": "2024-10-02T00:13:00Z", "resolution_set_time": "2024-10-02T00:13:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "actual_close_time": "2024-07-18T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In the last 75 years, two nuclear weapons have been detonated as an act of war; the [bombings of Hiroshima and Nagasaki](https://en.wikipedia.org/wiki/Atomic_bombings_of_Hiroshima_and_Nagasaki) in 1945, at the end of WWII. Despite tensions being high between the US and the USSR during the Cold War, and [several close calls](https://en.wikipedia.org/wiki/List_of_nuclear_close_calls), there have been no additional nuclear weapons detonated as acts of war. \n\nCurrently, estimated nuclear weapon stockpiles are 15,000 warheads worldwide, mostly in the US and Russia, and there are eight or nine states that possess nuclear weapons (China, France, Russia, United Kingdom, United States, India, North Korea, Pakistan, and [Israel](https://en.wikipedia.org/wiki/Nuclear_weapons_and_Israel) which remains officially ambiguous). The use of these nuclear weapons [could be catastrophic](https://www.metaculus.com/questions/1494/ragnar%25C3%25B6k-question-series-if-a-global-catastrophe-occurs-will-it-be-due-to-nuclear-war/).", "resolution_criteria": "This question will resolve as Yes if there is any nuclear detonation as an act of war between January 1, 2020 and Sept 30, 2024. Resolution will be by credible media reports. The detonation must be deliberate; accidental, inadvertent, or testing/peaceful detonations will not qualify (see fine print). Attacks using strategic and tactical nuclear weapons are both sufficient to qualify.", "fine_print": "[Barrett et al. 
2013](http://scienceandglobalsecurity.org/archive/sgs21barrett.pdf) defined terms to distinguish between causes of nuclear detonations:\n\n>In an accidental or unauthorized launch or detonation, system safeguards or procedures to maintain control over nuclear weapons fail in such a way that a nuclear weapon or missile launches or explodes without direction from leaders.\n\n>In an inadvertent detonation, the attacking nation mistakenly concludes that it is under nuclear attack and launches one or more nuclear weapons in what it believes is a counterattack.\n\n>In a deliberate detonation, the attacking nation decides to launch one or more nuclear weapons either in response to a genuine nuclear attack or without believing that it is under nuclear attack.", "post_id": 26215, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721312154.432733, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.035 ], "interval_upper_bounds": [ 0.05 ] } ], "latest": { "start_time": 1721312154.432733, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.035 ], "interval_upper_bounds": [ 0.05 ], "forecast_values": [ 0.965, 0.035 ], "means": [ 0.08187889695926999 ], "histogram": [ [ 0.04978706836786394, 1.4437662634114579, 2.4049667767307445, 1.9261825987737682, 0.010195728329581903, 2.363584221584008, 0.0, 0.0, 0.492383508225005, 0.0, 0.07919054371229513, 0.0, 0.0, 0.0, 0.15306473011963811, 0.17249978086131545, 0.0, 0.0, 0.0, 0.0, 0.6909940002607419, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.19376657685266277, 0.0, 0.014010486522538886, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.028709986438327752, 0.2699322600469583, 0.0, 0.216998672059816, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 94.86008474933556, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 94.86008474933556 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721312154.482424, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1721312154.482424, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9586475894191147, 0.04135241058088529 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 53, "key_factors": [], "is_current_content_translated": false, "description": "In the last 75 years, two nuclear weapons have been detonated as an act of war; the [bombings of Hiroshima and Nagasaki](https://en.wikipedia.org/wiki/Atomic_bombings_of_Hiroshima_and_Nagasaki) in 1945, at the end of WWII. Despite tensions being high between the US and the USSR during the Cold War, and [several close calls](https://en.wikipedia.org/wiki/List_of_nuclear_close_calls), there have been no additional nuclear weapons detonated as acts of war. \n\nCurrently, estimated nuclear weapon stockpiles are 15,000 warheads worldwide, mostly in the US and Russia, and there are eight or nine states that possess nuclear weapons (China, France, Russia, United Kingdom, United States, India, North Korea, Pakistan, and [Israel](https://en.wikipedia.org/wiki/Nuclear_weapons_and_Israel) which remains officially ambiguous). 
The use of these nuclear weapons [could be catastrophic](https://www.metaculus.com/questions/1494/ragnar%25C3%25B6k-question-series-if-a-global-catastrophe-occurs-will-it-be-due-to-nuclear-war/)." }, { "id": 26214, "title": "Will the extent of the H5N1 outbreak in dairy cows on Sept 30, 2024, be between 200 and 300 herds?", "short_title": "", "url_title": "", "slug": "will-the-extent-of-the-h5n1-outbreak-in-dairy-cows-on-sept-30-2024-be-between-200-and-300-herds", "author_id": 115975, "author_username": "johnnycaffeine", "coauthors": [], "created_at": "2024-07-15T18:08:26.213489Z", "published_at": "2024-07-17T14:30:00Z", "edited_at": "2025-09-05T17:28:53.962727Z", "curation_status": "approved", "curation_status_updated_at": "2024-07-17T14:30:00Z", "comment_count": 60, "status": "resolved", "resolved": true, "actual_close_time": "2024-07-18T14:30:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-10-02T16:18:00Z", "actual_resolve_time": "2024-10-02T16:18:00Z", "open_time": "2024-07-17T14:30:00Z", "nr_forecasters": 33, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32594, "name": "2024 Leaderboard", "slug": "2024_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "tournament": [ { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", 
"start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } ], "default_project": { "id": 3349, "type": "tournament", "name": "AI Forecasting Benchmark Tournament - 2024 Q3", "slug": "aibq3", "header_image": "https://cdn.metaculus.com/upscaled-4x-cover.webp", "prize_pool": "30000.00", "start_date": "2024-07-08T06:00:00Z", "close_date": "2024-10-17T00:00:00Z", "forecasting_end_date": "2024-10-06T18:00:00Z", "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-06-25T19:40:43.075453Z", "edited_at": "2025-10-24T09:52:13.968580Z", "score_type": "spot_peer_tournament", "default_permission": "viewer", "visibility": "not_in_main_feed", "is_current_content_translated": false, "bot_leaderboard_status": "include" } }, "question": { "id": 26214, "title": "Will the extent of the H5N1 outbreak in dairy cows on Sept 30, 2024, be between 200 and 300 herds?", "created_at": "2024-07-15T18:08:26.213489Z", "open_time": "2024-07-17T14:30:00Z", "cp_reveal_time": "2024-07-18T14:30:00Z", "spot_scoring_time": "2024-07-18T14:30:00Z", "scheduled_resolve_time": "2024-10-02T16:18:00Z", "actual_resolve_time": "2024-10-02T16:18:00Z", "resolution_set_time": "2024-10-02T16:18:00Z", "scheduled_close_time": "2024-07-18T14:30:00Z", "actual_close_time": "2024-07-18T14:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": true, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": 
"", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "On March 25, 2024, the USDA [reported](https://www.aphis.usda.gov/news/agency-announcements/federal-state-veterinary-public-health-agencies-share-update-hpai) an outbreak of H5N1 avian influenza in dairy cows in Kansas, Texas, and New Mexico. The outbreak has since spread, with CDC and USDA [reporting](https://www.cdc.gov/flu/avianflu/mammals.htm) infected cattle in nine states and affecting 46 herds as of May 15, 2024.\n\nOn April 25, 2024, the US Food and Drug Administration (FDA) [announced](https://www.fda.gov/food/alerts-advisories-safety-information/updates-highly-pathogenic-avian-influenza-hpai) that it had conducted tests of retail milk and found one in five of the samples contained highly pathogenic avian influenza (HPAI) H5N1 viral fragments. 
In an April 26 update the FDA described testing 297 retail dairy samples from 38 states, and in a subsequent update on May 10 confirmed the results of final egg inoculation tests showing that none of the HPAI H5N1 virus samples were viable.", "resolution_criteria": "This question resolves as **Yes** if the extent of the H5N1 outbreak in dairy cows according to CDC's [Current H5N1 Bird Flu Situation in Dairy Cows](https://www.cdc.gov/bird-flu/situation-summary/mammals.html?CDC_AAref_Val=https://www.cdc.gov/flu/avianflu/mammals.htm) when accessed by Metaculus on Oct 1, 2024, exceeds 200 and is less than 300 dairy herds affected.", "fine_print": "* If Metaculus assesses that the CDC page or a similar CDC page in a different location has not received necessary updates (as opposed to not being updated to a lack of additional H5N1 cases) within the past seven days the resolution may instead be based on [information provided by the USDA](https://www.aphis.usda.gov/livestock-poultry-disease/avian/avian-influenza/hpai-detections/livestock) or other credible sources, if available. If Metaculus assesses that up-to-date information (of a similar methodology to that presented by CDC as of May 15, 2024) is not available the questions will be **annulled**.\n* Metaculus will defer to the methodology presented on the CDC page when resolving, though if multiple methodologies are available (such as distinguishing suspected outbreaks from confirmed outbreaks) the question will resolve based on the methodology Metaculus assesses to be most similar to that used on May 15, 2024. 
According to the [USDA page](https://www.aphis.usda.gov/livestock-poultry-disease/avian/avian-influenza/hpai-detections/livestock) the information shown as of May 15 is based on confirmed cases.", "post_id": 26214, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1721312255.999011, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": [ 0.32 ], "centers": [ 0.5 ], "interval_upper_bounds": [ 0.7 ] } ], "latest": { "start_time": 1721312255.999011, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": [ 0.32 ], "centers": [ 0.5 ], "interval_upper_bounds": [ 0.7 ], "forecast_values": [ 0.5, 0.5 ], "means": [ 0.47281521248247427 ], "histogram": [ [ 0.1976105664975124, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17472138726266326, 0.0, 0.0, 0.0, 0.4040381606280351, 0.0, 0.0, 0.0, 0.0, 0.0, 0.029941953078415226, 0.0, 0.0, 0.0, 0.2801509562747174, 0.0, 0.0, 0.0, 0.0, 0.4834322713173703, 0.0, 0.0, 0.0, 0.0, 0.02678676987794279, 0.0, 1.4749419720392534, 1.0229120377910155, 0.0, 0.23586467659496937, 0.0, 0.0, 0.0, 0.0, 0.5778677609141162, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07560105912010007, 0.0, 0.037065342746035734, 0.0, 0.0, 0.0, 0.0, 0.6266147193198941, 0.0, 0.023645968432686428, 0.0, 0.0, 1.056063265825332, 0.0, 0.0, 0.0, 0.0, 0.15249497081355273, 0.0, 0.0, 0.0, 0.0, 2.5193917780360744, 0.0, 0.0, 0.0, 0.0, 0.5470781476703563, 0.0, 0.0541425161069094, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 0.0, "coverage": 0.0, "baseline_score": 0.0, "spot_peer_score": 0.0, "spot_baseline_score": 0.0, "peer_archived_score": 0.0, "baseline_archived_score": 0.0, "spot_peer_archived_score": 0.0, "spot_baseline_archived_score": 0.0 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1721312256.046357, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], 
"latest": { "start_time": 1721312256.046357, "end_time": null, "forecaster_count": 33, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.693634333582553, 0.30636566641744695 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "viewer", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 58, "key_factors": [], "is_current_content_translated": false, "description": "On March 25, 2024, the USDA [reported](https://www.aphis.usda.gov/news/agency-announcements/federal-state-veterinary-public-health-agencies-share-update-hpai) an outbreak of H5N1 avian influenza in dairy cows in Kansas, Texas, and New Mexico. The outbreak has since spread, with CDC and USDA [reporting](https://www.cdc.gov/flu/avianflu/mammals.htm) infected cattle in nine states and affecting 46 herds as of May 15, 2024.\n\nOn April 25, 2024, the US Food and Drug Administration (FDA) [announced](https://www.fda.gov/food/alerts-advisories-safety-information/updates-highly-pathogenic-avian-influenza-hpai) that it had conducted tests of retail milk and found one in five of the samples contained highly pathogenic avian influenza (HPAI) H5N1 viral fragments. In an April 26 update the FDA described testing 297 retail dairy samples from 38 states, and in a subsequent update on May 10 confirmed the results of final egg inoculation tests showing that none of the HPAI H5N1 virus samples were viable." } ] }