Posts List Oldapi View
We shared this request example with FAB participants: url_qparams = { "limit": count, "offset": offset, "has_group": "false", "order_by": "-activity", "forecast_type": "binary", "project": tournament_id, "status": "open", "type": "forecast", "include_description": "true", } url = f"{api_info.base_url}/questions/" response = requests.get( url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams )
But we don't want to support all of these parameters; the relevant ones are: - order_by - status - project - forecast_type — we ignore this parameter but assume it's binary, since FAB only supports binary questions for now.
GET /api2/questions/?format=api&offset=4920
{ "count": 6399, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=4940", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=4900", "results": [ { "id": 6981, "title": "Will Russia annex Ukrainian territory before 2022?", "short_title": "Russian annexation of Ukraine by 2022", "url_title": "Russian annexation of Ukraine by 2022", "slug": "russian-annexation-of-ukraine-by-2022", "author_id": 115580, "author_username": "Aithir", "coauthors": [], "created_at": "2021-04-06T20:41:55.096130Z", "published_at": "2021-04-17T07:00:00Z", "edited_at": "2025-09-05T17:29:00.916645Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-17T07:00:00Z", "comment_count": 32, "status": "resolved", "resolved": true, "actual_close_time": "2021-11-08T23:00:00Z", "scheduled_close_time": "2021-11-08T23:00:00Z", "scheduled_resolve_time": "2022-01-01T03:28:00Z", "actual_resolve_time": "2022-01-01T03:28:00Z", "open_time": "2021-04-17T07:00:00Z", "nr_forecasters": 128, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32597, "name": "2021 Leaderboard", "slug": "2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, 
"user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 6981, "title": "Will Russia annex Ukrainian territory before 2022?", "created_at": "2021-04-06T20:41:55.096130Z", "open_time": "2021-04-17T07:00:00Z", "cp_reveal_time": "2021-04-17T13:40:00.621363Z", "spot_scoring_time": "2021-04-17T13:40:00.621363Z", "scheduled_resolve_time": "2022-01-01T03:28:00Z", "actual_resolve_time": "2022-01-01T03:28:00Z", "resolution_set_time": "2022-01-01T03:28:00Z", "scheduled_close_time": "2021-11-08T23:00:00Z", "actual_close_time": "2021-11-08T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "After Russia occupied Crimea in 2014 Ukraine reacted by cutting off [nearly 90%](https://www.bloomberg.com/opinion/articles/2021-03-19/russia-vs-ukraine-crimea-s-water-crisis-is-an-impossible-problem-for-putin) of the region's fresh water.\nGiven the geopolitical importance of Crimea and 
the [Black Sea Fleet](https://en.wikipedia.org/wiki/Black_Sea_Fleet) this situation seems intolarable for Russia.\nRecently [Russian troops amassed](https://www.bbc.com/news/world-europe-56616778)\n near the Ukrainian border, potentially indicating a coming conflict. Reactions by NATO and the US specifically indicate a [credible](https://www.dw.com/en/us-asks-russia-to-explain-ukrainian-border-provocations/a-57105593) threat to the Ukrainian territory.", "resolution_criteria": "This question resolves positively if any formal annexations on behalf of the Russian Federation are represented within their [official constitution](http://kremlin.ru/acts/constitution) ([English source](http://www.constitution.ru/en/10003000-01.htm)), specifically, under '[Chapter 3, Federated device](http://kremlin.ru/acts/constitution/item#chapter3)' by 12:00AM January 1, 2022 Moscow Standard Time (GMT+3)", "fine_print": "", "post_id": 6981, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1636401722.510606, "end_time": null, "forecaster_count": 128, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.04 ] } ], "latest": { "start_time": 1636401722.510606, "end_time": null, "forecaster_count": 128, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.02 ], "interval_upper_bounds": [ 0.04 ], "forecast_values": [ 0.98, 0.02 ], "means": [ 0.03670663235959931 ], "histogram": [ [ 0.0, 10.137372307209919, 3.2976181955145085, 1.2715454561652506, 1.792480608133826, 2.2065626200375053, 0.0, 0.0480424651548976, 0.34654676341029406, 0.41752870186414237, 0.16241021534987926, 0.0073589484807744425, 0.2040038858901863, 0.16272686437994863, 0.005016519567872422, 0.00124098336830585, 0.01037390189295894, 0.0011931805525433913, 0.015293128726026542, 0.0, 0.059198677219636536, 0.0, 0.0, 0.0, 0.875142445089744, 0.09889382303426979, 0.0, 0.0, 0.0, 0.0, 5.0200029851686126e-05, 0.0, 0.0, 0.000586859480427528, 0.005803497673608363, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0010442151082370433, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0013290475260089698, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0016372635244738653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00011419072040023296 ] ] }, "score_data": { "peer_score": 15.045159159060013, "coverage": 0.999986077321436, "baseline_score": 88.29057893048454, "spot_peer_score": 4.009246936599457, "peer_archived_score": 15.045159159060013, "baseline_archived_score": 88.29057893048454, "spot_peer_archived_score": 4.009246936599457 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1636401722.542264, "end_time": null, "forecaster_count": 128, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1636401722.542264, "end_time": null, "forecaster_count": 128, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9912682751047287, 0.008731724895271286 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 31, "user_vote": null }, "forecasts_count": 290, "key_factors": [], "is_current_content_translated": false, "description": "After Russia occupied Crimea in 2014 Ukraine reacted by cutting off [nearly 90%](https://www.bloomberg.com/opinion/articles/2021-03-19/russia-vs-ukraine-crimea-s-water-crisis-is-an-impossible-problem-for-putin) of the region's fresh water.\nGiven the geopolitical importance of Crimea and the [Black Sea Fleet](https://en.wikipedia.org/wiki/Black_Sea_Fleet) this situation seems intolarable for Russia.\nRecently [Russian troops amassed](https://www.bbc.com/news/world-europe-56616778)\n near the Ukrainian border, potentially indicating a coming conflict. 
Reactions by NATO and the US specifically indicate a [credible](https://www.dw.com/en/us-asks-russia-to-explain-ukrainian-border-provocations/a-57105593) threat to the Ukrainian territory." }, { "id": 6980, "title": "If a language model is trained with 5x more compute than GPT-3 by the end of 2022, will it be public to end users?", "short_title": "GPT-4 Public by End of 2022", "url_title": "GPT-4 Public by End of 2022", "slug": "gpt-4-public-by-end-of-2022", "author_id": 116062, "author_username": "mtrazzi", "coauthors": [], "created_at": "2021-04-06T20:27:18.578800Z", "published_at": "2021-04-11T23:00:00Z", "edited_at": "2025-09-05T17:29:26.707852Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-11T23:00:00Z", "comment_count": 43, "status": "resolved", "resolved": true, "actual_close_time": "2022-12-31T21:26:00Z", "scheduled_close_time": "2022-12-31T21:26:00Z", "scheduled_resolve_time": "2023-01-01T07:00:00Z", "actual_resolve_time": "2023-01-01T07:00:00Z", "open_time": "2021-04-11T23:00:00Z", "nr_forecasters": 86, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" } ], "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": 
null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" }, { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 6980, "title": "If a language model is trained with 5x more compute than GPT-3 by the end of 2022, will it be public to end users?", "created_at": "2021-04-06T20:27:18.578800Z", "open_time": "2021-04-11T23:00:00Z", "cp_reveal_time": "2021-04-12T17:25:05.613276Z", "spot_scoring_time": "2021-04-12T17:25:05.613276Z", "scheduled_resolve_time": "2023-01-01T07:00:00Z", "actual_resolve_time": "2023-01-01T07:00:00Z", "resolution_set_time": "2023-01-01T07:00:00Z", "scheduled_close_time": "2022-12-31T21:26:00Z", "actual_close_time": "2022-12-31T21:26:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "ambiguous", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "OpenAI 
announced [GPT-2](https://openai.com/blog/better-language-models/) in February 2019, a [language model](https://en.wikipedia.org/wiki/Language_model) (LM) with 1.5B parameters. In June 2020, they [publicized](https://openai.com/blog/openai-api/) an API to a [175B](https://arxiv.org/pdf/2005.14165.pdf)-parameters LM. However, accessing it required filling a form (\"wait list\") to apply for their \"playground\" app or API.", "resolution_criteria": "To resolve positively:\n\n* The model paper should be **published on arXiv or on the ICLR, NeurIPS or ICML** (top machine learning conferences) websites **before the end of 2022**.\n\n* **It should at least be an API or graphical user interface with a language model option** where you could send a text prompt and it returns text. If for instance there is a new Deep Learning model that uses 5x more compute than GPT-3 but does not provide any way to produce text from text, that does not count.\n\n* You should be able to get one API call/send one prompt by **paying less than $1k from a publicly accessible source** on the clear web that you can find using one google search. This rule is to force the accessibility to end-user.\n\n * The $1k limit insists on it being business-to-consumer and not business-to-business.\n\n * The public url from one google search dismisses any leak of the model where you would need to do extra steps yourself or have private information of a token/password protected url (cf. [DeepNude](https://www.theregister.com/2019/06/27/deepfake_nudes_app_pulled/) where you needed to do some hacking to get access to the model after the author removed it publicly)\n\nIf a model meets the first two resolution criteria by 2022, but the third criterion (about public access) is not met, then this question resolves negatively. 
Otherwise, it resolves ambiguously", "fine_print": "", "post_id": 6980, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1672487344.708366, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.11 ] } ], "latest": { "start_time": 1672487344.708366, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.11 ], "forecast_values": [ 0.95, 0.05 ], "means": [ 0.10405348811361777 ], "histogram": [ [ 0.0, 4.740871976492441, 2.0385545086318917, 0.4820946812092099, 0.9796412427136766, 2.3657983835602026, 0.7585705930526657, 0.6073475340014041, 0.0, 0.0, 0.01695036226548519, 1.2139781634499855, 0.26625386030838016, 0.14585398402333088, 0.20342197809775475, 0.16690946789911046, 0.0, 0.011358522086374995, 0.06612687914115949, 0.0008782752490162315, 0.012493602609094254, 0.0, 0.0, 0.001322981755186089, 0.0, 0.4556357207254709, 0.0065324199916913074, 0.02245161071843299, 0.01898280703997906, 0.0, 0.21838840024106568, 0.29696714825170617, 0.0, 0.7545410499506788, 0.0, 0.03628969471386552, 0.0, 0.0, 0.0010872227688318024, 0.11118876700562327, 0.185017173696976, 0.123791229072723, 0.0, 0.08280320485427824, 0.00512503207471792, 0.037869149508909655, 0.0, 0.0, 0.1029390209692679, 0.0, 0.19054909718204532, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.04463644984976473, 0.0, 0.0, 0.012592687257094694, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07688730020434764, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13647181574739514, 0.0005305652088145888, 0.0, 0.0, 0.0, 0.0, 0.0006935976674286746, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05144869349449428 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1672487344.72711, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 
1672487344.72711, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9861551367036424, 0.013844863296357608 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 26, "user_vote": null }, "forecasts_count": 235, "key_factors": [], "is_current_content_translated": false, "description": "OpenAI announced [GPT-2](https://openai.com/blog/better-language-models/) in February 2019, a [language model](https://en.wikipedia.org/wiki/Language_model) (LM) with 1.5B parameters. In June 2020, they [publicized](https://openai.com/blog/openai-api/) an API to a [175B](https://arxiv.org/pdf/2005.14165.pdf)-parameters LM. However, accessing it required filling a form (\"wait list\") to apply for their \"playground\" app or API." }, { "id": 6973, "title": "Will the NBA raise the rim to 10'6\" (or higher) before 2030?", "short_title": "NBA raising the rim by 2030", "url_title": "NBA raising the rim by 2030", "slug": "nba-raising-the-rim-by-2030", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-04-05T17:29:28.307472Z", "published_at": "2021-04-09T07:00:00Z", "edited_at": "2025-10-20T07:33:00.294505Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-09T07:00:00Z", "comment_count": 10, "status": "closed", "resolved": false, "actual_close_time": "2025-10-20T07:33:00Z", "scheduled_close_time": "2025-10-20T07:33:00Z", "scheduled_resolve_time": "2030-01-01T08:33:00Z", "actual_resolve_time": null, "open_time": "2021-04-09T07:00:00Z", "nr_forecasters": 84, "html_metadata_json": null, "projects": { "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, 
"header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 6973, "title": "Will the NBA raise the rim to 10'6\" (or higher) before 2030?", "created_at": "2021-04-05T17:29:28.307472Z", "open_time": "2021-04-09T07:00:00Z", "cp_reveal_time": "2021-04-10T18:08:47.690921Z", "spot_scoring_time": "2021-04-10T18:08:47.690921Z", "scheduled_resolve_time": "2030-01-01T08:33:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2025-10-20T07:33:00Z", "actual_close_time": "2025-10-20T07:33:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, 
"open_upper_bound": false, "open_lower_bound": false, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Basketball](https://en.wikipedia.org/wiki/Basketball) is a team sport played with a 10 foot high rim. The [NBA](https://en.wikipedia.org/wiki/National_Basketball_Association) is the largest professional league in the world. \n\nWhilst making predictions for 2025 [Jeff Davidson](https://www.accountingweb.com/practice/team/9-predictions-for-what-your-world-just-might-look-like-in-2025) predicts that the height of the rim will be raised \"beyond the year 2025, but not too far beyond!\". We interpret that to mean 2030.", "resolution_criteria": "Resolution will be based on the official [NBA rulebook](https://official.nba.com/rule-no-1-court-dimensions-equipment/). If there is no official NBA rulebook available, we will defer to Wikipedia or any other general news sourc", "fine_print": "", "post_id": 6973, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763416537.095672, "end_time": 1764047991.980852, "forecaster_count": 24, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.004 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1763416537.095672, "end_time": 1764047991.980852, "forecaster_count": 24, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.004 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.996, 0.004 ], "means": [ 0.04977569667396961 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289623.902773, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289623.902773, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9995, 0.0005 ], "means": null, "histogram": null }, "score_data": {}, "movement": null 
} } }, "user_permission": "forecaster", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 209, "key_factors": [], "is_current_content_translated": false, "description": "[Basketball](https://en.wikipedia.org/wiki/Basketball) is a team sport played with a 10 foot high rim. The [NBA](https://en.wikipedia.org/wiki/National_Basketball_Association) is the largest professional league in the world. \n\nWhilst making predictions for 2025 [Jeff Davidson](https://www.accountingweb.com/practice/team/9-predictions-for-what-your-world-just-might-look-like-in-2025) predicts that the height of the rim will be raised \"beyond the year 2025, but not too far beyond!\". We interpret that to mean 2030." }, { "id": 6970, "title": "Will vaccinated American citizens be allowed to enter Canada for discretionary travel by June 1st, 2021?", "short_title": "Vaccinated travel to Canada by June?", "url_title": "Vaccinated travel to Canada by June?", "slug": "vaccinated-travel-to-canada-by-june", "author_id": 118063, "author_username": "jakebd42", "coauthors": [], "created_at": "2021-04-04T05:24:30.807154Z", "published_at": "2021-04-21T07:00:00Z", "edited_at": "2025-09-05T17:29:02.317866Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-21T07:00:00Z", "comment_count": 15, "status": "resolved", "resolved": true, "actual_close_time": "2021-05-15T05:20:00Z", "scheduled_close_time": "2021-05-15T05:20:00Z", "scheduled_resolve_time": "2021-06-01T21:59:00Z", "actual_resolve_time": "2021-06-01T21:59:00Z", "open_time": "2021-04-21T07:00:00Z", "nr_forecasters": 62, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32597, "name": "2021 Leaderboard", "slug": "2021_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": 
null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" } ] }, "question": { "id": 6970, "title": "Will vaccinated American citizens be allowed to enter Canada for discretionary travel by June 1st, 2021?", "created_at": "2021-04-04T05:24:30.807154Z", "open_time": "2021-04-21T07:00:00Z", "cp_reveal_time": "2021-04-23T07:00:00Z", "spot_scoring_time": "2021-04-23T07:00:00Z", "scheduled_resolve_time": "2021-06-01T21:59:00Z", "actual_resolve_time": "2021-06-01T21:59:00Z", "resolution_set_time": "2021-06-01T21:59:00Z", "scheduled_close_time": "2021-05-15T05:20:00Z", "actual_close_time": "2021-05-15T05:20:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": 
null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "As of the creation of this question, the US-Canada border remains closed, with Canada allowing foreign nationals to enter for non-essential travels only in [very rare circumstances](https://www.canada.ca/en/immigration-refugees-citizenship/services/coronavirus-covid19/travel-restrictions-exemptions.html).\n\nWill this be significantly revised by June to criteria that any vaccinated American passport-holder could pass?", "resolution_criteria": "The question will resolve positively if entrance criteria for American passport holders is no different than what is was in January 2020, with one exception: additional requirement of proof of vaccination. This will be determined by available information on the Canadian government's website, canada.ca", "fine_print": "", "post_id": 6970, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1621053412.46016, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": [ 0.18 ], "centers": [ 0.27 ], "interval_upper_bounds": [ 0.32 ] } ], "latest": { "start_time": 1621053412.46016, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": [ 0.18 ], "centers": [ 0.27 ], "interval_upper_bounds": [ 0.32 ], "forecast_values": [ 0.73, 0.27 ], "means": [ 0.2491081666792028 ], "histogram": [ [ 0.0, 0.0028115822550663413, 0.5153518936135082, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05647213919671424, 1.2297224043556165, 0.0, 0.0, 0.0, 0.0, 0.4447807702067087, 0.10891869151455617, 0.0, 1.5090597002551052, 0.5521707312792997, 0.058761847271035915, 0.0, 0.1535071897926381, 0.24826285979550225, 0.018296883616726805, 1.6408596324261153, 0.21787566225389812, 1.2738846704704996, 1.2305199116329766, 0.0, 
1.03937999268568, 0.0, 1.3143616250828285, 0.8690344820632586, 0.6326680595244999, 0.8267463668991765, 0.0, 0.026479956599866433, 0.0554399219934353, 0.0, 0.1543412104436131, 0.0, 0.0053628669779807745, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06871034895382641 ] ] }, "score_data": { "peer_score": 5.6997152386611365, "coverage": 0.9836422979613759, "baseline_score": 48.66016335292226, "spot_peer_score": -21.222015428207612, "peer_archived_score": 5.6997152386611365, "baseline_archived_score": 48.66016335292226, "spot_peer_archived_score": -21.222015428207612 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1621053412.513015, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1621053412.513015, "end_time": null, "forecaster_count": 62, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8238616792737476, 0.1761383207262523 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 173, "key_factors": [], "is_current_content_translated": false, "description": "As of the creation of this question, the US-Canada border remains closed, with Canada allowing foreign nationals to enter for non-essential travels only in [very rare circumstances](https://www.canada.ca/en/immigration-refugees-citizenship/services/coronavirus-covid19/travel-restrictions-exemptions.html).\n\nWill this be significantly revised by June to criteria that any vaccinated American passport-holder could pass?" 
}, { "id": 6967, "title": "Will Soylent-like meal replacements be labeled unhealthy?", "short_title": "Soylent and health", "url_title": "Soylent and health", "slug": "soylent-and-health", "author_id": 103600, "author_username": "2e10e122", "coauthors": [], "created_at": "2021-04-03T07:12:31.379892Z", "published_at": "2021-05-07T06:00:00Z", "edited_at": "2025-09-05T17:29:28.107311Z", "curation_status": "approved", "curation_status_updated_at": "2021-05-07T06:00:00Z", "comment_count": 15, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-11-30T23:00:00Z", "scheduled_resolve_time": "2029-12-31T23:00:00Z", "actual_resolve_time": null, "open_time": "2021-05-07T06:00:00Z", "nr_forecasters": 58, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": 
"Natural Sciences", "type": "category" } ] }, "question": { "id": 6967, "title": "Will Soylent-like meal replacements be labeled unhealthy?", "created_at": "2021-04-03T07:12:31.379892Z", "open_time": "2021-05-07T06:00:00Z", "cp_reveal_time": "2021-05-07T19:32:31.151423Z", "spot_scoring_time": "2021-05-07T19:32:31.151423Z", "scheduled_resolve_time": "2029-12-31T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-11-30T23:00:00Z", "actual_close_time": "2029-11-30T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "A [meal replacement](https://www.wikiwand.com/en/Meal_replacement) is\n\n> a drink, bar, soup, etc. intended as a substitute for a solid food meal, usually with controlled quantities of calories and nutrients.\n\nMeal replacements may be consumed instead of traditional foodstuffs for several reasons, like dietary restrictions, price, and convenience.\n\nA class of meal replacements claiming to be nutritionally complete has gained popularity over the last decade. They are consumed as shakes and sold either in powder form or pre-mixed with water. Prime examples include [Soylent](https://www.wikiwand.com/en/Soylent_(meal_replacement)) and [Huel](https://www.wikiwand.com/en/Huel). 
A more comprehensive list can be found on [blendrunner.com](https://www.blendrunner.com/).\n\nDue to their novelty, it is unclear whether they're in fact nutritionally complete, as there could be unknown unknowns in human nutrition. Moreover, no empirical studies have established the safety of using them as one's main or sole food source over the long-term.\n\n***Will Soylent-like meal replacements be labeled unhealthy before 2030?***\n\nThis question resolves positive if, before 2030/1/1, the FDA or the EFSA do any of the following:\n\n1. Issuing official guidelines stating that a healthy diet should not rely on meal replacements of this kind alone. It should be clear from the phrasing that they're referring to Soylent-like products specifically, i.e. products consumed as shakes and claiming to be nutritionally complete.\n\n2. Banning an ingredient or manufacturing procedure currently used by any of the products listed in the fine print.\n\n[fine-print]\nList of products, taken from [blendrunner.com](https://www.blendrunner.com/):\n\n- Soylent Powder/Drink\n- Huel Powder/Ready-to-drink\n- Queal Steady\n- Jimmy Joy's Plenny Shake/Drink\n- ManaPowder/ManaDrink\n- Saturo Powder/Drink\n\nIf additional products are listed by blendrunner.com in this category, they will not be considered for the purposes of this question.\n\n[/fine-print]", "fine_print": "", "post_id": 6967, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763040731.317719, "end_time": 1763941221.247852, "forecaster_count": 21, "interval_lower_bounds": [ 0.21 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1763040731.317719, "end_time": 1763941221.247852, "forecaster_count": 21, "interval_lower_bounds": [ 0.21 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.7, 0.3 ], "means": [ 0.2839186206689829 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.7995735536199648, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.3764295304079803, 0.0, 0.07557908443735797, 0.0, 0.32677805160011447, 0.0, 0.0, 0.0, 0.0, 2.0609756668456782, 0.0, 0.0, 1.2344773826733824, 0.0, 0.9897723569095167, 0.11847113291540433, 0.0, 0.09570280051650464, 0.0, 0.2819710410581078, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08561795843432535, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24164198844540966, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289240.552614, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289240.552614, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8166149581079084, 0.18338504189209162 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 148, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 6966, "title": "Will SpaceX actually put a literal Dogecoin on the literal Moon by 2027?", "short_title": "SpaceX Puts Literal Dogecoin on Literal Moon", "url_title": "SpaceX Puts Literal Dogecoin on Literal Moon", "slug": "spacex-puts-literal-dogecoin-on-literal-moon", "author_id": 111828, "author_username": "ThirdEyeOpen", "coauthors": [], "created_at": "2021-04-02T23:28:36.838406Z", "published_at": "2021-04-05T05:00:00Z", "edited_at": "2025-09-05T17:28:56.913357Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-05T05:00:00Z", "comment_count": 72, "status": "closed", "resolved": false, "actual_close_time": "2024-01-14T05:59:00Z", 
"scheduled_close_time": "2024-01-14T05:59:00Z", "scheduled_resolve_time": "2027-01-01T05:59:00Z", "actual_resolve_time": null, "open_time": "2021-04-05T05:00:00Z", "nr_forecasters": 251, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3693, "name": "Cryptocurrencies", "slug": "cryptocurrencies", "emoji": "💰", "description": "Cryptocurrencies", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, 
"question": { "id": 6966, "title": "Will SpaceX actually put a literal Dogecoin on the literal Moon by 2027?", "created_at": "2021-04-02T23:28:36.838406Z", "open_time": "2021-04-05T05:00:00Z", "cp_reveal_time": "2021-04-07T05:00:00Z", "spot_scoring_time": "2021-04-07T05:00:00Z", "scheduled_resolve_time": "2027-01-01T05:59:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2024-01-14T05:59:00Z", "actual_close_time": "2024-01-14T05:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "closed", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "On April 1st 2021, [Elon Musk tweeted: \"SpaceX is going to put a literal Dogecoin on the literal moon\"](https://mobile.twitter.com/elonmusk/status/1377567762919292938). While the day of publication is usually associated with pranks and humourously misleading jokes, there is speculation that the intention may be serious, and Elon has hinted at the idea previously.\n\n***Will SpaceX put a literal Dogecoin on the literal Moon?***\n\nThis question resolves positively if a vehicle operated by SpaceX delivers a representation of Dogecoin on the surface of Earth's Moon by the end of 2026. The \"Dogecoin\" must in some way contain at least one coin's worth of Dogecoin, for example by having a key written in it that allows access to an address that contains Dogecoin. 
A sculpture of a Dogecoin, but which does not carry any value of the cryptocurrency, will not resolve positively.", "fine_print": "", "post_id": 6966, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1705193118.029306, "end_time": null, "forecaster_count": 251, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.11 ] } ], "latest": { "start_time": 1705193118.029306, "end_time": null, "forecaster_count": 251, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.05 ], "interval_upper_bounds": [ 0.11 ], "forecast_values": [ 0.95, 0.05 ], "means": [ 0.10204662263390966 ], "histogram": [ [ 3.0214561292716686, 4.989680074002199, 2.809109439035319, 1.421824138119256, 1.7003933166220064, 3.449628074806418, 0.4911501604066085, 0.5857930758574923, 0.4706731342104142, 0.0, 2.959081095809692, 2.0553408690454202, 0.41557436145963644, 1.8557377971078062e-06, 0.0, 0.044376701611939164, 0.38999774782561203, 0.3961727197100348, 0.01776245399464456, 0.10595009725959306, 1.0730322419436273, 0.04315667338303108, 0.0, 0.0, 0.0, 0.9618054294943963, 0.05822000070021141, 1.525040219253506e-06, 0.055656488413053234, 0.0, 0.44030117891867304, 0.005196411282330814, 0.0, 0.016836170320175425, 0.2588250905278227, 0.030218670894291824, 0.04354798612100197, 0.0, 0.015950268643583336, 0.12780924600870003, 0.3601407458271396, 1.1526710509832505e-05, 0.036406227579904867, 0.0, 0.10499477131542068, 0.10989390017968909, 0.002153513488541547, 0.04923472709559051, 0.017140528053622203, 0.007268410539107888, 0.010368287284128702, 0.1132741212170707, 0.0009017090495349255, 0.009165982948252832, 0.00015502693128851286, 0.004572473852650152, 0.005420131491797134, 0.0006762310350823626, 0.0, 0.0005663041523705654, 0.031288776517924055, 0.004055540077712002, 0.0005646826295987281, 0.001127678279701934, 0.0003044472181220444, 0.002758398154096551, 0.00225862906158095, 0.010829682489398605, 0.0, 2.644629659537343e-06, 0.06572905656591091, 0.0, 0.0, 
9.275573797294637e-05, 0.7048306501760225, 2.413407324777371e-05, 0.018072092202173796, 0.0, 0.0, 0.00013438842091682574, 0.011669777014807333, 0.0, 0.0, 0.0, 0.005446218823098715, 0.0001537069214915996, 0.00011614751024902168, 0.0017273541414045655, 0.0, 0.0, 0.03788596971006896, 0.00010006106212854434, 0.0, 0.0002045884230450191, 3.4477102327537125e-05, 0.0, 0.0, 0.0, 0.00010784930974857866, 0.0022019888918406415 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1705193118.068062, "end_time": null, "forecaster_count": 251, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1705193118.068062, "end_time": null, "forecaster_count": 251, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9937848667489757, 0.006215133251024281 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 16, "user_vote": null }, "forecasts_count": 714, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 6965, "title": "Will Rep. 
Matt Gaetz leave the US House of Representatives before 2023?", "short_title": "Rep Matt Gaetz leaves Congress by 2023", "url_title": "Rep Matt Gaetz leaves Congress by 2023", "slug": "rep-matt-gaetz-leaves-congress-by-2023", "author_id": 114156, "author_username": "TeeJayKay", "coauthors": [], "created_at": "2021-04-02T17:17:07.909765Z", "published_at": "2021-04-05T22:00:00Z", "edited_at": "2025-09-05T17:29:07.769801Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-05T22:00:00Z", "comment_count": 51, "status": "resolved", "resolved": true, "actual_close_time": "2021-10-01T07:01:00Z", "scheduled_close_time": "2021-10-01T07:01:00Z", "scheduled_resolve_time": "2023-01-01T05:00:00Z", "actual_resolve_time": "2023-01-01T05:00:00Z", "open_time": "2021-04-05T22:00:00Z", "nr_forecasters": 110, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": 
"normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 6965, "title": "Will Rep. Matt Gaetz leave the US House of Representatives before 2023?", "created_at": "2021-04-02T17:17:07.909765Z", "open_time": "2021-04-05T22:00:00Z", "cp_reveal_time": "2021-04-06T08:03:30.644115Z", "spot_scoring_time": "2021-04-06T08:03:30.644115Z", "scheduled_resolve_time": "2023-01-01T05:00:00Z", "actual_resolve_time": "2023-01-01T05:00:00Z", "resolution_set_time": "2023-01-01T05:00:00Z", "scheduled_close_time": "2021-10-01T07:01:00Z", "actual_close_time": "2021-10-01T07:01:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Rep. Matt Gaetz has been a controversial figure for much of his tenure in Congress. Recently it came to light that Gaetz is being [investigated by the Department of Justice](https://www.businessinsider.com/justice-department-investigating-matt-gaetz-sexual-relationship-17-year-old-2021-3) for an alleged relationship with a 17-year-old, among other things. 
In another, separate scandal, he allegedly [showed nude photos of women he'd slept with to lawmakers](https://www.cnn.com/2021/04/01/politics/matt-gaetz-photos-women/index.html).\n\nGaetz has denied the allegations, characterizing them as a personal attack on him due to his conservatism, but has also floated the idea of [retiring from Congress early for a position at Newsmax](https://www.axios.com/matt-gaetz-retirement-congress-newsmax-e1a0e6bb-0279-4e97-ab22-508e28f4347a.html). Most recently, [his communications director has resigned](https://www.politico.com/news/2021/04/02/gaetz-communications-director-departs-scandal-478936).", "resolution_criteria": "This question resolves positively if Matt Gaetz is a not member of U.S. House of Representatives at any time between 2022-04-01 and 2023-01-01, whether it be by resignation, expulsion, or otherwise. If Gaetz completes his full term as representative to 2023-01-03, the question resolves negatively", "fine_print": "", "post_id": 6965, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1633050911.032346, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": [ 0.54 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.63 ] } ], "latest": { "start_time": 1633050911.032346, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": [ 0.54 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.63 ], "forecast_values": [ 0.42000000000000004, 0.58 ], "means": [ 0.5583611525268697 ], "histogram": [ [ 0.0, 0.14156091856216457, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4787182358551061, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09864101756083635, 0.010843095881615717, 0.0, 0.0, 0.2118001597438285, 0.18494298003198836, 0.0, 0.0, 0.005278284780573776, 0.0, 0.13343287511304883, 0.0, 0.0, 0.22672535077896389, 0.0, 0.04541843129948949, 0.009134070348939649, 0.0, 0.0, 0.0, 1.0670915650151884, 0.0, 0.0, 0.0, 0.0, 0.007611334437798737, 0.23685514934120233, 0.1048783956092752, 
0.3113965765393445, 0.09273134214274618, 0.47539913029737474, 0.44189476753711276, 0.0, 0.0, 1.0243672562613007, 0.46071962605620564, 2.1944915071082147, 2.2675888826849113, 0.0, 0.0031831285372123284, 2.602379212749998, 0.9308105132594037, 0.6835470383939841, 1.4666636397552364, 0.0, 0.0, 1.2311506071948508, 0.06170906719523146, 0.011792323513531065, 0.0, 0.6213303172065177, 1.076819356340641, 0.0, 0.03441582824434122, 0.061488793259794075, 0.10825205252332025, 0.0, 0.0, 0.001075730585898714, 0.0, 0.04825407867180581, 0.08183481000451574, 0.02983409897790073, 0.0, 0.0, 0.0012325758379482118, 0.0, 0.0, 0.0022849581603560792, 0.0, 0.003921319701289916, 0.0, 0.0, 0.020591683781788054, 0.0, 0.0, 0.0008057945742863486, 0.0, 0.0, 0.06974239546232804 ] ] }, "score_data": { "peer_score": 22.24569558674651, "coverage": 0.9999819259123335, "baseline_score": -42.189381210735384, "spot_peer_score": 97.39510224953139, "peer_archived_score": 22.24569558674651, "baseline_archived_score": -42.189381210735384, "spot_peer_archived_score": 97.39510224953139 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1633050911.060756, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1633050911.060756, "end_time": null, "forecaster_count": 109, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5298473901363714, 0.47015260986362867 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 17, "user_vote": null }, "forecasts_count": 372, "key_factors": [], "is_current_content_translated": false, "description": "Rep. Matt Gaetz has been a controversial figure for much of his tenure in Congress. 
Recently it came to light that Gaetz is being [investigated by the Department of Justice](https://www.businessinsider.com/justice-department-investigating-matt-gaetz-sexual-relationship-17-year-old-2021-3) for an alleged relationship with a 17-year-old, among other things. In another, separate scandal, he allegedly [showed nude photos of women he'd slept with to lawmakers](https://www.cnn.com/2021/04/01/politics/matt-gaetz-photos-women/index.html).\n\nGaetz has denied the allegations, characterizing them as a personal attack on him due to his conservatism, but has also floated the idea of [retiring from Congress early for a position at Newsmax](https://www.axios.com/matt-gaetz-retirement-congress-newsmax-e1a0e6bb-0279-4e97-ab22-508e28f4347a.html). Most recently, [his communications director has resigned](https://www.politico.com/news/2021/04/02/gaetz-communications-director-departs-scandal-478936)." }, { "id": 6959, "title": "Will there be a renewal of intense fighting in the Donbass this summer?", "short_title": "War in Donbass 2021", "url_title": "War in Donbass 2021", "slug": "war-in-donbass-2021", "author_id": 100816, "author_username": "akarlin", "coauthors": [], "created_at": "2021-04-01T12:10:48.127124Z", "published_at": "2021-04-07T06:00:00Z", "edited_at": "2025-09-05T17:28:51.687265Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-07T06:00:00Z", "comment_count": 16, "status": "resolved", "resolved": true, "actual_close_time": "2021-08-12T13:21:00Z", "scheduled_close_time": "2021-08-12T13:21:00Z", "scheduled_resolve_time": "2021-10-04T13:30:00Z", "actual_resolve_time": "2021-10-04T13:30:00Z", "open_time": "2021-04-07T06:00:00Z", "nr_forecasters": 58, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32597, "name": "2021 Leaderboard", "slug": "2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, 
"prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 6959, "title": "Will there be a renewal of intense fighting in the Donbass this summer?", "created_at": "2021-04-01T12:10:48.127124Z", "open_time": "2021-04-07T06:00:00Z", "cp_reveal_time": "2021-04-07T19:03:38.383329Z", "spot_scoring_time": "2021-04-07T19:03:38.383329Z", "scheduled_resolve_time": "2021-10-04T13:30:00Z", "actual_resolve_time": "2021-10-04T13:30:00Z", "resolution_set_time": "2021-10-04T13:30:00Z", "scheduled_close_time": "2021-08-12T13:21:00Z", "actual_close_time": "2021-08-12T13:21:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, 
"open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "American and European media and political institutions are <a href=\"https://www.cbsnews.com/news/russia-troops-ukraine-border-concerning-united-states/\">drawing</a> <a href=\"https://www.rferl.org/a/ukrainian-russian-military-buildup-border/31180563.html\">attention</a> to a Russian military buildup in the Donbass and Crimea. This is after several weeks of reports from <a href=\"https://strana.ua/news/322516-nastuplenie-vsu-na-donbasse-cheho-zhdat-ot-obostrenija-v-zone-oos.html\">Ukrainian</a>, <a href=\"https://www.youtube.com/watch?v=RFUdFKgxkFk\">Russian</a>, and LDNR sources about a Ukrainian military buildup at the frontlines in Donbass, which had until now been little noticed in the West except in OSINT/<a href=\"https://twitter.com/Archer83Able/status/1370418913641701379\">military watcher</a> circles. 
Old positions in the demarcation zones have been reoccupied by Ukrainian and NAF troops, and there are more frequent <a href=\"https://www.nytimes.com/2021/03/30/world/europe/ukraine-russia-fighting.html\">exchanges</a> of artillery fire.", "resolution_criteria": "Resolves positively if there are >250 Ukrainian military deaths in any given month within the next half year (until Oct 1, 2021, inclusive) on the territory of Donetsk and Lugansk oblasts of Ukraine.", "fine_print": "Source used will be <a href=\"http://memorybook.org.ua/indexfile/statmonth.htm\">MemoryBook</a> (reprinted on <a href=\"https://uk.wikipedia.org/wiki/%D0%92%D1%82%D1%80%D0%B0%D1%82%D0%B8_%D1%81%D0%B8%D0%BB%D0%BE%D0%B2%D0%B8%D1%85_%D1%81%D1%82%D1%80%D1%83%D0%BA%D1%82%D1%83%D1%80_%D0%B2%D0%BD%D0%B0%D1%81%D0%BB%D1%96%D0%B4%D0%BE%D0%BA_%D1%80%D0%BE%D1%81%D1%96%D0%B9%D1%81%D1%8C%D0%BA%D0%BE%D0%B3%D0%BE_%D0%B2%D1%82%D0%BE%D1%80%D0%B3%D0%BD%D0%B5%D0%BD%D0%BD%D1%8F_%D0%B2_%D0%A3%D0%BA%D1%80%D0%B0%D1%97%D0%BD%D1%83\">Wikipedia page</a> reprinting its stats), or another credible source should there be a strong consensus that this one has become incredible or obsolete.", "post_id": 6959, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1633064456.307757, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.03 ], "interval_upper_bounds": [ 0.04 ] } ], "latest": { "start_time": 1633064456.307757, "end_time": null, "forecaster_count": 75, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.03 ], "interval_upper_bounds": [ 0.04 ], "forecast_values": [ 0.97, 0.03 ], "means": [ 0.050223288164112076 ], "histogram": [ [ 0.0, 5.625896443079555, 2.065302846113098, 3.2190794144522896, 1.2207356392542226, 1.1977495746977294, 0.0, 0.06430137558421471, 0.28889934554601726, 0.0, 0.016946764592253687, 0.14611032672949936, 0.21333319411836404, 0.0, 0.04961809028037504, 0.2874008696100303, 0.012062988077826794, 0.0, 0.0, 0.0, 0.029207608844041524, 
0.45555165592256447, 0.03607961009957732, 0.0012808209520579696, 0.0, 0.1414812311701222, 0.020975031716262728, 0.0029327142690001094, 0.6285363173936885, 0.0, 0.016151797308934002, 0.0, 0.0, 0.0, 0.0, 0.010703886146113043, 0.0, 0.0, 0.023254082934554995, 0.0, 0.01934767332701437, 0.0, 0.0, 0.009464057867442102, 0.0, 0.0, 0.010778238562133918, 0.0, 0.0, 0.0007129919161079304, 0.0, 0.004095049893788444, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.002007702400494954, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005537830714382468, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 11.379001068530146, "coverage": 0.999890097636021, "baseline_score": 57.86307658177116, "spot_peer_score": -12.348718835451363, "peer_archived_score": 11.379001068530146, "baseline_archived_score": 57.86307658177116, "spot_peer_archived_score": -12.348718835451363 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1628750335.465058, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1628750335.465058, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9384754158389547, 0.06152458416104536 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 17, "user_vote": null }, "forecasts_count": 196, "key_factors": [], "is_current_content_translated": false, "description": "American and European media and political institutions are <a href=\"https://www.cbsnews.com/news/russia-troops-ukraine-border-concerning-united-states/\">drawing</a> <a href=\"https://www.rferl.org/a/ukrainian-russian-military-buildup-border/31180563.html\">attention</a> to a Russian military buildup in the 
Donbass and Crimea. This is after several weeks of reports from <a href=\"https://strana.ua/news/322516-nastuplenie-vsu-na-donbasse-cheho-zhdat-ot-obostrenija-v-zone-oos.html\">Ukrainian</a>, <a href=\"https://www.youtube.com/watch?v=RFUdFKgxkFk\">Russian</a>, and LDNR sources about a Ukrainian military buildup at the frontlines in Donbass, which had until now been little noticed in the West except in OSINT/<a href=\"https://twitter.com/Archer83Able/status/1370418913641701379\">military watcher</a> circles. Old positions in the demarcation zones have been reoccupied by Ukrainian and NAF troops, and there are more frequent <a href=\"https://www.nytimes.com/2021/03/30/world/europe/ukraine-russia-fighting.html\">exchanges</a> of artillery fire." }, { "id": 6948, "title": "Will lepton universality be falsified before 2026?", "short_title": "The death of lepton universality", "url_title": "The death of lepton universality", "slug": "the-death-of-lepton-universality", "author_id": 103600, "author_username": "2e10e122", "coauthors": [], "created_at": "2021-03-30T13:07:43.474021Z", "published_at": "2021-04-06T05:00:00Z", "edited_at": "2025-11-22T20:02:05.296572Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-06T05:00:00Z", "comment_count": 6, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2025-12-30T23:00:00Z", "scheduled_resolve_time": "2026-01-30T23:00:00Z", "actual_resolve_time": null, "open_time": "2021-04-06T05:00:00Z", "nr_forecasters": 36, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", 
"created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 6948, "title": "Will lepton universality be falsified before 2026?", "created_at": "2021-03-30T13:07:43.474021Z", "open_time": "2021-04-06T05:00:00Z", "cp_reveal_time": "2021-04-08T05:00:00Z", "spot_scoring_time": "2021-04-08T05:00:00Z", "scheduled_resolve_time": "2026-01-30T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2025-12-30T23:00:00Z", "actual_close_time": "2025-12-30T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": 
null, "continuous_range": null }, "group_rank": null, "description": "The [Standard Model of particle physics](https://www.wikiwand.com/en/Standard_Model) is the most complete description of physical phenomena not involving gravity known to date. It accommodates all known fundamental particles and explains their interactions [in a compact way](https://www.wikiwand.com/en/Mathematical_formulation_of_the_Standard_Model#/Lagrangian_formalism).\n\nOne of its features is lepton universality, which implies that the electron, the muon, and the tau particle couple with the same strength to the particles responsible for the electroweak force.\n\nA [recent paper](https://arxiv.org/abs/2103.11769) from the LHCb collaboration studying the decay of \\(B\\) mesons has found evidence against lepton universality at the \\(3.1\\sigma\\) level.\n\nAnomalies like this one [have happened before](https://www.wikiwand.com/en/750_GeV_diphoton_excess) in the LHC, so [it's not clear](https://resonaances.blogspot.com/2021/03/thoughts-on-rk.html) the result will survive new incoming data. 
The gold standard for discovery in particle physics is conventionally taken to be \\(5\\sigma\\).", "resolution_criteria": "This question resolves positively if a peer reviewed paper is published before 2025/12/31 claiming to have found evidence at the \\(5\\sigma\\) level or greater that lepton universality is violated in nature", "fine_print": "", "post_id": 6948, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763833050.060234, "end_time": 1764535135.502399, "forecaster_count": 12, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.04 ] } ], "latest": { "start_time": 1763833050.060234, "end_time": 1764535135.502399, "forecaster_count": 12, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.04 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.029849549338146007 ], "histogram": [ [ 2.39246060801706, 1.3681678989571866, 0.0, 0.29286789626133847, 0.362543115534516, 0.4411588324588233, 0.0, 0.0, 0.0, 0.17692120631776423, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.23128568172579037, 0.0, 0.0, 0.0, 0.08508524734423982, 0.0, 0.0, 0.0, 0.0, 0.12874931591104752, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728286897.26954, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728286897.26954, "end_time": null, "forecaster_count": 34, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9704587329217251, 0.029541267078274848 ], 
"means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 8, "user_vote": null }, "forecasts_count": 197, "key_factors": [], "is_current_content_translated": false, "description": "The [Standard Model of particle physics](https://www.wikiwand.com/en/Standard_Model) is the most complete description of physical phenomena not involving gravity known to date. It accommodates all known fundamental particles and explains their interactions [in a compact way](https://www.wikiwand.com/en/Mathematical_formulation_of_the_Standard_Model#/Lagrangian_formalism).\n\nOne of its features is lepton universality, which implies that the electron, the muon, and the tau particle couple with the same strength to the particles responsible for the electroweak force.\n\nA [recent paper](https://arxiv.org/abs/2103.11769) from the LHCb collaboration studying the decay of \\(B\\) mesons has found evidence against lepton universality at the \\(3.1\\sigma\\) level.\n\nAnomalies like this one [have happened before](https://www.wikiwand.com/en/750_GeV_diphoton_excess) in the LHC, so [it's not clear](https://resonaances.blogspot.com/2021/03/thoughts-on-rk.html) the result will survive new incoming data. The gold standard for discovery in particle physics is conventionally taken to be \\(5\\sigma\\)." 
}, { "id": 6946, "title": "Will an anthropogenic disaster happen in China before one happens in India?", "short_title": "Anthropogenic Disasters - China vs India", "url_title": "Anthropogenic Disasters - China vs India", "slug": "anthropogenic-disasters-china-vs-india", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-03-29T20:49:20.360359Z", "published_at": "2021-04-11T07:00:00Z", "edited_at": "2025-09-05T17:29:01.382774Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-11T07:00:00Z", "comment_count": 13, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2100-01-01T00:00:00Z", "scheduled_resolve_time": "2200-01-01T00:00:00Z", "actual_resolve_time": null, "open_time": "2021-04-11T07:00:00Z", "nr_forecasters": 52, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": 
"Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 6946, "title": "Will an anthropogenic disaster happen in China before one happens in India?", "created_at": "2021-03-29T20:49:20.360359Z", "open_time": "2021-04-11T07:00:00Z", "cp_reveal_time": "2021-04-12T21:34:41.333694Z", "spot_scoring_time": "2021-04-12T21:34:41.333694Z", "scheduled_resolve_time": "2200-01-01T00:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2100-01-01T00:00:00Z", "actual_close_time": "2100-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "If you look through Wikipedia's [List of wars and anthropogenic disasters by death toll](https://en.wikipedia.org/wiki/List_of_wars_and_anthropogenic_disasters_by_death_toll), China features highly across a range of different disaster types, higher than India; However, both India and China have had roughly comparable population sizes for as [long as we have data for it](https://en.wikipedia.org/wiki/Estimates_of_historical_world_population#World_Population_Estimates,_20_Countries_and_Regional_Totals,_0%E2%80%932000_AD_(in_thousands)).", "resolution_criteria": "Resolves positively if an anthropogenic disaster resulting in the death of at least 10,000,000 people occurs in China before one happens in India", "fine_print": "", "post_id": 6946, 
"aggregations": { "recency_weighted": { "history": [ { "start_time": 1754168475.801483, "end_time": 1866595296.899812, "forecaster_count": 51, "interval_lower_bounds": [ 0.49 ], "centers": [ 0.502 ], "interval_upper_bounds": [ 0.6 ] } ], "latest": { "start_time": 1754168475.801483, "end_time": 1866595296.899812, "forecaster_count": 51, "interval_lower_bounds": [ 0.49 ], "centers": [ 0.502 ], "interval_upper_bounds": [ 0.6 ], "forecast_values": [ 0.498, 0.502 ], "means": [ 0.5274700694378766 ], "histogram": [ [ 0.0, 0.061881780206469506, 0.06930124042864184, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26969150866663705, 0.0, 0.189341530780365, 0.0, 0.0, 0.3193625093223559, 0.0, 0.0, 0.09329360355583258, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5613981478636185, 0.0, 0.0, 0.0, 0.0, 0.3469183260877188, 0.0, 0.6153078178235376, 0.0, 0.9469851855057609, 3.244170862388624, 0.0, 0.0, 0.0, 0.0, 1.6127472924162158, 0.0, 0.5576688763820327, 0.0, 0.0, 1.5455323727097483, 0.0, 0.0, 0.6811121724598794, 0.0, 0.1233362298559661, 0.0, 0.0, 0.0, 0.0, 0.5421825521478726, 0.0, 0.0, 0.004474424132665067, 0.018701514513498173, 0.953880364022869, 0.0, 0.0, 0.0, 0.0, 0.029133191509986833, 0.0, 0.0, 0.0, 0.003256133383704309, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728290258.069667, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728290258.069667, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6254197677863917, 0.3745802322136082 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, 
"forecasts_count": 122, "key_factors": [], "is_current_content_translated": false, "description": "If you look through Wikipedia's [List of wars and anthropogenic disasters by death toll](https://en.wikipedia.org/wiki/List_of_wars_and_anthropogenic_disasters_by_death_toll), China features highly across a range of different disaster types, higher than India; However, both India and China have had roughly comparable population sizes for as [long as we have data for it](https://en.wikipedia.org/wiki/Estimates_of_historical_world_population#World_Population_Estimates,_20_Countries_and_Regional_Totals,_0%E2%80%932000_AD_(in_thousands))." }, { "id": 6944, "title": "In the 2024 US presidential election, will any state refuse to certify their election results?", "short_title": "2024 US election results not certified", "url_title": "2024 US election results not certified", "slug": "2024-us-election-results-not-certified", "author_id": 117502, "author_username": "RyanBeck", "coauthors": [], "created_at": "2021-03-29T11:01:15.331378Z", "published_at": "2021-04-16T22:00:00Z", "edited_at": "2025-09-05T17:28:51.312382Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-16T22:00:00Z", "comment_count": 20, "status": "resolved", "resolved": true, "actual_close_time": "2024-11-02T03:59:00Z", "scheduled_close_time": "2024-11-02T03:59:00Z", "scheduled_resolve_time": "2025-01-21T04:59:00Z", "actual_resolve_time": "2025-01-21T16:07:00Z", "open_time": "2021-04-16T22:00:00Z", "nr_forecasters": 136, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": 
"2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 6944, "title": "In the 2024 US presidential election, will any state refuse to certify their election results?", "created_at": "2021-03-29T11:01:15.331378Z", "open_time": "2021-04-16T22:00:00Z", "cp_reveal_time": "2021-04-18T15:58:43.627512Z", "spot_scoring_time": "2021-04-18T15:58:43.627512Z", "scheduled_resolve_time": "2025-01-21T04:59:00Z", "actual_resolve_time": "2025-01-21T16:07:00Z", "resolution_set_time": "2025-01-21T16:08:56.917171Z", "scheduled_close_time": "2024-11-02T03:59:00Z", "actual_close_time": "2024-11-02T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Recent [controversy over state election laws](https://www.businessinsider.com/georgia-new-election-law-means-for-voters-and-officials-explainer-2021-3) has caused some Democrats to be concerned that Republicans intend to use their political power in state governments to distort future elections in their favor. One commentator [recently suggested](https://web.archive.org/web/20210329024002/https://twitter.com/jbouie/status/1376161115244204037):\n\n>\"If a Democrat wins a GOP-controlled swing state in 2024 … there’s a very good chance the victory isn’t certified\"\n\nElection certification is the process in which states [confirm the election results](https://www.marketplace.org/2020/11/13/how-election-certification-works-when-will-2020-be-certified/) and declare them to be the official results. Typically states set their own deadlines for election certification, which in 2020 [ranged from November 5th to December 8th for the presidential contest](https://ballotpedia.org/Election_results_certification_dates,_2020) (several states appear to have no deadline), but federal law provides a [\"safe harbor\" deadline](https://www.npr.org/2020/12/08/942288226/bidens-victory-cemented-as-states-reach-deadline-for-certifying-vote-tallies) by which states must formally certify their election results in order for the certified results to be federally recognized as governing the outcome. The \"safe harbor\" deadline is set by [3 U.S. Code § 5](https://www.law.cornell.edu/uscode/text/3/5) and [3 U.S. 
Code § 7](https://www.law.cornell.edu/uscode/text/3/7) which sets the safe harbor deadline as six days prior to the first Monday after the second Wednesday in December of the election year.", "resolution_criteria": "The question will resolve positively if any state (or DC) does not certify their results by the \"safe harbor\" deadline. The \"safe harbor\" deadline for the 2024 presidential election will be December 10th", "fine_print": "", "post_id": 6944, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1730491666.979583, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.12 ], "interval_upper_bounds": [ 0.18 ] } ], "latest": { "start_time": 1730491666.979583, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.12 ], "interval_upper_bounds": [ 0.18 ], "forecast_values": [ 0.88, 0.12 ], "means": [ 0.1394928684400307 ], "histogram": [ [ 0.0, 0.3008689719252607, 0.0019620111566528127, 1.3176345021335154, 1.6475803577681676, 2.365581015865658, 0.8471816421521785, 1.0105896070400524, 0.22118835180311228, 0.0, 1.9393876090546616, 0.7376627696123728, 1.5681343235345233, 0.8049933066493316, 0.6449321567039206, 1.1746414157907537, 0.2834844959197855, 0.01495437243218068, 1.7638207823612815, 0.005429947567341769, 0.3755181792289473, 1.4445471860599053, 0.018308380000309384, 0.00033924000740849557, 0.358594695901422, 0.5076215570506251, 0.16232920466197684, 0.0009794353960549545, 0.0, 0.0, 0.614841563502758, 0.007933824671589458, 0.41321318036730975, 0.11581760647879613, 0.013051657401365055, 0.12607376217828592, 0.05495986147498624, 0.013975062184252263, 0.003064109948486093, 0.0, 0.48278375914076804, 0.0, 0.0, 0.0, 0.0, 0.07311873514419015, 0.0, 0.0, 0.0, 0.0, 0.150717313743543, 0.0, 0.0, 0.0, 0.0, 0.12791831064104553, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.011361536607641616, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003941525261707368, 0.0, 
0.006834988728375071, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0036249822223360915 ] ] }, "score_data": { "baseline_score": 65.78624027697835, "peer_score": 12.022106921150396, "coverage": 0.9999533004994478, "relative_legacy_score": 0.0, "weighted_coverage": 0.9999533004994478, "spot_peer_score": 4.4542343044833475, "spot_baseline_score": 64.15460290875237, "baseline_archived_score": 65.78624027697835, "peer_archived_score": 12.022106921150396, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 4.4542343044833475, "spot_baseline_archived_score": 64.15460290875237 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287210.595952, "end_time": null, "forecaster_count": 133, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287210.595952, "end_time": null, "forecaster_count": 133, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9693264411136492, 0.03067355888635082 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 17, "user_vote": null }, "forecasts_count": 360, "key_factors": [], "is_current_content_translated": false, "description": "Recent [controversy over state election laws](https://www.businessinsider.com/georgia-new-election-law-means-for-voters-and-officials-explainer-2021-3) has caused some Democrats to be concerned that Republicans intend to use their political power in state governments to distort future elections in their favor. 
One commentator [recently suggested](https://web.archive.org/web/20210329024002/https://twitter.com/jbouie/status/1376161115244204037):\n\n>\"If a Democrat wins a GOP-controlled swing state in 2024 … there’s a very good chance the victory isn’t certified\"\n\nElection certification is the process in which states [confirm the election results](https://www.marketplace.org/2020/11/13/how-election-certification-works-when-will-2020-be-certified/) and declare them to be the official results. Typically states set their own deadlines for election certification, which in 2020 [ranged from November 5th to December 8th for the presidential contest](https://ballotpedia.org/Election_results_certification_dates,_2020) (several states appear to have no deadline), but federal law provides a [\"safe harbor\" deadline](https://www.npr.org/2020/12/08/942288226/bidens-victory-cemented-as-states-reach-deadline-for-certifying-vote-tallies) by which states must formally certify their election results in order for the certified results to be federally recognized as governing the outcome. The \"safe harbor\" deadline is set by [3 U.S. Code § 5](https://www.law.cornell.edu/uscode/text/3/5) and [3 U.S. Code § 7](https://www.law.cornell.edu/uscode/text/3/7) which sets the safe harbor deadline as six days prior to the first Monday after the second Wednesday in December of the election year." 
}, { "id": 6940, "title": "By 2030, will at least 10,000 Americans die in a single year from a single conflict?", "short_title": "10k+ US Conflict Deaths in 1 year by 2030", "url_title": "10k+ US Conflict Deaths in 1 year by 2030", "slug": "10k-us-conflict-deaths-in-1-year-by-2030", "author_id": 100216, "author_username": "kokotajlod", "coauthors": [], "created_at": "2021-03-28T13:49:12.918823Z", "published_at": "2021-04-07T22:00:00Z", "edited_at": "2025-10-27T23:43:42.675148Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-07T22:00:00Z", "comment_count": 23, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-12-31T14:13:00Z", "scheduled_resolve_time": "2030-01-01T14:13:00Z", "actual_resolve_time": null, "open_time": "2021-04-07T22:00:00Z", "nr_forecasters": 124, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 
3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 6940, "title": "By 2030, will at least 10,000 Americans die in a single year from a single conflict?", "created_at": "2021-03-28T13:49:12.918823Z", "open_time": "2021-04-07T22:00:00Z", "cp_reveal_time": "2021-04-08T15:48:39.117154Z", "spot_scoring_time": "2021-04-08T15:48:39.117154Z", "scheduled_resolve_time": "2030-01-01T14:13:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-12-31T14:13:00Z", "actual_close_time": "2029-12-31T14:13:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "For purposes of this question, deaths due to bioweapons, electricity grid failures, etc. don't count. Roughly, we are interested in \"deaths due to kinetic attacks.\" The 9/11 attacks count because ramming planes into buildings to make them collapse is kinetic. Were someone to hack into the FDA and mess things up so as to delay vaccine approval, thereby causing tens of thousands of deaths, that would not count.\n\nThe attackers don't need to be the military of a nation-state; terrorist groups count and coalitions/alliances also count.\n\nHowever, the attackers need to be \"part of the same team\" in some sense. 
Otherwise, this would resolve positive simply in virtue of the US annual homicide rate! If there were a series of race riots, insurrections, or acts of domestic terrorism linked together under one banner (e.g. white supremacy, antigovernment, or antipolice) that would count.\n\nYes, this means that (contrary to what the headline question would suggest) if the USA gets involved in several independent small-scale wars, the US casualties from which total more than 10,000 in a year, that would not count", "fine_print": "", "post_id": 6940, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763752273.608672, "end_time": 1764774585.858386, "forecaster_count": 70, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.09 ], "interval_upper_bounds": [ 0.15 ] } ], "latest": { "start_time": 1763752273.608672, "end_time": 1764774585.858386, "forecaster_count": 70, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.09 ], "interval_upper_bounds": [ 0.15 ], "forecast_values": [ 0.91, 0.09 ], "means": [ 0.11622622393331461 ], "histogram": [ [ 0.0, 0.11494798990593433, 0.4424052518723317, 0.0, 0.2937072262777226, 1.3469583244148002, 2.923933896157525, 0.7984890724912569, 1.4739175360018282, 0.5093539337740345, 0.6286462340089126, 0.09379907679016249, 1.0823652811885816, 0.0, 0.06830530044940847, 3.159909995103043, 0.028134217409661617, 0.0, 0.00742799989072429, 0.12976308100754524, 0.7221243404725501, 0.0, 0.19175831506623003, 0.0, 0.0, 0.17732339311343043, 0.19249412917235842, 0.4134223820577321, 0.018175126021569836, 0.0, 0.12491850857333592, 0.0, 0.0, 0.07921034496113237, 0.0, 0.0, 0.0, 0.0, 0.0, 0.038098485368474766, 0.07686482496376282, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08624868390658809, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003933706607008041, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.011833556630312131 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288964.804978, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288964.804978, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.968315581793643, 0.031684418206356986 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 20, "user_vote": null }, "forecasts_count": 326, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 6939, "title": "If and when this graph is extended to 10^14 parameter models trained on 10^14 elapsed tokens of similar-quality data, will the 10^14 parameter learning curve have slowed down substantially?", "short_title": "GPT-3 scaling plateau in < 3 OOMs", "url_title": "GPT-3 scaling plateau in < 3 OOMs", "slug": "gpt-3-scaling-plateau-in-3-ooms", "author_id": 100216, "author_username": "kokotajlod", "coauthors": [], "created_at": "2021-03-28T10:41:34.010408Z", "published_at": "2021-05-18T22:00:00Z", "edited_at": "2025-10-20T06:01:54.706502Z", "curation_status": "approved", "curation_status_updated_at": "2021-05-18T22:00:00Z", "comment_count": 22, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2037-02-23T11:37:00Z", "scheduled_resolve_time": "2050-01-01T11:37:00Z", "actual_resolve_time": null, "open_time": "2021-05-18T22:00:00Z", "nr_forecasters": 29, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": 
null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2341, "type": "question_series", "name": "AI Training and Compute", "slug": "ai-training-and-compute", "header_image": "https://cdn.metaculus.com/aitraining8.png", "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-22T20:04:19.044654Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, { "id": 2344, "type": "question_series", "name": "AI Technical Benchmarks", "slug": "ai-technical-benchmarks", "header_image": "https://cdn.metaculus.com/technical.png", "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-22T20:04:18.980942Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2344, "type": "question_series", "name": "AI Technical Benchmarks", "slug": "ai-technical-benchmarks", "header_image": "https://cdn.metaculus.com/technical.png", "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, 
"created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-22T20:04:18.980942Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 6939, "title": "If and when this graph is extended to 10^14 parameter models trained on 10^14 elapsed tokens of similar-quality data, will the 10^14 parameter learning curve have slowed down substantially?", "created_at": "2021-03-28T10:41:34.010408Z", "open_time": "2021-05-18T22:00:00Z", "cp_reveal_time": "2021-05-20T22:00:00Z", "spot_scoring_time": "2021-05-20T22:00:00Z", "scheduled_resolve_time": "2050-01-01T11:37:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2037-02-23T11:37:00Z", "actual_close_time": "2037-02-23T11:37:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Consider figure 15 from [this paper](https://arxiv.org/pdf/2010.14701.pdf#page=18).\n\nSome people (arguably the authors of this paper) predict that 
as we scale models past GPT-3's size (the 10^11 parameter learning curve, models with parameter count X trained on X elapsed tokens will score close to the L(D) line at X elapsed tokens.\n\nWe are interested in whether instead the trendline will \"plateau\" or at least be substantially slower than the line L(D) by the end of the next 3 orders of magnitude of parameter count. For the sake of specificity, let's say substantially slower = less than half as steep as L(D) on this graph.", "resolution_criteria": "This question resolves positively if the relevant experiment is done and reported (extending this graph, or providing equivalent data) and the slope of the learning curve for the 10^14 parameter model around 10^14 data points (Say, from 10^12 to 10^14) is less than half as steep as the slope of L(D). It resolves negatively if instead the slope is at least half as steep as L(D).\n\nThis question also resolves positively (or negatively) if it becomes uncontroversial what would have happened if the experiment had been done. For example, maybe other experiments will provide much more evidence about neural net scaling trends in general, such that it will be easy to calculate what would happen with this one in particular.\n\nThis question resolves ambiguously if by 2050 no one has performed the experiment AND there is no consensus about what would have happened if someone had.\n\nThe Metaculus moderators are the judge of final resort for determining whether and how to resolve this question.\n\nFor more context, see the comment threads below (Search for \"It's a big deal.\")", "fine_print": "It is important that the data used to extend the graph be of similar quality. Obviously if we just threw in 10^14 tokens of basic arithmetic problems, the model would get good at basic arithmetic but not at anything else, and it's unclear whether the result would be on-trend or not. Ideally we'd have 10e14 tokens of diverse internet text, scanned books, chat logs, emails, etc. 
If this experiment gets done with different-quality data, the question becomes whether it gives us enough evidence to uncontroversial predict what would have happened if we had done it with similar-quality data.", "post_id": 6939, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1760940104.039379, "end_time": 1765533348.0288, "forecaster_count": 24, "interval_lower_bounds": [ 0.27 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.358 ] } ], "latest": { "start_time": 1760940104.039379, "end_time": 1765533348.0288, "forecaster_count": 24, "interval_lower_bounds": [ 0.27 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.358 ], "forecast_values": [ 0.7, 0.3 ], "means": [ 0.3669589440042172 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287734.631113, "end_time": null, "forecaster_count": 29, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287734.631113, "end_time": null, "forecaster_count": 29, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8547785006256539, 0.14522149937434614 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 11, "user_vote": null }, "forecasts_count": 121, "key_factors": [], "is_current_content_translated": false, "description": "Consider figure 15 from [this paper](https://arxiv.org/pdf/2010.14701.pdf#page=18).\n\nSome people (arguably the authors of this paper) predict that as we scale models past GPT-3's size (the 10^11 parameter learning curve, models with parameter count X trained on X elapsed tokens will score close to the L(D) line at X elapsed tokens.\n\nWe are interested in whether instead the trendline will \"plateau\" or at least be substantially slower than the line L(D) by the end of the next 3 orders of magnitude of parameter count. 
For the sake of specificity, let's say substantially slower = less than half as steep as L(D) on this graph." }, { "id": 6920, "title": "Will Israel hold a second national election for Knesset in 2021?", "short_title": "Will Israel hold a second election in 2021?", "url_title": "Will Israel hold a second election in 2021?", "slug": "will-israel-hold-a-second-election-in-2021", "author_id": 116812, "author_username": "chipsie", "coauthors": [], "created_at": "2021-03-26T01:34:24.987925Z", "published_at": "2021-04-05T04:00:00Z", "edited_at": "2025-09-05T17:29:28.676921Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-05T04:00:00Z", "comment_count": 11, "status": "resolved", "resolved": true, "actual_close_time": "2021-08-01T03:59:00Z", "scheduled_close_time": "2021-08-01T03:59:00Z", "scheduled_resolve_time": "2021-12-29T17:52:00Z", "actual_resolve_time": "2021-12-29T17:52:00Z", "open_time": "2021-04-05T04:00:00Z", "nr_forecasters": 61, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32597, "name": "2021 Leaderboard", "slug": "2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": 
"2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3685, "name": "Elections", "slug": "elections", "emoji": "🗳️", "description": "Elections", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 6920, "title": "Will Israel hold a second national election for Knesset in 2021?", "created_at": "2021-03-26T01:34:24.987925Z", "open_time": "2021-04-05T04:00:00Z", "cp_reveal_time": "2021-04-06T16:53:38.140742Z", "spot_scoring_time": "2021-04-06T16:53:38.140742Z", "scheduled_resolve_time": "2021-12-29T17:52:00Z", "actual_resolve_time": "2021-12-29T17:52:00Z", "resolution_set_time": "2021-12-29T17:52:00Z", "scheduled_close_time": "2021-08-01T03:59:00Z", "actual_close_time": "2021-08-01T03:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Israel holds national elections every four years, however elections can be held early if no party is able to form a governing coalition or the government is dissolved midway through a term. Israel has held four elections in the past three years. 
The most recent election has just concluded and coalition negotiations are still ongoing. Will this election result in a stable government, or will there be more political gridlock?", "resolution_criteria": "This question will resolve positively if another national election for Knesset occurs in Israel by December 31st 2021.", "fine_print": "Local elections, additional coalition negotiations, or an election that is announced in 2021 but scheduled for 2022 are not sufficient to cause this question to resolve positively. This question is only asking about a national election to select members of Knesset that occurs in 2021.\n\nThis question closes retroactively on the day that a qualifying election is announced.\n\nResolution dates will be according to Israeli Standard Time.", "post_id": 6920, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1627776238.004909, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": [ 0.1 ], "centers": [ 0.14 ], "interval_upper_bounds": [ 0.18 ] } ], "latest": { "start_time": 1627776238.004909, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": [ 0.1 ], "centers": [ 0.14 ], "interval_upper_bounds": [ 0.18 ], "forecast_values": [ 0.86, 0.14 ], "means": [ 0.15468315069823074 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.10619425017942953, 0.0, 1.3018681142681308, 0.0, 0.4738397328642019, 0.0, 0.30820192961550297, 2.1779746966163507, 0.0, 0.16361328140823625, 0.20903539704454718, 4.104613219484559, 0.13316672358264545, 0.060189962532687886, 0.08847061033139408, 1.6143001827799839, 0.7461257598969706, 0.3307625712849324, 0.0, 0.15552512648814285, 0.06645500596732509, 0.0, 0.9010411077045193, 0.5123121112600272, 0.0, 0.04416450282422419, 0.0, 0.014925286850224059, 0.0, 0.0, 0.0, 0.0, 0.21207425793025764, 0.008145825627362421, 0.0, 0.0, 0.0, 0.15044316718241518, 0.0, 0.0, 0.0, 0.0, 0.2263451221037383, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0011024176184751514, 0.0, 0.0, 0.002292301674078456, 0.002996681779674101, 
0.004697335090360764, 0.0, 0.0, 0.0, 0.0037945794224233387, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0016681565683498104, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 14.30410335947818, "coverage": 0.9998569287641964, "baseline_score": 18.34300423875763, "spot_peer_score": 14.544761432603567, "peer_archived_score": 14.30410335947818, "baseline_archived_score": 18.34300423875763, "spot_peer_archived_score": 14.544761432603567 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1627776238.035618, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1627776238.035618, "end_time": null, "forecaster_count": 61, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9141842740764963, 0.0858157259235037 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 238, "key_factors": [], "is_current_content_translated": false, "description": "Israel holds national elections every four years, however elections can be held early if no party is able to form a governing coalition or the government is dissolved midway through a term. Israel has held four elections in the past three years. The most recent election has just concluded and coalition negotiations are still ongoing. Will this election result in a stable government, or will there be more political gridlock?" 
}, { "id": 6910, "title": "Will the 117th United States Senate change the filibuster rules during its session?", "short_title": "Will the US Senate change the filibuster?", "url_title": "Will the US Senate change the filibuster?", "slug": "will-the-us-senate-change-the-filibuster", "author_id": 117817, "author_username": "Mojowen", "coauthors": [], "created_at": "2021-03-23T22:09:36.196080Z", "published_at": "2021-04-02T07:00:00Z", "edited_at": "2025-09-05T17:28:52.341017Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-02T07:00:00Z", "comment_count": 48, "status": "resolved", "resolved": true, "actual_close_time": "2022-02-01T05:00:00Z", "scheduled_close_time": "2022-02-01T05:00:00Z", "scheduled_resolve_time": "2023-01-03T16:50:00Z", "actual_resolve_time": "2023-01-03T16:50:00Z", "open_time": "2021-04-02T07:00:00Z", "nr_forecasters": 131, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, 
"default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 6910, "title": "Will the 117th United States Senate change the filibuster rules during its session?", "created_at": "2021-03-23T22:09:36.196080Z", "open_time": "2021-04-02T07:00:00Z", "cp_reveal_time": "2021-04-03T05:41:08.167102Z", "spot_scoring_time": "2021-04-03T05:41:08.167102Z", "scheduled_resolve_time": "2023-01-03T16:50:00Z", "actual_resolve_time": "2023-01-03T16:50:00Z", "resolution_set_time": "2023-01-03T16:50:00Z", "scheduled_close_time": "2022-02-01T05:00:00Z", "actual_close_time": "2022-02-01T05:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "The United State Senate features a parliamentary procedure known as the [filibuster](https://en.wikipedia.org/wiki/Filibuster_in_the_United_States_Senate), which requires a three-fifths threshold to invoke cloture and vote.\n\nThe rules have been changed several times, including the adoption of a two-track system in the 1960's and altering the rule to exclude judicial and executive branch nominees from the three-fifths threshold in 2013 and 2017.", "resolution_criteria": "This question resolves positively if the United 
States Senate adopts a rule during the 117th Congress to either abolish or alter the filibuster", "fine_print": "", "post_id": 6910, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1643681276.969656, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1643681276.969656, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.2676415020693081 ], "histogram": [ [ 0.0, 0.008732334072308943, 0.0, 0.0, 0.0, 0.0, 0.28722651746213257, 0.0, 0.0, 0.0, 0.4008886888481758, 0.0, 0.0011649147362207618, 0.0, 0.1486544003203563, 0.6056005877815064, 0.3488133558429181, 0.0, 1.07376401614566, 1.0460087870955785, 3.056941309322026, 0.0, 0.0, 0.0, 1.1125445680199744, 3.536672433383743, 1.0961249457034525, 0.040378151698363784, 0.5910812040738033, 0.0, 0.418842141421701, 0.0, 1.2736523662578683, 1.2602891406787908, 0.0, 1.9741123694117766, 0.21309265547090303, 0.0, 1.0788083767780896, 1.1601437124001917, 0.3895363980175607, 0.0075358426782257525, 0.0, 0.0033427532148844396, 0.0816258733319549, 0.028985512609866124, 0.0, 0.022949395933020367, 0.0, 0.0010351305218856013, 0.019530815410581864, 0.0007744120635893653, 0.006458237024698661, 0.0030620553889559718, 0.0, 0.004315581753800415, 0.0, 6.046339999853544e-05, 0.0, 0.0, 0.012595000488477455, 0.0, 0.0, 0.0, 0.0, 0.011831056020497875, 0.0, 0.051957158001606045, 0.0, 0.0, 4.4000499145849686e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.01552329852504315, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 4.745932868790935, "coverage": 0.9989418661647425, "baseline_score": 31.229352925853735, "spot_peer_score": -0.338602638693208, "peer_archived_score": 4.745932868790935, "baseline_archived_score": 
31.229352925853735, "spot_peer_archived_score": -0.338602638693208 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1643681276.999011, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1643681276.999011, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7990561127491193, 0.20094388725088078 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 30, "user_vote": null }, "forecasts_count": 336, "key_factors": [], "is_current_content_translated": false, "description": "The United State Senate features a parliamentary procedure known as the [filibuster](https://en.wikipedia.org/wiki/Filibuster_in_the_United_States_Senate), which requires a three-fifths threshold to invoke cloture and vote.\n\nThe rules have been changed several times, including the adoption of a two-track system in the 1960's and altering the rule to exclude judicial and executive branch nominees from the three-fifths threshold in 2013 and 2017." 
}, { "id": 6903, "title": "Will the WTA and ATP merge before 2031?", "short_title": "WTA / ATP Merger before 2031", "url_title": "WTA / ATP Merger before 2031", "slug": "wta-atp-merger-before-2031", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-03-22T13:43:02.728083Z", "published_at": "2021-08-17T22:00:00Z", "edited_at": "2025-11-12T18:50:13.187636Z", "curation_status": "approved", "curation_status_updated_at": "2021-08-17T22:00:00Z", "comment_count": 9, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2030-01-01T00:00:00Z", "scheduled_resolve_time": "2031-01-01T00:00:00Z", "actual_resolve_time": null, "open_time": "2021-08-17T22:00:00Z", "nr_forecasters": 26, "html_metadata_json": null, "projects": { "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", 
"is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 6903, "title": "Will the WTA and ATP merge before 2031?", "created_at": "2021-03-22T13:43:02.728083Z", "open_time": "2021-08-17T22:00:00Z", "cp_reveal_time": "2021-08-19T22:00:00Z", "spot_scoring_time": "2021-08-19T22:00:00Z", "scheduled_resolve_time": "2031-01-01T00:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2030-01-01T00:00:00Z", "actual_close_time": "2030-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Professional [tennis](https://en.wikipedia.org/wiki/Tennis) is divided into two tours, the [ATP](https://en.wikipedia.org/wiki/ATP_Tour) for men and the [WTA](https://en.wikipedia.org/wiki/Women%27s_Tennis_Association) for women. There has been speculation for some time whether they will merge into a single tour. This happened most recently following [Roger Federer](https://twitter.com/rogerfederer/status/1252922285096423424)'s tweet in 2020:\n\n> Just wondering…..am I the only one thinking that now is the time for men’s and women’s tennis to be united and come together as one?\n\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Will ATP & WTA merge in next decade?\". 
The answer given was \"No\".", "resolution_criteria": "This question will resolve as **Yes** if both men and women play as part of one tour with one tour organiser in top competitive Tennis. It will also resolve as **Yes** if not all tournaments are mixed (ie if some currently all-female or all-male tournaments are part of the tour, that doesn't trigger negative resolution as long as the overarching tour organiser is one body)\n\nThis question will resolve as **No** if men and women play as part of two separate tours on the basis of gender.\n\nIn the event that it is unclear (eg the tennis tour fractures into multiple tours (WTA, ATP, mixed tour)) we will ask [Jeff Sackmann's](http://www.tennisabstract.com/) opinion and resolve at his discretion. (In the event he says it's unclear / is uncontactable / unwilling to decide, the question will resolve as **Ambiguous**", "fine_print": "", "post_id": 6903, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1762973402.937324, "end_time": 1764044073.656486, "forecaster_count": 12, "interval_lower_bounds": [ 0.37 ], "centers": [ 0.43 ], "interval_upper_bounds": [ 0.48 ] } ], "latest": { "start_time": 1762973402.937324, "end_time": 1764044073.656486, "forecaster_count": 12, "interval_lower_bounds": [ 0.37 ], "centers": [ 0.43 ], "interval_upper_bounds": [ 0.48 ], "forecast_values": [ 0.5700000000000001, 0.43 ], "means": [ 0.449515980277589 ], "histogram": [ [ 0.0, 0.08508524734423982, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4411588324588233, 0.0, 0.0, 0.0, 0.0, 0.362543115534516, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7394682331392083, 0.0, 0.0, 0.0, 0.0, 1.0, 0.5295781724391592, 0.0, 0.12874931591104752, 0.0, 0.29286789626133847, 0.862882435577901, 0.0, 0.23128568172579037, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17692120631776423, 0.0, 0.0, 0.0, 0.0, 
0.6286996658179782, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289592.490541, "end_time": null, "forecaster_count": 25, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289592.490541, "end_time": null, "forecaster_count": 25, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7578067025287256, 0.24219329747127433 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 100, "key_factors": [], "is_current_content_translated": false, "description": "Professional [tennis](https://en.wikipedia.org/wiki/Tennis) is divided into two tours, the [ATP](https://en.wikipedia.org/wiki/ATP_Tour) for men and the [WTA](https://en.wikipedia.org/wiki/Women%27s_Tennis_Association) for women. There has been speculation for some time whether they will merge into a single tour. This happened most recently following [Roger Federer](https://twitter.com/rogerfederer/status/1252922285096423424)'s tweet in 2020:\n\n> Just wondering…..am I the only one thinking that now is the time for men’s and women’s tennis to be united and come together as one?\n\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Will ATP & WTA merge in next decade?\". The answer given was \"No\"." 
}, { "id": 6898, "title": "Will most of the Quad/Five Eyes countries boycott the 2022 Winter Olympics?", "short_title": "Quad/FiveEyes boycott of 2022 Winter Olympics", "url_title": "Quad/FiveEyes boycott of 2022 Winter Olympics", "slug": "quadfiveeyes-boycott-of-2022-winter-olympics", "author_id": 117841, "author_username": "t.88", "coauthors": [], "created_at": "2021-03-22T09:03:02.001856Z", "published_at": "2021-04-08T04:00:00Z", "edited_at": "2025-09-05T17:29:08.439010Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-08T04:00:00Z", "comment_count": 39, "status": "resolved", "resolved": true, "actual_close_time": "2021-12-03T16:00:00Z", "scheduled_close_time": "2021-12-03T16:00:00Z", "scheduled_resolve_time": "2022-02-04T15:44:00Z", "actual_resolve_time": "2022-02-04T15:44:00Z", "open_time": "2021-04-08T04:00:00Z", "nr_forecasters": 122, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32597, "name": "2021 Leaderboard", "slug": "2021_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" }, { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, 
"header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 6898, "title": "Will most of the Quad/Five Eyes countries boycott the 2022 Winter Olympics?", "created_at": "2021-03-22T09:03:02.001856Z", "open_time": "2021-04-08T04:00:00Z", "cp_reveal_time": "2021-04-08T20:06:17.583609Z", "spot_scoring_time": "2021-04-08T20:06:17.583609Z", "scheduled_resolve_time": "2022-02-04T15:44:00Z", "actual_resolve_time": "2022-02-04T15:44:00Z", "resolution_set_time": "2022-02-04T15:44:00Z", "scheduled_close_time": "2021-12-03T16:00:00Z", "actual_close_time": "2021-12-03T16:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "China will host the 2022 Winter Olympic Games. 
Significant political tensions between China and the Western powers, especially the USA, as well as human rights' activists dissatisfaction with China's recent behavior - particularly over the 2019 Hong Kong Protests and the Xinjiang re-education camps - have led to calls for a relocation or boycott of the event.", "resolution_criteria": "For the purpose of this question, US or US-Allied nations includes the countries that belong to the Five Eyes or the Quadrilateral Security Dialogue. They are - Canada, USA, UK, Japan, Australia, India, New Zealand.\n\nThe question will be resolved on the first day of the Winter 2022 Olympic Games (currently scheduled for 2022-02-04), and will resolve later if the 2022 Olympic Games are postponed.\n\nThe question will resolve positively if at least four of the above-mentioned nations do not participate in the event, and negatively if three or fewer boycott.\n\nThe question will resolve ambiguously if the 2022 Winter Olympics is cancelled or relocated to another country for whatever reasons (including due to Covid-19 pandemic). 
If the 2022 Winter Olympics in China are not held before the next Winter Olympics (such as the 2026 Winter Olympics in Milan), the question will resolve ambiguously.\n\nFor this question, a boycott shall be defined as a nation's athletes competing without officially representing their country, and/or a nation holding a separate set of athletic events outside China", "fine_print": "", "post_id": 6898, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1638544176.433541, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.15 ] } ], "latest": { "start_time": 1638544176.433541, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.15 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.112740526699766 ], "histogram": [ [ 0.0, 0.339637695416246, 0.6603917843024188, 0.3004799072260629, 0.06474215620790466, 3.693928607538002, 1.5575709031127698, 0.4092903937191111, 2.19009261077034, 0.29756102746249596, 1.2734222247215872, 0.8677067022507088, 2.2450027067007485, 0.2777084928461296, 0.7021026327989963, 1.7885849924257609, 0.0, 0.0, 0.0003205856696931407, 0.0, 0.08716499577407077, 1.434538362778388, 0.5029445198864249, 0.6297982349163743, 0.3729215800614319, 0.8832749150599624, 0.0, 0.0, 0.0, 0.0, 0.000149338795229685, 0.0, 0.007589822952839385, 0.0, 0.0, 0.0019313626404353574, 0.0, 0.0, 0.0, 0.005214875732224013, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 6.565167425399423e-05, 0.0003770686253336921, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 4.687945436846261, "coverage": 0.999946865663953, "baseline_score": 80.09511577793404, "spot_peer_score": 5.960811191564206, 
"peer_archived_score": 4.687945436846261, "baseline_archived_score": 80.09511577793404, "spot_peer_archived_score": 5.960811191564206 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1638544176.472096, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1638544176.472096, "end_time": null, "forecaster_count": 122, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9569765829960007, 0.04302341700399926 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 17, "user_vote": null }, "forecasts_count": 263, "key_factors": [], "is_current_content_translated": false, "description": "China will host the 2022 Winter Olympic Games. Significant political tensions between China and the Western powers, especially the USA, as well as human rights' activists dissatisfaction with China's recent behavior - particularly over the 2019 Hong Kong Protests and the Xinjiang re-education camps - have led to calls for a relocation or boycott of the event." 
}, { "id": 6889, "title": "Will Serena Williams win another Tennis Grand Slam?", "short_title": "Serena Williams Wins Grand Slam", "url_title": "Serena Williams Wins Grand Slam", "slug": "serena-williams-wins-grand-slam", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-03-20T15:21:43.858454Z", "published_at": "2021-04-07T04:00:00Z", "edited_at": "2025-09-05T17:29:24.110633Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-07T04:00:00Z", "comment_count": 37, "status": "resolved", "resolved": true, "actual_close_time": "2024-09-03T16:34:00Z", "scheduled_close_time": "2025-01-01T00:00:00Z", "scheduled_resolve_time": "2025-01-01T00:00:00Z", "actual_resolve_time": "2024-09-03T16:34:00Z", "open_time": "2021-04-07T04:00:00Z", "nr_forecasters": 115, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "question_series": [ { "id": 1724, "type": "question_series", "name": "Verity", "slug": null, "header_image": "https://cdn.metaculus.com/verity.jpg", "prize_pool": null, "start_date": "2018-12-31T23:00:00Z", "close_date": "2119-12-30T23:00:00Z", "forecasting_end_date": "2032-12-21T05:00:00Z", "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T00:29:42.029359Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 1724, "type": "question_series", "name": "Verity", "slug": null, "header_image": "https://cdn.metaculus.com/verity.jpg", "prize_pool": null, "start_date": 
"2018-12-31T23:00:00Z", "close_date": "2119-12-30T23:00:00Z", "forecasting_end_date": "2032-12-21T05:00:00Z", "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T00:29:42.029359Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ] }, "question": { "id": 6889, "title": "Will Serena Williams win another Tennis Grand Slam?", "created_at": "2021-03-20T15:21:43.858454Z", "open_time": "2021-04-07T04:00:00Z", "cp_reveal_time": "2021-04-08T03:19:37.629238Z", "spot_scoring_time": "2021-04-08T03:19:37.629238Z", "scheduled_resolve_time": "2025-01-01T00:00:00Z", "actual_resolve_time": "2024-09-03T16:34:00Z", "resolution_set_time": "2024-09-03T16:34:00Z", "scheduled_close_time": "2025-01-01T00:00:00Z", "actual_close_time": "2024-09-03T16:34:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, 
"zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Serena Williams](https://en.wikipedia.org/wiki/Serena_Williams) is widely considered the greatest Tennis player of all time. She has won 23 Grand Slams in her career. 1 short of Margaret Court's record.\n\nHowever, she is starting to get older turning 40 in 2021 and she's slipped away from the #1 ranking. She last won a slam in 2017, although she's reached 4 slam finals since then.\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Last year, each of you said you expected Serena to win another major. Do you want to change your answer?\". The answer given was \"Probably not\" (ie probably she doesn't win another one).", "resolution_criteria": "This question resolves positive if Serena Williams wins a grand slam after 2021-04-01.\n\nThis question resolve negative if Serena Williams stops playing Tennis before winning another grand slam", "fine_print": "", "post_id": 6889, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1725288034.310292, "end_time": null, "forecaster_count": 115, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1725288034.310292, "end_time": null, "forecaster_count": 115, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.011289273997348121 ], "histogram": [ [ 9.037826190850565, 8.749220714826139, 0.3286435839237587, 0.4857004483215419, 0.10553348067187643, 0.12003155824420125, 0.8887957735840841, 0.0, 0.0, 0.04110034877217819, 0.16132427454019754, 0.0021522755529947324, 0.0, 0.0, 0.0019272342406730534, 0.0, 0.0, 0.0, 0.000520080144308091, 0.0008101800757223834, 0.0004176495388616647, 
0.0, 0.0, 0.0, 0.0, 0.003682732790746319, 0.004373014579243026, 0.001058585232616273, 0.005888908111284704, 0.0, 0.0, 0.00020597879790480454, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007033164117141776, 0.0, 0.00044217479300900296, 0.0, 0.0003102742470409554, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0039753087057917014, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00687927061288771 ] ] }, "score_data": { "peer_score": 9.016490775142062, "coverage": 0.9123334556390347, "baseline_score": 79.42503270240161, "spot_peer_score": 1.8123858034498932, "peer_archived_score": 9.016490775142062, "baseline_archived_score": 79.42503270240161, "spot_peer_archived_score": 1.8123858034498932 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1725349637.999454, "end_time": null, "forecaster_count": 115, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1725349637.999454, "end_time": null, "forecaster_count": 115, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9995, 0.0005 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 2, "user_vote": null }, "forecasts_count": 344, "key_factors": [], "is_current_content_translated": false, "description": "[Serena Williams](https://en.wikipedia.org/wiki/Serena_Williams) is widely considered the greatest Tennis player of all time. She has won 23 Grand Slams in her career. 1 short of Margaret Court's record.\n\nHowever, she is starting to get older turning 40 in 2021 and she's slipped away from the #1 ranking. 
She last won a slam in 2017, although she's reached 4 slam finals since then.\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Last year, each of you said you expected Serena to win another major. Do you want to change your answer?\". The answer given was \"Probably not\" (ie probably she doesn't win another one)." }, { "id": 6887, "title": "Will Jannik Sinner be the first man born in the 2000s to win a slam in Tennis?", "short_title": "Jannik Sinner first Gen Z slam winner", "url_title": "Jannik Sinner first Gen Z slam winner", "slug": "jannik-sinner-first-gen-z-slam-winner", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-03-20T14:55:41.261260Z", "published_at": "2021-04-10T04:00:00Z", "edited_at": "2025-09-05T17:29:28.994362Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-10T04:00:00Z", "comment_count": 18, "status": "resolved", "resolved": true, "actual_close_time": "2022-09-11T17:33:00Z", "scheduled_close_time": "2030-01-01T00:00:00Z", "scheduled_resolve_time": "2030-01-01T00:00:00Z", "actual_resolve_time": "2022-09-11T17:33:00Z", "open_time": "2021-04-10T04:00:00Z", "nr_forecasters": 36, "html_metadata_json": null, "projects": { "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", 
"is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 6887, "title": "Will Jannik Sinner be the first man born in the 2000s to win a slam in Tennis?", "created_at": "2021-03-20T14:55:41.261260Z", "open_time": "2021-04-10T04:00:00Z", "cp_reveal_time": "2021-04-12T04:00:00Z", "spot_scoring_time": "2021-04-12T04:00:00Z", "scheduled_resolve_time": "2030-01-01T00:00:00Z", "actual_resolve_time": "2022-09-11T17:33:00Z", "resolution_set_time": "2022-09-11T17:33:00Z", "scheduled_close_time": "2030-01-01T00:00:00Z", "actual_close_time": "2022-09-11T17:33:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Jannik Sinner](https://en.wikipedia.org/wiki/Jannik_Sinner) is an Italian professional tennis player. 
He is current the youngest tennis player in the top 10 based on Tennis Abstract's [Elo rankings](http://tennisabstract.com/reports/atp_elo_ratings.html)\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Who will be the first man born in the 2000s to win a Slam?\". Sinner was picked out as being most likely to win. To operationalise this, we ask what his chances are.\n\n***Will Jannik Sinner be the first man born in the 2000s to win a slam?***\n\nResolves positive if Jannik Sinner is the first male player born after the millennium to win a tennis Grand Slam. (One of Australian Open, Rolland Garros, Wimbledon, US Open).\n\nResolves negative if a player other than Jannik Sinner born after the millennium wins a tennis Grand Slam before Sinner.\n\nResolution will be by credible media reports.\n\nThis question closes retroactively day before the beginning of the tournament which triggers resolution.", "fine_print": "", "post_id": 6887, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1664185831.472444, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1664185831.472444, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.07675237424042397 ], "histogram": [ [ 0.0, 8.095256998421835, 0.0, 0.0, 0.0, 0.2659211825430975, 0.0, 0.0, 0.0, 0.0, 1.0671575027025026, 0.0, 0.0, 0.2168121871892493, 0.0, 0.0, 0.0, 0.0, 0.06594039540813919, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4835860218282165, 0.0, 0.0, 0.14006392172911664, 0.0, 0.05674149545770603, 0.0, 0.0, 0.0, 0.0, 0.10952494009961755, 0.016764573440571914, 0.0, 0.0, 0.0, 0.030314528419522604, 0.0, 0.0, 0.21409999499789023, 0.0, 0.0, 0.025253159325845955, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.007369969155267514, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.004870516352324537, 0.04232921962320499, 0.0, 0.0, 0.0, 0.2403700099395794, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07555473386203065 ] ] }, "score_data": { "peer_score": 2.8958342531817753, "coverage": 0.16290936910680326, "baseline_score": 7.568263446521127, "spot_peer_score": -0.4382525584058483, "peer_archived_score": 2.8958342531817753, "baseline_archived_score": 7.568263446521127, "spot_peer_archived_score": -0.4382525584058483 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1662536158.663528, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1662536158.663528, "end_time": null, "forecaster_count": 36, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9687869919663316, 0.031213008033668464 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 2, "user_vote": null }, "forecasts_count": 93, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 6886, "title": "Will Djokovic win 21 Tennis Grand Slams?", "short_title": "Djokovic Total Tennis Slams", "url_title": "Djokovic Total Tennis Slams", "slug": "djokovic-total-tennis-slams", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2021-03-20T14:46:13.894825Z", "published_at": "2021-04-02T07:00:00Z", "edited_at": "2025-09-05T17:29:09.010081Z", "curation_status": "approved", "curation_status_updated_at": "2021-04-02T07:00:00Z", "comment_count": 25, "status": "resolved", "resolved": true, "actual_close_time": "2021-07-24T14:02:00Z", "scheduled_close_time": "2021-07-24T14:02:00Z", 
"scheduled_resolve_time": "2022-07-10T20:33:00Z", "actual_resolve_time": "2022-07-10T20:33:00Z", "open_time": "2021-04-02T07:00:00Z", "nr_forecasters": 49, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32590, "name": "2021-2025 Leaderboard", "slug": "2021_2025_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 6886, "title": "Will Djokovic win 21 Tennis Grand Slams?", "created_at": "2021-03-20T14:46:13.894825Z", "open_time": "2021-04-02T07:00:00Z", "cp_reveal_time": "2021-04-04T07:00:00Z", "spot_scoring_time": "2021-04-04T07:00:00Z", "scheduled_resolve_time": "2022-07-10T20:33:00Z", "actual_resolve_time": "2022-07-10T20:33:00Z", "resolution_set_time": "2022-07-10T20:33:00Z", 
"scheduled_close_time": "2021-07-24T14:02:00Z", "actual_close_time": "2021-07-24T14:02:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Novak Djokovic](https://en.wikipedia.org/wiki/Novak_Djokovic_career_statistics) is one of the most successful tennis players of all time. As of time of writing (March 2021) he has 18 slams, two behind [Rafael Nadal](https://en.wikipedia.org/wiki/Roger_Federer) and [Roger Federer](https://en.wikipedia.org/wiki/Roger_Federer).\n\nThe Grand Slams in Tennis are: Australian Open, Rolland Garros, Wimbledon and US Open.\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Will Djokovic win #20? #21?\". The answer given was \"Yes and yes\".", "resolution_criteria": "This question resolves positive when Djokovic wins his 21st Grand Slam. 
(As reported by credible media reports)\n\nThis question resolves negative if Djokovic stops playing tennis before winning his 21st.\n\nThis question closes early if Djokovic wins his 20th Grand Slam", "fine_print": "", "post_id": 6886, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1627133741.539427, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.87 ], "centers": [ 0.93 ], "interval_upper_bounds": [ 0.96 ] } ], "latest": { "start_time": 1627133741.539427, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.87 ], "centers": [ 0.93 ], "interval_upper_bounds": [ 0.96 ], "forecast_values": [ 0.06999999999999995, 0.93 ], "means": [ 0.9104789688362553 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26102326165716355, 0.0, 0.0, 0.1561599880943003, 0.0, 0.0, 0.0, 0.0024787521766663585, 0.013686165309959817, 0.0, 0.0, 0.0, 0.0, 0.01660289071683996, 0.8043908332778769, 1.0312494221361335, 0.0, 0.010561813766971701, 0.7710467266525035, 0.04384841732294667, 0.04978706836786394, 0.0, 0.0, 0.8055774048334167, 0.08544194034641192, 1.674563079122574, 0.9287285510284102, 0.2702887174842956, 2.1255877445401867, 1.8655887224229246, 0.5304726716933951, 0.0, 1.0600043527819194 ] ] }, "score_data": { "peer_score": 0.5721624742122694, "coverage": 0.9974741082135973, "baseline_score": 65.32557722642069, "spot_peer_score": -8.26870097119778, "peer_archived_score": 0.5721624742122694, "baseline_archived_score": 65.32557722642069, "spot_peer_archived_score": -8.26870097119778 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1627133741.591351, "end_time": null, "forecaster_count": 49, 
"interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1627133741.591351, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.07455043217388668, 0.9254495678261133 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 13, "user_vote": null }, "forecasts_count": 97, "key_factors": [], "is_current_content_translated": false, "description": "[Novak Djokovic](https://en.wikipedia.org/wiki/Novak_Djokovic_career_statistics) is one of the most successful tennis players of all time. As of time of writing (March 2021) he has 18 slams, two behind [Rafael Nadal](https://en.wikipedia.org/wiki/Roger_Federer) and [Roger Federer](https://en.wikipedia.org/wiki/Roger_Federer).\n\nThe Grand Slams in Tennis are: Australian Open, Rolland Garros, Wimbledon and US Open.\n\nAs part of [The Tennis Abstract Podcast's 100th episode](http://www.tennisabstract.com/blog/2021/03/17/podcast-episode-100-100-questions-for-episode-100/) it was asked \"Will Djokovic win #20? #21?\". The answer given was \"Yes and yes\"." } ] }