Posts List Oldapi View
We shared this request example with FAB participants: url_qparams = { "limit": count, "offset": offset, "has_group": "false", "order_by": "-activity", "forecast_type": "binary", "project": tournament_id, "status": "open", "type": "forecast", "include_description": "true", } url = f"{api_info.base_url}/questions/" response = requests.get( url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams )
But we don't want to support all of these parameters; the relevant ones are: - order_by - status - project - forecast_type — this one is ignored but assumed to be binary, since FAB only supports binary questions for now.
GET /api2/questions/?format=api&offset=5300
{ "count": 6410, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5320", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5280", "results": [ { "id": 4863, "title": "Will Project Vesta still exist in 2030?", "short_title": "", "url_title": "", "slug": "will-project-vesta-still-exist-in-2030", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-07-21T17:43:21.411970Z", "published_at": "2020-07-26T23:00:00Z", "edited_at": "2025-09-05T17:28:58.445495Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-26T23:00:00Z", "comment_count": 7, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-06-30T23:00:00Z", "scheduled_resolve_time": "2030-06-30T23:00:00Z", "actual_resolve_time": null, "open_time": "2020-07-26T23:00:00Z", "nr_forecasters": 32, "html_metadata_json": null, "projects": { "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" } ], "question_series": [ { "id": 2982, "type": "question_series", "name": "Stripe Climate Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-07-26T07:00:00Z", "close_date": 
"2029-07-01T23:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-02-29T10:13:20.532760Z", "edited_at": "2025-09-30T04:27:52.279980Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2982, "type": "question_series", "name": "Stripe Climate Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-07-26T07:00:00Z", "close_date": "2029-07-01T23:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-02-29T10:13:20.532760Z", "edited_at": "2025-09-30T04:27:52.279980Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4863, "title": "Will Project Vesta still exist in 2030?", "created_at": "2020-07-21T17:43:21.411970Z", "open_time": "2020-07-26T23:00:00Z", "cp_reveal_time": "2020-07-28T23:00:00Z", "spot_scoring_time": "2020-07-28T23:00:00Z", "scheduled_resolve_time": "2030-06-30T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-06-30T23:00:00Z", "actual_close_time": "2029-06-30T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": ">Stripe, a financial-services company, [committed last year](https://stripe.com/blog/negative-emissions-commitment) to spending at least $1M/year on negative emissions and carbon storage technologies, at any price, with the aim of helping these technologies develop.\n\n>They recently wrote up [a summary](https://stripe.com/blog/first-negative-emissions-purchases) of their first set of these purchases.\n\n>This miniseries aims to predict the success of the companies that Stripe chose.\n\nStripe has purchased 3333.3 tons of negative emissions from Project Vesta at $75 per ton.\n\n[Project Vesta](https://projectvesta.org/) captures CO2 by using an abundant, naturally occurring mineral called olivine. Ocean waves grind down the olivine, increasing its surface area. As the olivine breaks down, it captures atmospheric CO2 from within the ocean and stabilizes it as limestone on the seafloor. They have not publicly set a long-term price target.\n\n**This question asks:**\n> On 2030/7/1, will Project Vesta still be selling negative emissions using broadly similar technology to their 2020 approach?\n\nThis question resolves positive if [this](https://www.metaculus.com/questions/4858/what-will-be-the-cost-of-negative-emissions-sold-by-project-vesta-in-2030/) question has an unambiguous resolution. 
Otherwise, it resolves negative.", "fine_print": "", "post_id": 4863, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1740023566.246358, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.22 ], "centers": [ 0.32 ], "interval_upper_bounds": [ 0.56 ] } ], "latest": { "start_time": 1740023566.246358, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": [ 0.22 ], "centers": [ 0.32 ], "interval_upper_bounds": [ 0.56 ], "forecast_values": [ 0.6799999999999999, 0.32 ], "means": [ 0.4146964830511399 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.19073805166550978, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07016860785683743, 0.014369596090439076, 0.422729524609672, 0.7620908974955233, 0.02581358824615143, 0.1876730211456733, 0.4903135774079878, 0.3058323220141224, 0.0, 0.5724472223148853, 0.2835798475679543, 0.0, 0.0, 0.09630553750946041, 0.0, 0.11160910633783082, 0.8355804261814468, 1.7133721378374211, 0.03268672417676853, 0.0, 0.21572547604369705, 0.0, 0.0, 0.0, 0.38043555332875517, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9147633448549574, 0.0, 0.0, 0.0, 0.4884074031868188, 0.0, 0.0, 0.0, 0.3415440718595426, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0094962884186239, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6939525952509218, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6657926091654209 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289341.150483, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289341.150483, "end_time": null, "forecaster_count": 32, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.780226543490661, 0.21977345650933902 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, 
"user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 104, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4862, "title": "Will Climeworks still exist in 2030?", "short_title": "", "url_title": "", "slug": "will-climeworks-still-exist-in-2030", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-07-21T17:42:31.824246Z", "published_at": "2020-07-26T23:00:00Z", "edited_at": "2025-09-05T17:29:28.749547Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-26T23:00:00Z", "comment_count": 6, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-06-30T23:00:00Z", "scheduled_resolve_time": "2030-06-30T23:00:00Z", "actual_resolve_time": null, "open_time": "2020-07-26T23:00:00Z", "nr_forecasters": 37, "html_metadata_json": null, "projects": { "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" } ], "question_series": [ { "id": 2982, "type": "question_series", "name": "Stripe Climate Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-07-26T07:00:00Z", "close_date": "2029-07-01T23:00:00Z", "forecasting_end_date": 
null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-02-29T10:13:20.532760Z", "edited_at": "2025-09-30T04:27:52.279980Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2982, "type": "question_series", "name": "Stripe Climate Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-07-26T07:00:00Z", "close_date": "2029-07-01T23:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": null, "created_at": "2024-02-29T10:13:20.532760Z", "edited_at": "2025-09-30T04:27:52.279980Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4862, "title": "Will Climeworks still exist in 2030?", "created_at": "2020-07-21T17:42:31.824246Z", "open_time": "2020-07-26T23:00:00Z", "cp_reveal_time": "2020-07-28T23:00:00Z", "spot_scoring_time": "2020-07-28T23:00:00Z", "scheduled_resolve_time": "2030-06-30T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-06-30T23:00:00Z", "actual_close_time": "2029-06-30T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null 
}, "group_rank": null, "description": "", "resolution_criteria": ">Stripe, a financial-services company, [committed last year](https://stripe.com/blog/negative-emissions-commitment) to spending at least $1M/year on negative emissions and carbon storage technologies, at any price, with the aim of helping these technologies develop.\n\n>They recently wrote up [a summary](https://stripe.com/blog/first-negative-emissions-purchases) of their first set of these purchases.\n\n>This miniseries aims to predict the success of the companies that Stripe chose.\n\nStripe has purchased 322.5 tons of negative emissions from Climeworks at $775 per ton.\n\n[Climeworks](https://www.climeworks.com/) uses renewable geothermal energy and waste heat to capture CO2 directly from the air, concentrate it, and permanently sequester it underground in basaltic rock formations with Carbfix. Their own price target in the long-term is $100-$200 per ton.\n\n**This question asks:**\n> On 2030/7/1, will Climeworks still be selling negative emissions using broadly similar technology to their 2020 approach?\n\nThis question resolves positive if [this](https://www.metaculus.com/questions/4857/what-will-be-the-cost-of-negative-emissions-sold-by-climeworks-in-2030/) question has an unambiguous resolution. 
Otherwise, it resolves negative.", "fine_print": "", "post_id": 4862, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763111980.097138, "end_time": 1764968785.34645, "forecaster_count": 23, "interval_lower_bounds": [ 0.4 ], "centers": [ 0.4737323189026249 ], "interval_upper_bounds": [ 0.7 ] } ], "latest": { "start_time": 1763111980.097138, "end_time": 1764968785.34645, "forecaster_count": 23, "interval_lower_bounds": [ 0.4 ], "centers": [ 0.4737323189026249 ], "interval_upper_bounds": [ 0.7 ], "forecast_values": [ 0.5262676810973751, 0.4737323189026249 ], "means": [ 0.5223896627266444 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02246421852694655, 0.0, 0.0, 0.0, 0.0, 0.04671076085250769, 0.0, 0.0, 1.3041360186573907, 0.0, 0.0, 0.0, 0.26402013543029734, 0.0, 0.0, 0.0, 0.0, 0.0, 0.061064077012331824, 0.22781833750170227, 0.8582084815747382, 0.39738560315786137, 0.0, 0.13981930080831617, 0.0, 0.0, 0.1659893709142453, 1.3511561689987446, 0.0, 0.0, 0.07732302163410486, 0.5751117920109702, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7234704561894825, 0.0, 0.0, 0.0, 0.0, 0.8079494144037086, 0.0, 0.0, 0.0, 0.0, 0.3484801054844811, 0.0, 0.0, 0.0, 0.0, 0.5103156119575218, 0.0, 0.0, 0.0, 0.0, 0.19523450305981643, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0339924118233929 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289528.727814, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289528.727814, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5855251899636822, 0.4144748100363178 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": 
"forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 169, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4849, "title": "Will any science fiction literature originally written in Spanish win a major science fiction award before 2031?", "short_title": "Spanish Sci-Fi Award By 2031?", "url_title": "Spanish Sci-Fi Award By 2031?", "slug": "spanish-sci-fi-award-by-2031", "author_id": 103396, "author_username": "francisco.jariego", "coauthors": [], "created_at": "2020-07-19T13:36:37.601317Z", "published_at": "2020-08-03T07:00:00Z", "edited_at": "2025-11-21T06:52:35.864571Z", "curation_status": "approved", "curation_status_updated_at": "2020-08-03T07:00:00Z", "comment_count": 13, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2030-12-31T22:59:00Z", "scheduled_resolve_time": "2030-12-31T22:59:00Z", "actual_resolve_time": null, "open_time": "2020-08-03T07:00:00Z", "nr_forecasters": 69, "html_metadata_json": null, "projects": { "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, 
"is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4849, "title": "Will any science fiction literature originally written in Spanish win a major science fiction award before 2031?", "created_at": "2020-07-19T13:36:37.601317Z", "open_time": "2020-08-03T07:00:00Z", "cp_reveal_time": "2020-08-05T06:57:23.721340Z", "spot_scoring_time": "2020-08-05T06:57:23.721340Z", "scheduled_resolve_time": "2030-12-31T22:59:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2030-12-31T22:59:00Z", "actual_close_time": "2030-12-31T22:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "Spanish Science Fiction has only a token presence internationally. In spite of pioneering works such as El Anacronopete, the first story involving a time machine, and prestigious authors like Miguel de Unamuno, Leopoldo Alas Clarin or Pedro Salinas writing SF stories, Spain has failed to impress the international readership with a universally embraced SF classic. 
Most writers and titles seem to be invisible not only to foreign readers and spectators, but also to their local peers, to the extent of being considered a \"phantom genre.\"\n\nThe Spanish-speaking public, however, enjoys science fiction like the rest of the world. Spanish literature does not lack imagination, and indeed has produced cultural phenomena such as magical realism during the Latin American Boom. Rapid technological development has made science fiction increasingly familiar, not only a resource for entertainment, but also a valuable tool in marketing or future studies. And numerous Spanish authors keep trying.\n\n(EN ESPAÑOL:)\n\n***¿Ganará una obra de Ciencia Ficción escrita y publicada originalmente en español hasta el año 2029 incluido alguno de los grandes premios internaciones que acreditan a los grandes autores del género?***\n\nEspaña tiene una presencia testimonial en el panorama internacional de la ciencia ficción. A pesar de que haber preconizado ideas como la máquina de tiempo (El Anacronopete) y de que autores de prestigio como Miguel de Unamuno, Leopoldo Alas Clarín o Pedro Salinas se aproximaran al género, la ciencia ficción española no ha logrado cautivar a los lectores extranjeros con un clásico universalmente aceptado y la mayoría de los escritores y títulos parecen invisibles, no solo fuera de España, sino también entre sus pares de profesión. Hay quien la ha llegado a describir como un «género fantasma».\n\nEl público español disfruta, sin embargo, con la ciencia ficción como en el resto del mundo, y la literatura en español no solo no carece de imaginación, sino que ha producido fenómenos culturales como el realismo mágico del boom latinoamericano. El rápido desarrollo tecnológico ha convertido a la ciencia ficción en algo cada vez más familiar, no solo un recurso para el entretenimiento, sino una herramienta para el marketing y el debate de ideas. 
Numerosos autores españoles lo siguen intentando.", "resolution_criteria": "Resolution will be positive if a short story, novelette, novella or novel originally written in Spanish and published by the end of 2029, in any medium, is granted one of the following awards: Hugo, Nebula, John W. Campbell, Theodore Sturgeon or Arthur C. Clarke, before the end of the year 2030.\n\n(Note: except for the Hugo Prize, these prizes currently require a previous translation into English. The answer will be positive if and only if the work was originally written and published in Spanish).\n\n(EN ESPAÑOL:)\n\n***¿Ganará una obra de Ciencia Ficción escrita y publicada originalmente en español hasta el año 2029 incluido alguno de los grandes premios internaciones que acreditan a los grandes autores del género?***\n\nLa respuesta será SI, si un cuento corto, novela corta (en cualquiera de sus extensiones) o novela escrita en castellano y publicada hasta el año 2029, en cualquier medio, recibe uno de los siguientes premios: Hugo, Nébula, John W. Campbell, Theodore Sturgeon o Arthur C. clarke, antes del cierre del año 2030.\n\n(Nota, en la actualidad excepto el premio Hugo al que podría acceder directamente una obra escrita en español, el resto requieren una traducción previa al inglés. 
La respuesta será positiva si y solo sí la obra fue escrita y publicada con anterioridad en español).", "fine_print": "", "post_id": 4849, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763904180.171771, "end_time": 1765134450.0009, "forecaster_count": 47, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.41 ] } ], "latest": { "start_time": 1763904180.171771, "end_time": 1765134450.0009, "forecaster_count": 47, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.3 ], "interval_upper_bounds": [ 0.41 ], "forecast_values": [ 0.7, 0.3 ], "means": [ 0.36455132313940036 ], "histogram": [ [ 0.0, 0.044423031630246905, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007784236121081673, 0.0, 0.0028636604341700815, 0.0, 0.0, 0.0, 0.0, 0.15635056202879707, 0.0, 0.1902336590916781, 0.0, 0.0, 0.029221049724499126, 0.0, 0.8629050061859964, 0.03877020169610887, 0.0575181573859948, 1.8807466956942618, 0.0, 0.2581398902959248, 1.3015452444310402, 0.0, 1.2859373544976131, 0.0, 0.017823678094023753, 0.0, 0.3589031494592382, 0.05065733435182447, 0.021159747596370834, 0.0, 0.10299458082921686, 0.7421409634973282, 1.450333606481431, 1.014847804209651, 0.22981289680703934, 0.0, 0.0, 0.30155575609455343, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5429942209064903, 0.0, 0.0, 0.0, 0.8157988505018527, 0.0, 0.0, 0.0, 0.0, 0.1274765214171608, 0.0, 0.0, 0.0, 0.0, 0.009856869823379553, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09222549710312164, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08235174299721564, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14132753844395196 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287024.019995, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287024.019995, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": null, "centers": null, 
"interval_upper_bounds": null, "forecast_values": [ 0.8299053714831299, 0.17009462851687004 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 9, "user_vote": null }, "forecasts_count": 213, "key_factors": [], "is_current_content_translated": false, "description": "Spanish Science Fiction has only a token presence internationally. In spite of pioneering works such as El Anacronopete, the first story involving a time machine, and prestigious authors like Miguel de Unamuno, Leopoldo Alas Clarin or Pedro Salinas writing SF stories, Spain has failed to impress the international readership with a universally embraced SF classic. Most writers and titles seem to be invisible not only to foreign readers and spectators, but also to their local peers, to the extent of being considered a \"phantom genre.\"\n\nThe Spanish-speaking public, however, enjoys science fiction like the rest of the world. Spanish literature does not lack imagination, and indeed has produced cultural phenomena such as magical realism during the Latin American Boom. Rapid technological development has made science fiction increasingly familiar, not only a resource for entertainment, but also a valuable tool in marketing or future studies. And numerous Spanish authors keep trying.\n\n(EN ESPAÑOL:)\n\n***¿Ganará una obra de Ciencia Ficción escrita y publicada originalmente en español hasta el año 2029 incluido alguno de los grandes premios internaciones que acreditan a los grandes autores del género?***\n\nEspaña tiene una presencia testimonial en el panorama internacional de la ciencia ficción. 
A pesar de que haber preconizado ideas como la máquina de tiempo (El Anacronopete) y de que autores de prestigio como Miguel de Unamuno, Leopoldo Alas Clarín o Pedro Salinas se aproximaran al género, la ciencia ficción española no ha logrado cautivar a los lectores extranjeros con un clásico universalmente aceptado y la mayoría de los escritores y títulos parecen invisibles, no solo fuera de España, sino también entre sus pares de profesión. Hay quien la ha llegado a describir como un «género fantasma».\n\nEl público español disfruta, sin embargo, con la ciencia ficción como en el resto del mundo, y la literatura en español no solo no carece de imaginación, sino que ha producido fenómenos culturales como el realismo mágico del boom latinoamericano. El rápido desarrollo tecnológico ha convertido a la ciencia ficción en algo cada vez más familiar, no solo un recurso para el entretenimiento, sino una herramienta para el marketing y el debate de ideas. Numerosos autores españoles lo siguen intentando." 
}, { "id": 4838, "title": "Will there be deadly conflict in the South China Sea in 2020?", "short_title": "", "url_title": "", "slug": "will-there-be-deadly-conflict-in-the-south-china-sea-in-2020", "author_id": 100912, "author_username": "PeterWildeford", "coauthors": [], "created_at": "2020-07-17T03:37:41.418966Z", "published_at": "2020-07-24T22:00:00Z", "edited_at": "2025-09-05T17:29:00.670806Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-24T22:00:00Z", "comment_count": 22, "status": "resolved", "resolved": true, "actual_close_time": "2020-10-31T06:00:00Z", "scheduled_close_time": "2020-10-31T06:00:00Z", "scheduled_resolve_time": "2021-01-01T00:21:00Z", "actual_resolve_time": "2021-01-01T00:21:00Z", "open_time": "2020-07-24T22:00:00Z", "nr_forecasters": 91, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", 
"is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 4838, "title": "Will there be deadly conflict in the South China Sea in 2020?", "created_at": "2020-07-17T03:37:41.418966Z", "open_time": "2020-07-24T22:00:00Z", "cp_reveal_time": "2020-07-26T00:43:38.880440Z", "spot_scoring_time": "2020-07-26T00:43:38.880440Z", "scheduled_resolve_time": "2021-01-01T00:21:00Z", "actual_resolve_time": "2021-01-01T00:21:00Z", "resolution_set_time": "2021-01-01T00:21:00Z", "scheduled_close_time": "2020-10-31T06:00:00Z", "actual_close_time": "2020-10-31T06:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In the last decade, China has increased its power and reach over [the South China Sea](https://en.wikipedia.org/wiki/South_China_Sea), building multiple armed bases and making increasingly aggressive moves.\n\n[The United States has sent two aircraft carriers in the South China Sea](https://www.nytimes.com/2020/07/04/us/politics/south-china-sea-aircraft-carrier.html) and has for the first time [officially pushed 
back](https://www.hindustantimes.com/world-news/pompeo-amps-up-pitch-says-will-use-all-tools-to-support-countries-over-south-china-sea/story-I3nUnUD7Oks1dcikQ6zjPI.html) against China's disputed territorial claims, saying it would use \"all tools\" to oppose China. Together this increases the possibility of a regional conflict.\n\nThis comes not too long after [a deadly border conflict between India and China](https://en.wikipedia.org/wiki/Sino-Indian_border_dispute#2020_skirmishes) in which at least 20 people were killed. Would we be likely to see a similar level of conflict between the US and China in 2020?\n\nThis question will resolve positive if a credible media report indicates that there are at least five Chinese and/or American military personnel killed within a one-week period, with all deaths attributed to some conflict between the Chinese and American armed forces, and with all deaths taking place within the South China Sea before 2021.\n\nThis question will close retroactively to three days prior to the date of the first death of the one-week period.", "fine_print": "", "post_id": 4838, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1604120664.887005, "end_time": null, "forecaster_count": 91, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.03 ], "interval_upper_bounds": [ 0.08 ] } ], "latest": { "start_time": 1604120664.887005, "end_time": null, "forecaster_count": 91, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.03 ], "interval_upper_bounds": [ 0.08 ], "forecast_values": [ 0.97, 0.03 ], "means": [ 0.05448401621525003 ], "histogram": [ [ 0.0, 4.567910012247287, 1.903347800582808, 3.7805385225711317, 0.5297832464017308, 1.7385934947960076, 0.5070251442677864, 0.0, 1.392333061264647, 0.00965371437268609, 0.8803163457539387, 0.01861959560679366, 0.003928914836847643, 0.0, 0.6511869951896028, 0.2094973131076473, 0.0, 0.0, 0.0, 0.0, 0.9465436071604256, 0.004443617915320058, 0.0634779910345077, 0.07891436402356318, 
0.25819523838988884, 0.0005317208022573057, 0.0, 0.0, 0.0, 0.0, 0.03738650641033331, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0010142146568156748, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 10.036336375032679, "coverage": 0.9976772585640294, "baseline_score": 89.14306731607464, "spot_peer_score": 5.793591219202654, "peer_archived_score": 10.036336375032679, "baseline_archived_score": 89.14306731607464, "spot_peer_archived_score": 5.793591219202654 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1604120664.91907, "end_time": null, "forecaster_count": 91, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1604120664.91907, "end_time": null, "forecaster_count": 91, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9938889515283623, 0.006111048471637736 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 20, "user_vote": null }, "forecasts_count": 173, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4836, "title": "Will China's Tianwen-1 rover successfully land on Mars?", "short_title": "China's Tianwen-1 rover is succesful", "url_title": "China's Tianwen-1 rover is succesful", "slug": "chinas-tianwen-1-rover-is-succesful", "author_id": 106736, "author_username": "nagolinc", "coauthors": [], "created_at": "2020-07-16T13:51:46.770890Z", "published_at": "2020-08-13T07:00:00Z", "edited_at": "2025-09-05T17:29:23.118887Z", "curation_status": "approved", "curation_status_updated_at": "2020-08-13T07:00:00Z", "comment_count": 32, 
"status": "resolved", "resolved": true, "actual_close_time": "2021-03-18T10:00:00Z", "scheduled_close_time": "2021-03-18T10:00:00Z", "scheduled_resolve_time": "2021-06-28T12:46:00Z", "actual_resolve_time": "2021-06-28T12:46:00Z", "open_time": "2020-08-13T07:00:00Z", "nr_forecasters": 96, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3695, "name": "Space", "slug": "space", "emoji": "🚀", "description": "Space", "type": "category" } ] }, "question": { "id": 4836, "title": "Will China's Tianwen-1 rover successfully land on Mars?", "created_at": "2020-07-16T13:51:46.770890Z", "open_time": "2020-08-13T07:00:00Z", 
"cp_reveal_time": "2020-08-14T08:36:46.222132Z", "spot_scoring_time": "2020-08-14T08:36:46.222132Z", "scheduled_resolve_time": "2021-06-28T12:46:00Z", "actual_resolve_time": "2021-06-28T12:46:00Z", "resolution_set_time": "2021-06-28T12:46:00Z", "scheduled_close_time": "2021-03-18T10:00:00Z", "actual_close_time": "2021-03-18T10:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "China's [Tianwen-1](https://en.wikipedia.org/wiki/Tianwen-1) mission is expected to launch in late July of 2020. It consists of an orbiter, lander and rover. 
This question asks:\n\n**\"Will China's Tianwen-1 rover successfully land on Mars?\"**\n\n---\n\nThe question resolves positive if the rover lands successfully and is able to travel at least 30 meters on the surface of Mars after landing and will resolve positive based off of credible media reporting that this has happened.\n\nThe question will resolve negatively if a mission identified as Tianwen-1 fails for any reason (explodes at launch, fails to survive descent to Mars surface, unable to establish communication, rover does not travel at least 30 meters and is unable to move further) based off of credible media reporting that this has happened.\n\nThe question resolves ambiguously if no mission identified as Tianwen-1 attempts to launch before December 31st, 2023.", "fine_print": "", "post_id": 4836, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1616055692.585733, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.74 ], "interval_upper_bounds": [ 0.79 ] } ], "latest": { "start_time": 1616055692.585733, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.74 ], "interval_upper_bounds": [ 0.79 ], "forecast_values": [ 0.26, 0.74 ], "means": [ 0.7302215517288355 ], "histogram": [ [ 0.0, 0.01454955880794046, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.049014995944548286, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0007831332794337743, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.017363302970072207, 0.0, 0.0, 0.0, 0.03625353907742515, 0.0, 0.38487057945307895, 0.0, 0.0, 0.15072782623248232, 0.04551310199086637, 0.0, 0.402619007983522, 0.8605432069039223, 2.6485636691697123, 0.4264944679860645, 0.04883882966951377, 0.013290133035904001, 0.807308829189947, 0.620218381394492, 1.6023113055582159, 0.25364764538129814, 0.5313271557040812, 0.997229287439688, 
1.4971517569439758, 0.902488567631141, 0.5184582971917374, 0.1530396155059593, 1.375964171151747, 1.41094932919084, 0.0, 0.042226411430796126, 0.025387373569805124, 0.39489997724859494, 0.852735796438567, 0.0, 0.6501048397718527, 0.0, 0.0, 0.3571189897811597, 0.0, 0.0, 0.0, 0.0, 0.008246561353721116, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 6.410021229196627, "coverage": 0.9985235269663599, "baseline_score": 44.44730429940932, "spot_peer_score": 22.8125842305486, "peer_archived_score": 6.410021229196627, "baseline_archived_score": 44.44730429940932, "spot_peer_archived_score": 22.8125842305486 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1616055692.705009, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1616055692.705009, "end_time": null, "forecaster_count": 96, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.23527959356662542, 0.7647204064333746 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 18, "user_vote": null }, "forecasts_count": 200, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4835, "title": "[Short Fuse] Will the UK's Intelligence and Security committee publish the report into Russian interference by the end of July?", "short_title": "", "url_title": "", "slug": "short-fuse-will-the-uks-intelligence-and-security-committee-publish-the-report-into-russian-interference-by-the-end-of-july", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-07-16T10:34:18.633784Z", "published_at": "2020-07-17T22:00:00Z", "edited_at": "2025-09-05T17:28:46.694124Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-17T22:00:00Z", "comment_count": 11, "status": "resolved", "resolved": true, 
"actual_close_time": "2020-07-21T09:30:00Z", "scheduled_close_time": "2020-07-31T22:59:00Z", "scheduled_resolve_time": "2020-07-31T22:59:00Z", "actual_resolve_time": "2020-07-21T09:30:00Z", "open_time": "2020-07-17T22:00:00Z", "nr_forecasters": 19, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4835, "title": "[Short Fuse] Will the UK's Intelligence and Security committee publish the report into Russian interference by the end of July?", "created_at": "2020-07-16T10:34:18.633784Z", "open_time": "2020-07-17T22:00:00Z", "cp_reveal_time": "2020-07-18T21:32:47.279736Z", "spot_scoring_time": "2020-07-18T21:32:47.279736Z", 
"scheduled_resolve_time": "2020-07-31T22:59:00Z", "actual_resolve_time": "2020-07-21T09:30:00Z", "resolution_set_time": "2020-07-21T09:30:00Z", "scheduled_close_time": "2020-07-31T22:59:00Z", "actual_close_time": "2020-07-21T09:30:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In March 2019, a report into Russian activity including \"allegations of espionage, subversion and interference in elections\" was finalised by the UK Parliament's Intelligence and Security Committee. 
\n\n[It was referred to Number 10 on 17 October of that year.](https://www.bbc.co.uk/news/uk-politics-50256887)\n\n[At the start of November, Number 10 indicated that there was no intention of releasing it before the General Election on 12 December, despite prominent calls for its immediate release, including from the chairman of the committee, Dominic Grieve.](https://www.bbc.co.uk/news/uk-politics-50294569)\n\nIn 2020, news emerged that Boris Johnson was planning to replace the head of the Intelligence and Security committee with Chris Grayling, a former transport secretary whose track record includes a [widely discussed awarding of a £13.8m contract to provide freight services in case of a No-Deal Brexit to a company which had never run a Ferry service and owned no ships.](https://www.bbc.co.uk/news/business-46704522) This decision was [loudly](https://www.independent.co.uk/news/uk/politics/boris-johnson-chris-grayling-intelligence-security-committee-gchq-a9393451.html), and [repeatedly](https://www.theguardian.com/politics/2020/mar/11/chris-grayling-to-head-body-that-will-have-final-say-over-russia-report) criticised, [including by Johnson's own Conservative Party](https://www.thetimes.co.uk/article/tories-angry-at-no10-plan-to-give-chris-grayling-intelligence-role-qxzcx95hn).\n\nOn 2020/7/15, however, Chris Grayling failed to win the vote to gain leadership of the committee, despite the Conservatives having control of it. The winner, a Conservative MP named Julian Lewis, was [promptly kicked out of the Conservative Party.](https://www.theguardian.com/politics/2020/jul/15/chris-grayling-fails-to-become-intelligence-and-security-chair)\n\nAs of 2020/07/16, no report has been released.\n\n\n\n**This question asks:** \n\n>Will the UK's Intelligence and Security committee publish a report into Russian interference in UK democracy, before the end of July?\n\nThis question will resolve on the basis of credible media reports. 
In the case of minor redactions, the report will still count as having been released. \n\nIf the report is released, but with redactions so significant that multiple mainstream UK newspapers mention a cover-up, this question should resolve ambiguously. For the purpose of this question, mainstream UK newspapers are any newspapers which appear on the BBC's [The Papers](https://www.bbc.co.uk/news/blogs/the_papers) page.", "fine_print": "", "post_id": 4835, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1595333718.253518, "end_time": null, "forecaster_count": 22, "interval_lower_bounds": [ 0.94 ], "centers": [ 0.97 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1595333718.253518, "end_time": null, "forecaster_count": 22, "interval_lower_bounds": [ 0.94 ], "centers": [ 0.97 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.030000000000000027, 0.97 ], "means": [ 0.9493195514878648 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6152047490237542, 0.0, 0.0, 0.0, 0.0, 0.05190371288004473, 0.0, 0.0, 0.8658840500227839, 0.0, 0.08591921520153545, 0.0, 0.12942361094527205, 0.03777143320256801, 0.25314547146501193, 1.5368195773976971, 0.0, 0.441563953841307, 0.0, 3.8835227247221926 ] ] }, "score_data": { "peer_score": 1.558365064143857, "coverage": 0.2472741359430709, "baseline_score": 19.752895852803128, "spot_peer_score": 11.347963010305048, "peer_archived_score": 1.558365064143857, "baseline_archived_score": 19.752895852803128, "spot_peer_archived_score": 11.347963010305048 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 
1595318584.198158, "end_time": null, "forecaster_count": 19, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1595318584.198158, "end_time": null, "forecaster_count": 19, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.01923837789200178, 0.9807616221079982 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 30, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4826, "title": "Will any Wirecard executive receive a custodial sentence for their involvement in the Wirecard collapse?", "short_title": "Wirecard Fraud Convictions", "url_title": "Wirecard Fraud Convictions", "slug": "wirecard-fraud-convictions", "author_id": 112076, "author_username": "SimonM", "coauthors": [], "created_at": "2020-07-15T15:04:06.825731Z", "published_at": "2020-11-23T23:00:00Z", "edited_at": "2025-09-05T17:29:08.849919Z", "curation_status": "approved", "curation_status_updated_at": "2020-11-23T23:00:00Z", "comment_count": 16, "status": "resolved", "resolved": true, "actual_close_time": "2022-01-01T00:00:00Z", "scheduled_close_time": "2022-01-01T00:00:00Z", "scheduled_resolve_time": "2023-12-31T23:00:00Z", "actual_resolve_time": "2023-12-31T23:00:00Z", "open_time": "2020-11-23T23:00:00Z", "nr_forecasters": 30, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3688, "name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, 
"html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4826, "title": "Will any Wirecard executive receive a custodial sentence for their involvement in the Wirecard collapse?", "created_at": "2020-07-15T15:04:06.825731Z", "open_time": "2020-11-23T23:00:00Z", "cp_reveal_time": "2020-11-25T23:00:00Z", "spot_scoring_time": "2020-11-25T23:00:00Z", "scheduled_resolve_time": "2023-12-31T23:00:00Z", "actual_resolve_time": "2023-12-31T23:00:00Z", "resolution_set_time": "2023-12-31T23:00:00Z", "scheduled_close_time": "2022-01-01T00:00:00Z", "actual_close_time": "2022-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Wirecard uncovered a €1.9bn hole in its balance sheet in June 2020. \n\nThe company had faced accusations for some time that it has been engaged in accounting fraud.\n\nMore details can be found here:\n\n- https://www.ft.com/content/284fb1ad-ddc0-45df-a075-0709b36868db\n- https://en.wikipedia.org/wiki/Wirecard_scandal\n\nMarkus Braun, Oliver Bellenhaus have been arrested by German prosecutors. Alexander von Knoop and Susanne Steidl are under investigation. Jan Marsalek is a fugitive with an active arrest warrant.\n\nThis question resolves true if any senior Wirecard executives are given a custodial sentence for crimes relating to fraud at Wirecard. This list includes:\n\n- Markus Braun\n- Jan Marsalek\n- Alexander von Knoop\n- Susanne Steidl \n- Jan Marsalek\n- Oliver Bellenhaus\n\n\nThe imprisonment must be as a result of a conviction, pre-trial detainment will count if the court uses it as part of the sentence (eg time-served). 
Any country's court system is valid, although the conviction needs to related to Wirecard.\n\nConvictions in absentia will not result in the question resolving positive unless they are eventually caught and serve their punishment", "fine_print": "", "post_id": 4826, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1640990455.403256, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.55 ], "centers": [ 0.61 ], "interval_upper_bounds": [ 0.62 ] } ], "latest": { "start_time": 1640990455.403256, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.55 ], "centers": [ 0.61 ], "interval_upper_bounds": [ 0.62 ], "forecast_values": [ 0.39, 0.61 ], "means": [ 0.5955423493631269 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2581744010322857, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.11525585627893538, 0.0, 0.0, 0.0, 0.41443698810707236, 1.9788421984930782, 0.0, 0.0839758874887769, 0.0, 0.9164845817547782, 0.6959467315747595, 1.8861311565282528, 0.8305036517217669, 0.0, 1.0757974995594175, 0.9057410009691461, 0.0, 0.0, 0.0, 0.03911858530021452, 0.0, 0.0, 0.030893002561247163, 0.0, 0.0, 0.07073615502467667, 0.153865828537011, 0.0, 0.0, 0.0, 0.011364900518339544, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 10.821419822000518, "coverage": 0.9999276418996722, "baseline_score": -39.31063440173862, "spot_peer_score": 1.3207560533325005, "peer_archived_score": 10.821419822000518, "baseline_archived_score": -39.31063440173862, "spot_peer_archived_score": 1.3207560533325005 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1640990455.421581, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, 
"interval_upper_bounds": null } ], "latest": { "start_time": 1640990455.421581, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.4006629651489426, 0.5993370348510574 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 2, "user_vote": null }, "forecasts_count": 75, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4821, "title": "Will EA Global London 2021 be cancelled, rescheduled, or moved online?", "short_title": "", "url_title": "", "slug": "will-ea-global-london-2021-be-cancelled-rescheduled-or-moved-online", "author_id": 105906, "author_username": "mattvdm", "coauthors": [], "created_at": "2020-07-15T11:05:13.689753Z", "published_at": "2020-07-18T22:00:00Z", "edited_at": "2025-09-05T17:28:59.625216Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-18T22:00:00Z", "comment_count": 5, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-14T10:00:00Z", "scheduled_close_time": "2021-04-29T23:00:00Z", "scheduled_resolve_time": "2021-04-29T23:00:00Z", "actual_resolve_time": "2021-01-14T10:00:00Z", "open_time": "2020-07-18T22:00:00Z", "nr_forecasters": 49, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", 
"score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" } ] }, "question": { "id": 4821, "title": "Will EA Global London 2021 be cancelled, rescheduled, or moved online?", "created_at": "2020-07-15T11:05:13.689753Z", "open_time": "2020-07-18T22:00:00Z", "cp_reveal_time": "2020-07-20T22:00:00Z", "spot_scoring_time": "2020-07-20T22:00:00Z", "scheduled_resolve_time": "2021-04-29T23:00:00Z", "actual_resolve_time": "2021-01-14T10:00:00Z", "resolution_set_time": "2021-01-14T10:00:00Z", "scheduled_close_time": "2021-04-29T23:00:00Z", "actual_close_time": "2021-01-14T10:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": 
null }, "group_rank": null, "description": "", "resolution_criteria": "[Effective Altruism Global](https://en.wikipedia.org/wiki/Effective_Altruism_Global), abbreviated EA Global, is a series of philanthropy conferences that focuses on the effective altruism movement. [EA Global London 2021](https://www.eaglobal.org/events/london2020/) is scheduled to be held in London, UK from to 30 April 2021 to 2 May 2021. In 2020, EA Global conferences in London and San Francisco were both cancelled due to COVID-19.\n\n\n**Will EA Global: London 2021 be cancelled, rescheduled, or moved online?**\n\n\nThe question resolves negatively if the EA Global London 2021 takes place in the originally scheduled physical location at the scheduled dates. It resolves positively if the event is cancelled or the dates are moved, or if the venue is moved online or away from London. **Note that resolution does not require COVID-19 — or anything else — to be cited as a reason for any changes to the event.**\n\nThere were 2 previous questions on 2020 EA Global conferences (which — unlike this question — asked about cancellation/rescheduling due to COVID-19 specifically):\n\n[EA Global San Francisco 2020](https://www.metaculus.com/questions/3756/will-ea-global-san-francisco-be-cancelled-or-rescheduled-due-to-covid-19/)\n\n\n[EA Global London 2020](https://www.metaculus.com/questions/4318/will-ea-global-london-2020-be-cancelled-or-rescheduled-due-to-covid-19/)", "fine_print": "", "post_id": 4821, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1610638976.418788, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.9 ] } ], "latest": { "start_time": 1610638976.418788, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.9 ], "forecast_values": [ 0.25, 0.75 ], "means": [ 0.776320278859809 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13613871759178028, 0.0, 0.0, 0.0, 0.0, 0.01542797475028349, 0.0, 0.0, 0.0, 0.0, 0.010561813766971701, 0.0, 0.04978706836786394, 0.008531995357508187, 0.0, 0.0, 0.0, 0.021542612258663517, 0.0, 0.0, 0.00515416995245163, 0.0, 0.0, 0.0, 0.0, 0.04519002219406405, 0.28495119657571016, 0.0, 0.0, 0.0, 0.025137985516365664, 0.1494220410952148, 0.6929825785475864, 0.0, 0.0, 0.39841571840887346, 0.18113776984784913, 0.012852091876617526, 0.0, 0.0, 1.1883868337609695, 0.0, 0.0, 0.0, 0.5505289351792777, 1.0110179637400996, 0.0, 0.063459122985457, 0.0, 0.9085515435096261, 0.761545766310015, 0.5987118703611444, 0.0, 0.0, 0.07128274001026091, 0.4778053807256561, 0.0, 0.0, 0.0, 0.0, 0.19892345174351658, 0.0, 0.0, 0.0, 0.0, 2.192861477569686, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.4467796817292475 ] ] }, "score_data": { "peer_score": 5.420155034382772, "coverage": 0.6294940161256497, "baseline_score": 12.94506832897083, "spot_peer_score": 12.342693150316414, "peer_archived_score": 5.420155034382772, "baseline_archived_score": 12.94506832897083, "spot_peer_archived_score": 12.342693150316414 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1610525178.505511, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1610525178.505511, "end_time": null, "forecaster_count": 49, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.13142473482553274, 0.8685752651744673 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, "forecasts_count": 119, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4816, "title": "Will Derek Chauvin be acquitted of all murder charges?", 
"short_title": "", "url_title": "", "slug": "will-derek-chauvin-be-acquitted-of-all-murder-charges", "author_id": 108770, "author_username": "Matthew_Barnett", "coauthors": [], "created_at": "2020-07-14T16:06:39.734941Z", "published_at": "2020-07-16T22:00:00Z", "edited_at": "2025-09-05T17:28:57.811962Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-16T22:00:00Z", "comment_count": 103, "status": "resolved", "resolved": true, "actual_close_time": "2021-03-01T08:00:00Z", "scheduled_close_time": "2021-03-01T08:00:00Z", "scheduled_resolve_time": "2021-04-20T21:39:00Z", "actual_resolve_time": "2021-04-20T21:39:00Z", "open_time": "2020-07-16T22:00:00Z", "nr_forecasters": 198, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3688, "name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": 
"normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4816, "title": "Will Derek Chauvin be acquitted of all murder charges?", "created_at": "2020-07-14T16:06:39.734941Z", "open_time": "2020-07-16T22:00:00Z", "cp_reveal_time": "2020-07-17T13:43:47.052253Z", "spot_scoring_time": "2020-07-17T13:43:47.052253Z", "scheduled_resolve_time": "2021-04-20T21:39:00Z", "actual_resolve_time": "2021-04-20T21:39:00Z", "resolution_set_time": "2021-04-20T21:39:00Z", "scheduled_close_time": "2021-03-01T08:00:00Z", "actual_close_time": "2021-03-01T08:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Derek Chauvin [is](https://en.wikipedia.org/wiki/Derek_Chauvin),\n\n> an American former police officer charged with the killing of George Floyd in Minneapolis, Minnesota, on May 25, 2020. During an arrest made by Chauvin and three other officers, he knelt on George Floyd's neck for almost eight minutes while Floyd was handcuffed and lying face down on a street. The death set off a series of protests around the world.\n\n> Chauvin was fired by the Minneapolis Police Department the day after the incident. 
He was initially charged with third-degree murder and second-degree manslaughter; a charge of second-degree murder was later added.\n\nSome have suggested that he will be acquitted of his murder charges. From [a Medium post](https://medium.com/@gavrilodavid/why-derek-chauvin-may-get-off-his-murder-charge-2e2ad8d0911),\n\n> There are six crucial pieces of information — six facts — that have been largely omitted from discussion on the Chauvin’s conduct. Taken together, they likely exonerate the officer of a murder charge. [...]\n\n> 1. George Floyd was experiencing cardiopulmonary and psychological distress minutes before he was placed on the ground, let alone had a knee to his neck.\n\n> 2. The Minneapolis Police Department (MPD) allows the use of neck restraint on suspects who actively resist arrest, and George Floyd actively resisted arrest on two occasions, including immediately prior to neck restraint being used.\n\n> 3. The officers were recorded on their body cams assessing George Floyd as suffering from “excited delirium syndrome” (ExDS), a condition which the MPD considers an extreme threat to both the officers and the suspect. A white paper used by the MPD acknowledges that ExDS suspects may die irrespective of force involved. The officers’ response to this situation was in line with MPD guidelines for ExDS.\n\n> 4. Restraining the suspect on his or her abdomen (prone restraint) is a common tactic in ExDS situations, and the white paper used by the MPD instructs the officers to control the suspect until paramedics arrive.\n\n> 5. Floyd’s autopsy revealed a potentially lethal concoction of drugs — not just a potentially lethal dose of fentanyl, but also methamphetamine. Together with his history of drug abuse and two serious heart conditions, Floyd’s condition was exceptionally and unusually fragile.\n\n> 6. Chauvin’s neck restraint is unlikely to have exerted a dangerous amount of force to Floyd’s neck. 
Floyd is shown on video able to lift his head and neck, and a robust study on double-knee restraints showed a median force exertion of approximately approximately 105lbs.\n\nThis question resolves positively if Derek Chauvin is acquitted of ALL murder\\(^†\\) charges OR all murder charges against him are dropped. Otherwise, it resolves negatively. If he dies before resolution, the question resolves ambiguously.\n\n<small>\\(^†\\) Only convictions for offences actually called \"murder\" trigger negative resolution ; conviction for other offences such as manslaughter does not.</small>", "fine_print": "", "post_id": 4816, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1614577793.029515, "end_time": null, "forecaster_count": 198, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.73 ], "interval_upper_bounds": [ 0.79 ] } ], "latest": { "start_time": 1614577793.029515, "end_time": null, "forecaster_count": 198, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.73 ], "interval_upper_bounds": [ 0.79 ], "forecast_values": [ 0.27, 0.73 ], "means": [ 0.7233183282457685 ], "histogram": [ [ 0.0, 0.00261312390515081, 0.0, 0.0, 0.0, 0.0, 0.0, 8.96881016542232e-06, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08027187678707301, 0.0, 0.0, 0.0, 0.0, 5.7216846341725895e-06, 0.0, 0.0, 2.134650591382874e-05, 0.0, 7.570460368456666e-05, 0.0, 0.0, 0.0, 0.0, 0.03933154359090087, 0.0, 0.0, 0.0, 6.778895211371546e-05, 0.06352642534530095, 0.008704908418489105, 0.0, 0.1369415199621206, 0.00010388066247248304, 0.32274024145033664, 0.0, 0.0, 0.0, 0.0, 0.6820908892102943, 0.010760693600631034, 0.0, 0.0, 0.416314610745873, 1.10692423371817, 0.5228615432575995, 1.6386529167421844, 0.8668762878537511, 0.0657220876600487, 1.1976857579576803, 0.5848556517203196, 0.9142313715324965, 0.13440198298786057, 0.23375016343075003, 1.4716779172852572, 0.3093951934011834, 0.744544262717526, 2.4303065210017567, 0.6564520670949059, 
5.003506984615163, 0.17813262958260112, 0.026673065401348714, 0.04488733144650936, 0.8987408255327798, 1.1425873478798159, 0.35599362020434766, 1.2026606219504463, 0.4472005541225355, 0.0, 0.13652445821793757, 0.003262052306723476, 0.0, 0.5277811616103768, 0.0005454994531489946, 0.6629380306361775, 0.0, 0.0, 1.0692873431164618, 0.0, 0.0002638063136157436, 0.0, 0.009182867495108398, 0.0, 0.2933711926021715 ] ] }, "score_data": { "peer_score": 9.292334620697412, "coverage": 0.9999136601046946, "baseline_score": -79.78330522435681, "spot_peer_score": 32.17460118548615, "peer_archived_score": 9.292334620697412, "baseline_archived_score": -79.78330522435681, "spot_peer_archived_score": 32.17460118548615 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1614577793.083067, "end_time": null, "forecaster_count": 198, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1614577793.083067, "end_time": null, "forecaster_count": 198, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.28241747863212163, 0.7175825213678784 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 49, "user_vote": null }, "forecasts_count": 613, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4813, "title": "[Short fuse]: Will Tesla's stock price close below $1,000 per share before 2021?", "short_title": "", "url_title": "", "slug": "short-fuse-will-teslas-stock-price-close-below-1000-per-share-before-2021", "author_id": 101465, "author_username": "Jgalt", "coauthors": [], "created_at": "2020-07-14T00:32:38.047853Z", "published_at": "2020-07-16T22:00:00Z", "edited_at": "2025-09-05T17:29:05.653732Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-16T22:00:00Z", "comment_count": 23, "status": "resolved", "resolved": true, 
"actual_close_time": "2020-08-03T23:00:00Z", "scheduled_close_time": "2020-08-03T23:00:00Z", "scheduled_resolve_time": "2020-12-31T20:45:00Z", "actual_resolve_time": "2020-12-31T20:45:00Z", "open_time": "2020-07-16T22:00:00Z", "nr_forecasters": 40, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" } ] }, "question": { "id": 4813, "title": "[Short fuse]: Will Tesla's stock price close below $1,000 per share before 2021?", "created_at": "2020-07-14T00:32:38.047853Z", "open_time": "2020-07-16T22:00:00Z", "cp_reveal_time": "2020-07-18T10:15:31.514570Z", "spot_scoring_time": "2020-07-18T10:15:31.514570Z", "scheduled_resolve_time": 
"2020-12-31T20:45:00Z", "actual_resolve_time": "2020-12-31T20:45:00Z", "resolution_set_time": "2020-12-31T20:45:00Z", "scheduled_close_time": "2020-08-03T23:00:00Z", "actual_close_time": "2020-08-03T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In early 2020, US automaker [Tesla, Inc.](https://en.wikipedia.org/wiki/Tesla,_Inc.) became America's most valuable automobile manufacturer, and [saw its market capitalization eclipse that of GM and Ford combined,](https://www.reuters.com/article/us-usa-stocks-tesla/teslas-market-value-zooms-past-that-of-gm-and-ford-combined-idUSKBN1Z72MU) climbing to over $89 billion by January 9 2020.\n\nTesla's stock price continued to rise sharply in the first half of 2020, despite CEO Musk tweeting on May 1 [\"Tesla stock price is too high imo.\"](https://twitter.com/elonmusk/status/1256239815256797184)\n\nAs of July 12 2020, Tesla's stock was valued at $1,545 per share, and its market capitalization was $286.33 billion - [making Tesla more valuable than Ford, GM, BMW, Daimler and Volkswagen combined](https://twitter.com/zerohedge/status/1281679937410404352), and [making Elon Musk richer than Warren 
Buffett.](https://www.bloomberg.com/news/articles/2020-07-10/elon-musk-rockets-past-warren-buffett-on-billionaires-ranking?sref=DOTC0U32&utm_source=twitter&utm_content=business&utm_medium=social&utm_campaign=socialflow-organic&cmpid=socialflow-twitter-business)\n\n\nThis extraordinary appreciation in Tesla's stock price [has led some to claim that Tesla is currently the subject of a speculative bubble.](https://www.bloomberg.com/opinion/articles/2020-07-13/elon-musk-s-tesla-won-t-ride-the-big-tech-bubble-forever)\n\nThis question asks: **Will Tesla's stock price close *below* $1,000 per share before 1 January 2021?**\n\nThis question resolves positively if Tesla's stock closes below $1,000 per share at any point after this question opens, before January 1 2021. The question resolves negatively if this does not happen. An intraday dip below $1,000 does not suffice for positive resolution; the closing price must be below $1,000. Exactly $1,000 does not suffice.\n\n In the event that Tesla ceases to be a publicly traded company before resolution, the question resolves ambiguously. 
In the event that Tesla conducts a [stock split](https://en.wikipedia.org/wiki/Stock_split) before resolution, once the stock begins trading on a split-adjusted basis, the reference to \"$1,000 per share\" shall be scaled in accordance with the stock split to maintain the same real value.", "fine_print": "", "post_id": 4813, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1596479916.090984, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.52 ] } ], "latest": { "start_time": 1596479916.090984, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.4 ], "interval_upper_bounds": [ 0.52 ], "forecast_values": [ 0.6, 0.4 ], "means": [ 0.38264018060618393 ], "histogram": [ [ 0.0, 1.0389805770562126, 0.0, 0.0, 0.0, 0.9235250802035939, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.3131587912493088, 0.0, 0.0, 0.0, 0.0, 0.14006392172911664, 0.0, 0.04939379203556264, 0.11064264398533052, 0.0, 0.01323943609573642, 0.0, 0.0, 0.24712671560877192, 0.03598851855808508, 0.7984034521712733, 0.0, 0.0, 0.8830458018964656, 0.0, 0.24683144416234964, 0.06594039540813919, 0.29360034512635746, 0.0, 0.3235495858073233, 0.05724278302464458, 0.39086599043581904, 0.0, 1.0, 0.0, 0.345064593035334, 0.0, 0.3559187878370533, 0.0, 0.0, 0.004870516352324537, 0.0, 0.0, 0.0, 0.6646627317794116, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1751732796751412, 0.8520233747574918, 0.0, 0.0, 0.0, 0.3520792243212725, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007369969155267514, 0.46916959902833105, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 26.05928727103941, "coverage": 0.9966643191371967, "baseline_score": 18.764471475672195, "spot_peer_score": 57.74685842812784, "peer_archived_score": 26.05928727103941, "baseline_archived_score": 18.764471475672195, "spot_peer_archived_score": 
57.74685842812784 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1596479916.123536, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1596479916.123536, "end_time": null, "forecaster_count": 40, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5939010240676117, 0.40609897593238825 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 73, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4808, "title": "Will the median prediction on this question fall in range [1-48]% OR [95-99]%?", "short_title": "", "url_title": "", "slug": "will-the-median-prediction-on-this-question-fall-in-range-1-48-or-95-99", "author_id": 112655, "author_username": "Alexander230", "coauthors": [], "created_at": "2020-07-13T23:25:27.604116Z", "published_at": "2020-07-16T22:00:00Z", "edited_at": "2025-09-05T17:29:24.489264Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-16T22:00:00Z", "comment_count": 122, "status": "resolved", "resolved": true, "actual_close_time": "2020-08-15T17:00:00Z", "scheduled_close_time": "2020-08-15T17:00:00Z", "scheduled_resolve_time": "2020-08-16T04:28:00Z", "actual_resolve_time": "2020-08-16T04:28:00Z", "open_time": "2020-07-16T22:00:00Z", "nr_forecasters": 174, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": 
"forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3686, "name": "Metaculus", "slug": "metaculus", "emoji": "🔮", "description": "Metaculus", "type": "category" } ] }, "question": { "id": 4808, "title": "Will the median prediction on this question fall in range [1-48]% OR [95-99]%?", "created_at": "2020-07-13T23:25:27.604116Z", "open_time": "2020-07-16T22:00:00Z", "cp_reveal_time": "2020-07-17T04:09:24.474542Z", "spot_scoring_time": "2020-07-17T04:09:24.474542Z", "scheduled_resolve_time": "2020-08-16T04:28:00Z", "actual_resolve_time": "2020-08-16T04:28:00Z", "resolution_set_time": "2020-08-16T04:28:00Z", "scheduled_close_time": "2020-08-15T17:00:00Z", "actual_close_time": "2020-08-15T17:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, 
"open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "This is like Keynesian beauty contest, but with a twist. It will be resolved positively if median prediction on this question after closing will be from 1% to 48% inclusively OR from 95% to 99% inclusively. Otherwise it will be resolved negatively.", "fine_print": "", "post_id": 4808, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1597510799.227006, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ] } ], "latest": { "start_time": 1597510799.227006, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": [ 0.95 ], "centers": [ 0.99 ], "interval_upper_bounds": [ 0.99 ], "forecast_values": [ 0.010000000000000009, 0.99 ], "means": [ 0.8794430242297239 ], "histogram": [ [ 0.0, 1.4415023741761774, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02066470838890691, 0.43934105960873243, 0.0, 0.0, 0.0, 0.0, 0.0, 7.978717484931284e-06, 0.0, 0.0, 0.0, 0.06636138591682929, 0.0, 0.10600867767840631, 0.0, 0.0, 0.0, 0.0, 0.026955863821373872, 0.0, 0.0, 3.1763822528067176, 0.0, 0.0, 0.0, 0.0449110767318403, 0.040637171912534124, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13361108920985895, 0.0, 0.0, 0.0, 0.57802462221273, 0.43015704998326665, 0.8761317208706868, 0.005430671408140072, 0.4143188280950397, 17.008616559671218 ] ] }, "score_data": { "peer_score": 14.207231076136257, "coverage": 0.9999837056177021, "baseline_score": 98.285314102967, "spot_peer_score": 32.992989334069854, "peer_archived_score": 14.207231076136257, "baseline_archived_score": 98.285314102967, 
"spot_peer_archived_score": 32.992989334069854 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1597510799.806724, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1597510799.806724, "end_time": null, "forecaster_count": 173, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.03219666056593129, 0.9678033394340687 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": -18, "user_vote": null }, "forecasts_count": 973, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4807, "title": "Will Harvard and MIT win their July 2020 lawsuit against the U.S. Immigration and Customs Enforcement?", "short_title": "", "url_title": "", "slug": "will-harvard-and-mit-win-their-july-2020-lawsuit-against-the-us-immigration-and-customs-enforcement", "author_id": 108770, "author_username": "Matthew_Barnett", "coauthors": [], "created_at": "2020-07-13T22:49:14.422005Z", "published_at": "2020-07-13T22:00:00Z", "edited_at": "2025-09-05T17:29:06.009390Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-13T22:00:00Z", "comment_count": 8, "status": "resolved", "resolved": true, "actual_close_time": "2020-07-15T08:31:00Z", "scheduled_close_time": "2020-07-16T07:00:00Z", "scheduled_resolve_time": "2020-07-16T07:00:00Z", "actual_resolve_time": "2020-07-15T08:31:00Z", "open_time": "2020-07-13T22:00:00Z", "nr_forecasters": 30, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "category": [ { "id": 3688, "name": "Law", "slug": "law", "emoji": "⚖️", "description": "Law", "type": "category" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", 
"slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4807, "title": "Will Harvard and MIT win their July 2020 lawsuit against the U.S. 
Immigration and Customs Enforcement?", "created_at": "2020-07-13T22:49:14.422005Z", "open_time": "2020-07-13T22:00:00Z", "cp_reveal_time": "2020-07-14T06:42:44.461417Z", "spot_scoring_time": "2020-07-14T06:42:44.461417Z", "scheduled_resolve_time": "2020-07-16T07:00:00Z", "actual_resolve_time": "2020-07-15T08:31:00Z", "resolution_set_time": "2020-07-15T08:31:00Z", "scheduled_close_time": "2020-07-16T07:00:00Z", "actual_close_time": "2020-07-15T08:31:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "ambiguous", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "From [a letter](http://president.mit.edu/speeches-writing/mit-action-response-new-ice-rule-online-learning) to MIT students sent MIT President L. Rafael Reif,\n\n> On Monday [July 6th 2020], in a surprising development, a division of Immigration and Customs Enforcement announced that it will not permit international students on F-1 visas to take a full online course load this fall while studying in the United States. As I wrote yesterday, this ruling has potentially serious implications for MIT’s international students and those enrolled at institutions across the country.\n\n> This morning, in response, MIT and Harvard jointly filed suit against ICE and the US Department of Homeland Security in federal court in Massachusetts. 
In the lawsuit, we ask the court to prevent ICE and DHS from enforcing the new guidance and to declare it unlawful.\n\nFrom [the Harvard Crimson](https://www.thecrimson.com/article/2020/7/9/harvard-mit-ice-lawsuit-international-students-analysis/), \n\n> The case has been assigned to federal judge Allison D. Burroughs, who previously ruled on cases pertaining both to Harvard and to the Trump administration’s immigration policies.\n\n> In October 2019, Burroughs ruled in favor of Harvard in its ongoing affirmative action lawsuit against Students for Fair Admissions. Two years earlier, she ruled against President Donald Trump’s initial travel ban that barred citizens of predominantly Muslim countries from entering the United States.\n\nThis question resolves positively if Allison D. Burroughs rules in favor of Harvard and MIT, and negatively if it rules against them.", "fine_print": "", "post_id": 4807, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1594794895.964543, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.76 ], "interval_upper_bounds": [ 0.85 ] } ], "latest": { "start_time": 1594794895.964543, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.76 ], "interval_upper_bounds": [ 0.85 ], "forecast_values": [ 0.24, 0.76 ], "means": [ 0.7871145540540029 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05892591724387544, 0.0, 0.023631498669224205, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.017197143016031723, 0.011364900518339544, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6850893156161345, 0.153865828537011, 0.0, 0.215418585529509, 0.3660118313347263, 0.04842515677234604, 0.0, 0.20430690757075753, 0.0, 0.0, 0.985097851735993, 0.2581744010322857, 0.0, 0.0, 0.09877132838432481, 1.5374567267463464, 
0.9120497244989734, 0.0, 0.0, 0.0, 1.2282701289894635, 0.0, 0.0, 0.0, 0.0, 0.3268262380230357, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.3363849412313806 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1594800209.241751, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1594800209.241751, "end_time": null, "forecaster_count": 30, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.2475896745497238, 0.7524103254502762 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, "forecasts_count": 40, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4801, "title": "Will the “unknown pneumonia” outbreak in Kazakhstan turn out to be caused by a pathogen other than Covid-19?", "short_title": "", "url_title": "", "slug": "will-the-unknown-pneumonia-outbreak-in-kazakhstan-turn-out-to-be-caused-by-a-pathogen-other-than-covid-19", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-07-13T11:22:17.911266Z", "published_at": "2020-07-16T04:00:00Z", "edited_at": "2025-09-05T17:29:00.707909Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-16T04:00:00Z", "comment_count": 10, "status": "resolved", "resolved": true, "actual_close_time": "2020-08-12T23:00:00Z", "scheduled_close_time": "2020-08-12T23:00:00Z", "scheduled_resolve_time": "2020-09-13T19:37:00Z", "actual_resolve_time": "2020-09-13T19:37:00Z", "open_time": "2020-07-16T04:00:00Z", "nr_forecasters": 58, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": 
"site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4801, "title": "Will the “unknown pneumonia” outbreak in Kazakhstan turn out to be caused by a pathogen other than Covid-19?", "created_at": "2020-07-13T11:22:17.911266Z", "open_time": "2020-07-16T04:00:00Z", "cp_reveal_time": "2020-07-17T14:22:44.313924Z", "spot_scoring_time": "2020-07-17T14:22:44.313924Z", "scheduled_resolve_time": "2020-09-13T19:37:00Z", "actual_resolve_time": "2020-09-13T19:37:00Z", "resolution_set_time": "2020-09-13T19:37:00Z", "scheduled_close_time": "2020-08-12T23:00:00Z", "actual_close_time": "2020-08-12T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, 
"inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "On 9 July 2020, the South China Morning Post reported that an [‘Unknown pneumonia’ deadlier than coronavirus](https://www.scmp.com/news/china/diplomacy/article/3092563/chinese-embassy-warns-deadly-unknown-pneumonia-kazakhstan) was sweeping through Kazakhstan. [Kazakhstan itself has denied this report](https://www.bbc.co.uk/news/world-asia-53363024), and Michael Ryan of the WHO has [suggested that the pneumonia cases could well just been cause by Covid-19](https://www.youtube.com/watch?v=8d-4Agf3nRE).\n\nThis question asks: \n\nWill a pathogen which is *not* SARS-CoV-2 be identified as having caused hundreds of excess cases of pneumonia in Kazakhstan in the months of June & July 2020?\n\nThis question will resolve positive if, by the date of resolution:\n\n- A pathogen other than SARS-CoV-2 is identified by credible media reports as having caused more than 200 cases of pneumonia in Kazakhstan. 
Wording referring to “the majority of the excess cases”, or “the excess cases” is sufficient.\n\n\n- This pathogen has not historically been a cause of pneumonia during these months in Kazakhstan.\n\nOtherwise, the question resolves negative.", "fine_print": "", "post_id": 4801, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1597269140.904458, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.16 ] } ], "latest": { "start_time": 1597269140.904458, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.16 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.11113023125686784 ], "histogram": [ [ 0.0, 1.7785860651722665, 0.0027844015881518533, 1.9169339045495084, 0.3743651858727827, 1.1235336755592624, 0.40350388084437705, 0.749796426679495, 0.02077269865532973, 0.02689612362645627, 2.485164305232351, 0.12384689366695562, 0.08792760161272581, 0.0, 0.5126909744754765, 0.6305748012666774, 0.8250498608214839, 0.12899150325450517, 0.2986236935877768, 0.0, 0.10555713234193886, 0.1827395575225561, 0.0935645341707835, 0.0, 0.0, 0.4676110092672276, 0.0, 0.0, 0.0, 0.0, 0.667196579275483, 0.0, 0.0, 0.11782585722242149, 0.0, 0.5916505078118112, 0.0, 0.015738088418585917, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005705735607831822, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 4.491882160925059, "coverage": 0.9965137978693693, "baseline_score": 73.5276253837921, "spot_peer_score": 10.675377695908495, "peer_archived_score": 4.491882160925059, "baseline_archived_score": 73.5276253837921, "spot_peer_archived_score": 10.675377695908495 }, "movement": null }, 
"metaculus_prediction": { "history": [ { "start_time": 1597269141.01941, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1597269141.01941, "end_time": null, "forecaster_count": 58, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9655071332840988, 0.03449286671590127 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 113, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4788, "title": "Will artificial superintelligence precede the achievement of longevity escape velocity, if either occur by year 2300?", "short_title": "AGI Preceeds LEV", "url_title": "AGI Preceeds LEV", "slug": "agi-preceeds-lev", "author_id": 112543, "author_username": "Natalia_Mendonca", "coauthors": [], "created_at": "2020-07-10T22:37:12.041550Z", "published_at": "2020-07-16T22:00:00Z", "edited_at": "2025-10-20T06:23:18.045597Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-16T22:00:00Z", "comment_count": 9, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2200-01-01T23:34:00Z", "scheduled_resolve_time": "2300-01-01T23:36:00Z", "actual_resolve_time": null, "open_time": "2020-07-16T22:00:00Z", "nr_forecasters": 142, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": 
"2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" }, { "id": 3694, "name": "Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ], "question_series": [ { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T14:25:42.198790Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T14:25:42.198790Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", 
"visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4788, "title": "Will artificial superintelligence precede the achievement of longevity escape velocity, if either occur by year 2300?", "created_at": "2020-07-10T22:37:12.041550Z", "open_time": "2020-07-16T22:00:00Z", "cp_reveal_time": "2020-07-18T10:28:08.138615Z", "spot_scoring_time": "2020-07-18T10:28:08.138615Z", "scheduled_resolve_time": "2300-01-01T23:36:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2200-01-01T23:34:00Z", "actual_close_time": "2200-01-01T23:34:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Longevity escape velocity](https://en.m.wikipedia.org/wiki/Longevity_escape_velocity) is a hypothetical situation in which life-extending medicine extends life\n\n> longer than the time that is passing. 
For example, in a given year in which longevity escape velocity would be maintained, technological advances would increase life expectancy more than the year that just went by.", "resolution_criteria": "An anti-aging therapy is said to lead to longevity escape velocity if more than one-half of 70-year-olds who take it within 5 years of its development are still alive after 50 years.\n\nFor the purposes of this question, the date of development of the therapy is the date in which the therapy is first given to human subjects. \n\nThis question resolves positively if, before an anti-aging therapy that leads to longevity escape velocity is first developed, an AI achieves generally superhuman performance across virtually all human activities of interest *or* an AI limited to answering questions achieves reliably superhuman performance across virtually all questions of interest (the criterion for superintelligence is the same as the one used in [this question](https://www.metaculus.com/questions/4123/after-an-agi-is-created-how-many-months-will-it-be-before-the-first-superintelligence/)).\n\nSuccessful creation of either type of artificial superintelligence would presumably be extremely obvious and uncontroversial, with a great amount of media coverage and scientific attention. 
However, if there is significant disagreement over whether a given apparent achievement resolves the question, it will be determined by Metaculus moderator.\n\nIf no anti-aging therapy that leads to longevity escape velocity is developed before 2300-01-01, this question resolves ambiguously", "fine_print": "", "post_id": 4788, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1760941387.283494, "end_time": 2161273636.682386, "forecaster_count": 141, "interval_lower_bounds": [ 0.83 ], "centers": [ 0.92 ], "interval_upper_bounds": [ 0.98 ] } ], "latest": { "start_time": 1760941387.283494, "end_time": 2161273636.682386, "forecaster_count": 141, "interval_lower_bounds": [ 0.83 ], "centers": [ 0.92 ], "interval_upper_bounds": [ 0.98 ], "forecast_values": [ 0.07999999999999996, 0.92 ], "means": [ 0.836093391841945 ], "histogram": null }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728290079.354279, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728290079.354279, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.2477700524966382, 0.7522299475033618 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 30, "user_vote": null }, "forecasts_count": 361, "key_factors": [], "is_current_content_translated": false, "description": "[Longevity escape velocity](https://en.m.wikipedia.org/wiki/Longevity_escape_velocity) is a hypothetical situation in which life-extending medicine extends life\n\n> longer than the time that is passing. For example, in a given year in which longevity escape velocity would be maintained, technological advances would increase life expectancy more than the year that just went by." 
}, { "id": 4784, "title": "Will James Lindsay receive a long-term Twitter ban before 2021?", "short_title": "", "url_title": "", "slug": "will-james-lindsay-receive-a-long-term-twitter-ban-before-2021", "author_id": 112639, "author_username": "emilowk", "coauthors": [], "created_at": "2020-07-09T23:41:38.698953Z", "published_at": "2020-07-12T22:00:00Z", "edited_at": "2025-09-05T17:28:48.595257Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-12T22:00:00Z", "comment_count": 4, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-03T10:11:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-03T10:11:00Z", "open_time": "2020-07-12T22:00:00Z", "nr_forecasters": 52, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", 
"is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" }, { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 4784, "title": "Will James Lindsay receive a long-term Twitter ban before 2021?", "created_at": "2020-07-09T23:41:38.698953Z", "open_time": "2020-07-12T22:00:00Z", "cp_reveal_time": "2020-07-14T22:00:00Z", "spot_scoring_time": "2020-07-14T22:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-03T10:11:00Z", "resolution_set_time": "2021-01-03T10:11:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "actual_close_time": "2021-01-03T10:11:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Parallel questions for other dissidents: [Stefan Molyneux](https://www.metaculus.com/questions/4733/will-stefan-molyneux-receive-a-long-term-twitter-ban-before-2021/), [Richard Spencer](https://www.metaculus.com/questions/4775/will-richard-spencer-receive-a-long-term-twitter-ban-before-2021/), [Charles Murray](https://www.metaculus.com/questions/4776/will-charles-murray-receive-a-long-term-twitter-ban-before-2021/).\n\nAmerican owned social 
media networks are increasingly censoring their networks for political speech. Twitter too increasingly censors (bans/suspends) accounts, [Wikipedia keeps a list of incidents](https://en.wikipedia.org/wiki/Twitter_suspensions). Lindsay has been critical of censorship for years, and recently (9th July) [his account was temporarily unavailable for unexplained reasons](https://twitter.com/peterboghossian/status/1281047567468392448). This might suggest a ban in nearing.\n\n**Will James Lindsay ([@ConceptualJames](https://twitter.com/ConceptualJames)) receive a long-term Twitter ban before 2021?**\n\nResolution is positive if before by January 31st 2021 11:59EST, a credible news article or Twitter reports that Lindsay's account has been suspended by Twitter citing violation of its policies, for at least a 30-day period, continuously.", "fine_print": "", "post_id": 4784, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609615209.806853, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1609615209.806853, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.0395221830934569 ], "histogram": [ [ 0.0, 10.153994322959905, 0.07218472244402722, 0.3252739108725744, 0.2978686829232104, 0.7370099109248733, 0.08934312118698887, 0.03550367010089748, 0.0, 0.0, 0.11977925969461596, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.02358838295555427, 0.0, 0.0, 0.004173285167582051, 0.039664353908633, 0.0, 0.0030369881689754252, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9941535163480715, 0.0, 0.0, 0.01040622348426393, 0.0, 0.008551805847127919, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.006908280092399992, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.002007023398223986, 0.0, 0.0, 0.005455655232684382, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 11.154066838053996, "coverage": 0.8591352215712426, "baseline_score": 58.35748270702273, "spot_peer_score": 5.811847392067391, "peer_archived_score": 11.154066838053996, "baseline_archived_score": 58.35748270702273, "spot_peer_archived_score": 5.811847392067391 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609661670.634718, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609661670.634718, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9934253457952503, 0.0065746542047497095 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": -5, "user_vote": null }, "forecasts_count": 123, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4783, "title": "If the NYT publishes an article mentioning Slate Star Codex or Scott Alexander by July 2021, will it include his full name?", "short_title": "", "url_title": "", "slug": "if-the-nyt-publishes-an-article-mentioning-slate-star-codex-or-scott-alexander-by-july-2021-will-it-include-his-full-name", "author_id": 101911, "author_username": "tetraspace", "coauthors": [], "created_at": "2020-07-09T19:20:14.751368Z", "published_at": "2020-07-10T15:00:00Z", "edited_at": "2025-09-05T17:29:03.133212Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-10T15:00:00Z", "comment_count": 16, "status": "resolved", "resolved": true, "actual_close_time": "2021-02-13T10:00:00Z", "scheduled_close_time": "2021-03-01T00:00:00Z", "scheduled_resolve_time": "2021-03-01T00:00:00Z", "actual_resolve_time": 
"2021-02-13T10:00:00Z", "open_time": "2020-07-10T15:00:00Z", "nr_forecasters": 141, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4783, "title": "If the NYT publishes an article mentioning Slate Star Codex or Scott Alexander by July 2021, will it include his full name?", "created_at": "2020-07-09T19:20:14.751368Z", "open_time": "2020-07-10T15:00:00Z", "cp_reveal_time": "2020-07-10T19:33:37.646735Z", "spot_scoring_time": "2020-07-10T19:33:37.646735Z", "scheduled_resolve_time": "2021-03-01T00:00:00Z", "actual_resolve_time": "2021-02-13T10:00:00Z", "resolution_set_time": "2021-02-13T10:00:00Z", "scheduled_close_time": "2021-03-01T00:00:00Z", "actual_close_time": "2021-02-13T10:00:00Z", "type": "binary", "options": null, "group_variable": "", 
"status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "On June 22nd, Scott Alexander took down the blog Slate Star Codex due to an in-progress article by a technology reporter from the New York Times, replacing it with [a post](https://slatestarcodex.com/2020/06/22/nyt-is-threatening-my-safety-by-revealing-my-real-name-so-i-am-deleting-the-blog/) explaining his rationale. According to Alexander, the journalist was going to publish Scott's real name in association with his blog in this article, as part of a general NYT policy of including real names in articles.\n\nWhile [no article was published within two weeks](https://www.metaculus.com/questions/4697/short-fuse-given-that-the-nyt-publishes-an-article-on-scott-alexander-will-it-include-his-full-name/) of this, the Metaculus community median currently assigns a high probability that some article mentioning the topic [will be released by July 2021](https://www.metaculus.com/questions/4736/will-the-nyt-end-up-publishing-any-articles-mentioning-ssc-or-sa-in-the-next-year/). \n\n**If the NYT publishes an article mentioning Slate Star Codex or Scott Alexander before July 2021, will it include his surname?**\n\nIf the NYT publishes no such articles by that date, this will resolve ambiguously. 
If the NYT publishes multiple such article by that date, this will resolve positively if any of them contain his surname.\n\nA similar short-fuse [question](https://www.metaculus.com/questions/4697/short-fuse-given-that-the-nyt-publishes-an-article-on-scott-alexander-will-it-include-his-full-name/) resolved ambiguously on 2020-07-09.", "fine_print": "", "post_id": 4783, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1613219267.471332, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.8 ], "centers": [ 0.85 ], "interval_upper_bounds": [ 0.92 ] } ], "latest": { "start_time": 1613219267.471332, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": [ 0.8 ], "centers": [ 0.85 ], "interval_upper_bounds": [ 0.92 ], "forecast_values": [ 0.15000000000000002, 0.85 ], "means": [ 0.8380011413607975 ], "histogram": [ [ 0.0, 0.0023292559373503025, 0.0, 0.0, 0.0, 0.0, 0.001141602189997898, 0.0, 0.0, 0.0, 0.006164989702747777, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.006067764978357709, 0.00019205720325645798, 0.09681531611681833, 0.0018242657088862373, 0.0, 0.00042098325728063277, 0.0, 0.0, 0.0, 0.0, 0.010501537331990145, 0.0, 0.0, 0.0037574127768196367, 0.0, 0.001258053411250551, 0.0, 0.0016663552678108826, 0.0, 0.0, 0.011462663472858925, 0.0, 0.005706564189538635, 0.0, 0.0, 0.0, 0.0, 0.0055795511279853795, 0.01082526138799208, 0.0, 0.004646969415443456, 0.1945083059757382, 0.005294469433704249, 0.009435845074671238, 0.0, 0.0, 0.007640119099098946, 0.02351060501478775, 0.0, 0.00011787156455005717, 0.46391566484224395, 0.008800791146786727, 0.0, 0.0, 0.0, 0.7746934462361741, 0.10648388613373014, 0.8439667538641171, 0.06628134811875253, 0.0, 0.4893762611110902, 0.0, 1.1722608687638232, 0.0918582081551365, 0.0, 0.5178689778477286, 0.013240067215759484, 0.0, 0.05645326862575361, 0.0, 4.623800309925637, 0.08261941584747284, 0.0, 0.5461211630797271, 0.01830951858643546, 1.3688868058057904, 5.14787175564293e-05, 0.0, 
0.9219744340842162, 0.0, 3.9801369999235474, 0.0, 0.19644135872768517, 0.11315662339485048, 0.5708472672372145, 1.6370626720314334, 0.0, 0.1253828884413072, 0.012387996325486188, 3.038955449598088 ] ] }, "score_data": { "peer_score": 29.5367224938355, "coverage": 0.9332079277921557, "baseline_score": -2.6721990789682057, "spot_peer_score": 12.044423311413423, "peer_archived_score": 29.5367224938355, "baseline_archived_score": -2.6721990789682057, "spot_peer_archived_score": 12.044423311413423 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1612974641.438123, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1612974641.438123, "end_time": null, "forecaster_count": 141, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.1549037261094467, 0.8450962738905533 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 19, "user_vote": null }, "forecasts_count": 398, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4779, "title": "Will a nuclear weapon be detonated as an act of war by 2050?", "short_title": "At least 1 Nuclear Detonation in War by 2050", "url_title": "At least 1 Nuclear Detonation in War by 2050", "slug": "at-least-1-nuclear-detonation-in-war-by-2050", "author_id": 101911, "author_username": "tetraspace", "coauthors": [], "created_at": "2020-07-08T21:59:57.230493Z", "published_at": "2020-08-01T22:00:00Z", "edited_at": "2025-11-22T08:21:45.203205Z", "curation_status": "approved", "curation_status_updated_at": "2020-08-01T22:00:00Z", "comment_count": 69, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2050-01-01T00:00:00Z", "scheduled_resolve_time": "2050-01-01T00:00:00Z", "actual_resolve_time": null, "open_time": 
"2020-08-01T22:00:00Z", "nr_forecasters": 504, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "topic": [ { "id": 15854, "name": "Top Questions", "slug": "top-50", "emoji": "✨🔝", "type": "topic" } ], "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 4779, "title": "Will a nuclear weapon be detonated as an act of war by 2050?", "created_at": "2020-07-08T21:59:57.230493Z", "open_time": "2020-08-01T22:00:00Z", "cp_reveal_time": "2020-08-02T15:00:48.393392Z", "spot_scoring_time": "2020-08-02T15:00:48.393392Z", "scheduled_resolve_time": "2050-01-01T00:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2050-01-01T00:00:00Z", "actual_close_time": "2050-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", 
"possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In the last 75 years, two nuclear weapons have been detonated as an act of war; the [bombings of Hiroshima and Nagasaki](https://en.wikipedia.org/wiki/Atomic_bombings_of_Hiroshima_and_Nagasaki) in 1945, at the end of WWII. Despite tensions being high between the US and the USSR during the Cold War, and [several close calls](https://en.wikipedia.org/wiki/List_of_nuclear_close_calls), there have been no additional nuclear weapons detonated as acts of war. \n\nCurrently, estimated nuclear weapon stockpiles are 15,000 warheads worldwide, mostly in the US and Russia, and there are eight or nine states that possess nuclear weapons (China, France, Russia, United Kingdom, United States, India, North Korea, Pakistan, and [Israel](https://en.wikipedia.org/wiki/Nuclear_weapons_and_Israel) which remains officially ambiguous). The use of these nuclear weapons [could be catastrophic](https://www.metaculus.com/questions/1494/ragnar%25C3%25B6k-question-series-if-a-global-catastrophe-occurs-will-it-be-due-to-nuclear-war/).", "resolution_criteria": "This question will resolve as **Yes** if there is any nuclear detonation as an act of war between January 1, 2020 and January 1, 2050. Resolution will be by credible media reports. The detonation must be deliberate; accidental, inadvertent, or testing/peaceful detonations will not qualify (see fine print). 
Attacks using [strategic](https://en.wikipedia.org/wiki/Strategic_nuclear_weapon) and [tactical](https://en.wikipedia.org/wiki/Tactical_nuclear_weapon) nuclear weapons are both sufficient to qualify.\n\n*[2021-11-23]: Edited to clarify the definition of \"deliberate\" detonations.*", "fine_print": "[Barrett et al. 2013](http://scienceandglobalsecurity.org/archive/sgs21barrett.pdf) defined terms to distinguish between causes of nuclear detonations:\n\n>In an accidental or unauthorized launch or detonation, system safeguards or procedures to maintain control over nuclear weapons fail in such a way that a nuclear weapon or missile launches or explodes without direction from leaders.\n\n>In an inadvertent detonation, the attacking nation mistakenly concludes that it is under nuclear attack and launches one or more nuclear weapons in what it believes is a counterattack.\n\n>In a deliberate detonation, the attacking nation decides to launch one or more nuclear weapons either in response to a genuine nuclear attack or without believing that it is under nuclear attack.", "post_id": 4779, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763799694.816168, "end_time": 1768858064.951519, "forecaster_count": 448, "interval_lower_bounds": [ 0.18 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.4 ] } ], "latest": { "start_time": 1763799694.816168, "end_time": 1768858064.951519, "forecaster_count": 448, "interval_lower_bounds": [ 0.18 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.4 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.31426292141246875 ], "histogram": [ [ 0.632300644496164, 0.06471296693207103, 0.026441117246447748, 0.7900014164224857, 0.008788633015196045, 1.0917324470222864, 0.49545322690516125, 0.5473230381792131, 0.24532434593235522, 0.9316737070573563, 2.082213717809828, 0.04955880088622318, 0.9053556603933133, 0.42908948521934676, 0.07465953261162313, 0.9135147685432995, 0.43565402393112496, 0.30831049995265697, 
1.1795626772600882, 0.9224631093989322, 3.637382231878648, 0.0039057817062717985, 0.053709369427437016, 2.579226068767369, 0.5806416282804876, 2.562446156717837, 0.3529681704621505, 1.6359457222816618, 0.48101461429047815, 0.0, 0.8777767562843737, 0.0005695229142815632, 0.2045878160245324, 0.8259625314524264, 1.422531384694897, 1.0135733826734965, 0.9769983512343009, 9.128610132803024e-05, 0.05718122040777981, 0.07410043686898468, 2.1338383145661384, 4.7569460996813005e-06, 0.12082022218969042, 0.0, 0.00010025939851334871, 0.6666735809928717, 0.06371517248644905, 1.7981421673524933e-06, 0.48637251513433283, 0.6507894365918507, 1.6585700256654672, 0.8306901389943777, 0.0, 0.0, 0.0, 0.20600906348698278, 0.0, 0.0, 0.0, 0.0, 0.5330428571397539, 0.04335843085834927, 0.0, 0.0, 0.0, 0.0002253575431447273, 0.016452371260942646, 0.06542037177905319, 0.3709197723907247, 0.0017206067648767656, 0.010382714181255085, 0.12407404090423337, 1.6817745660869857e-07, 0.0, 0.0, 0.5806793155024527, 0.0, 0.0017756086268099205, 0.0, 5.746949702012193e-05, 0.009478633565810468, 2.427730244418208e-05, 0.0, 0.00037728709924266847, 0.0, 0.6634341238787056, 0.0, 0.0, 0.0, 0.0, 1.0023951541028695, 0.0, 0.0, 0.0, 0.0, 0.013955794043991495, 0.0, 0.0, 0.03420205333728919, 1.069681582725926 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288977.614769, "end_time": null, "forecaster_count": 431, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288977.614769, "end_time": null, "forecaster_count": 431, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8777076177321819, 0.1222923822678181 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 58, "user_vote": null }, "forecasts_count": 1052, "key_factors": [], "is_current_content_translated": false, 
"description": "In the last 75 years, two nuclear weapons have been detonated as an act of war; the [bombings of Hiroshima and Nagasaki](https://en.wikipedia.org/wiki/Atomic_bombings_of_Hiroshima_and_Nagasaki) in 1945, at the end of WWII. Despite tensions being high between the US and the USSR during the Cold War, and [several close calls](https://en.wikipedia.org/wiki/List_of_nuclear_close_calls), there have been no additional nuclear weapons detonated as acts of war. \n\nCurrently, estimated nuclear weapon stockpiles are 15,000 warheads worldwide, mostly in the US and Russia, and there are eight or nine states that possess nuclear weapons (China, France, Russia, United Kingdom, United States, India, North Korea, Pakistan, and [Israel](https://en.wikipedia.org/wiki/Nuclear_weapons_and_Israel) which remains officially ambiguous). The use of these nuclear weapons [could be catastrophic](https://www.metaculus.com/questions/1494/ragnar%25C3%25B6k-question-series-if-a-global-catastrophe-occurs-will-it-be-due-to-nuclear-war/)." 
}, { "id": 4776, "title": "Will Charles Murray receive a long-term Twitter ban before 2021?", "short_title": "", "url_title": "", "slug": "will-charles-murray-receive-a-long-term-twitter-ban-before-2021", "author_id": 112639, "author_username": "emilowk", "coauthors": [], "created_at": "2020-07-08T03:28:23.069904Z", "published_at": "2020-07-17T22:00:00Z", "edited_at": "2025-09-05T17:29:24.905207Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-17T22:00:00Z", "comment_count": 5, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-02T19:05:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-02T19:05:00Z", "open_time": "2020-07-17T22:00:00Z", "nr_forecasters": 66, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", 
"is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4776, "title": "Will Charles Murray receive a long-term Twitter ban before 2021?", "created_at": "2020-07-08T03:28:23.069904Z", "open_time": "2020-07-17T22:00:00Z", "cp_reveal_time": "2020-07-19T22:00:00Z", "spot_scoring_time": "2020-07-19T22:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-02T19:05:00Z", "resolution_set_time": "2021-01-02T19:05:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "actual_close_time": "2021-01-02T19:05:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*[Parallel question about Stefan Molyneux](https://www.metaculus.com/questions/4733/will-stefan-molyneux-receive-a-long-term-twitter-ban-before-2021/).*\n\nAmerican owned social media networks are increasingly censoring their networks for political speech. 
Recently Youtube and Reddit combined forces to target a number of persons and [reddit communities](https://techcrunch.com/2020/06/29/trump-suspended-from-twitch-as-reddit-bans-the-the_donald-and-additional-subreddits/), including the pro-Donald Trump subreddit r/The_Donald and [Stefan Molyneux' Youtube channel](https://www.forbes.com/sites/mattperez/2020/06/29/youtube-bans-white-supremacists-stefan-molyneux-richard-spencer-david-duke/#6daab6f45ff1). [Charles Murray is still active on Twitter, however](https://twitter.com/charlesmurray), and has 85.5k followers. Twitter has in the past also banned accounts for persons or groups with similar (perceived) views (European identity politics, interest in intelligence research), such as [Jared Taylor/American Renaissance (in 2018)](https://www.cnet.com/news/white-nationalist-jared-taylor-american-renaissance-sues-twitter-for-account-suspension/) The question thus is:\n\n**Will Charles Murray receive a long-term Twitter ban before 2021?**\n\nResolutions is positive if before by January 31st 2021 11:59EST, a credible news article or Twitter reports that Murray's account has been suspended by Twitter citing violation of its policies, for at least a 30-day period, continuously.", "fine_print": "", "post_id": 4776, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609612490.589362, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1609612490.589362, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.03106007891005977 ], "histogram": [ [ 0.0, 11.936134053660927, 0.07087674973752378, 0.3314221691718617, 0.0, 0.9084176077275334, 0.1681047736615791, 0.039753439271563294, 0.0, 0.0, 0.27131538246289555, 0.09731894348758839, 0.0, 0.014249192245099284, 0.0, 
0.023164381480015078, 0.04397920349508898, 0.00816896048126611, 0.0, 0.0, 0.008445766474632615, 0.0, 0.0, 0.0, 0.0, 0.5884006155318706, 0.020621981324100598, 0.1955310023035279, 0.0, 0.0, 0.0, 0.0, 0.0, 0.016179044804938477, 0.0, 0.001218877601747685, 0.0, 0.0, 0.0, 0.00946706080046129, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.000805507209830206, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 5.975512294024321, "coverage": 0.8523579404456905, "baseline_score": 66.89419522133407, "spot_peer_score": -18.73595870558847, "peer_archived_score": 5.975512294024321, "baseline_archived_score": 66.89419522133407, "spot_peer_archived_score": -18.73595870558847 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609612490.722819, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609612490.722819, "end_time": null, "forecaster_count": 66, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9920409908234458, 0.007959009176554274 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 136, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4775, "title": "Will Richard Spencer receive a long-term Twitter ban before 2021?", "short_title": "", "url_title": "", "slug": "will-richard-spencer-receive-a-long-term-twitter-ban-before-2021", "author_id": 112639, "author_username": "emilowk", "coauthors": [], "created_at": "2020-07-08T03:17:01.530619Z", "published_at": "2020-07-19T07:00:00Z", "edited_at": 
"2025-09-05T17:28:58.278384Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-19T07:00:00Z", "comment_count": 9, "status": "resolved", "resolved": true, "actual_close_time": "2021-01-03T10:10:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-03T10:10:00Z", "open_time": "2020-07-19T07:00:00Z", "nr_forecasters": 60, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": "2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4775, "title": "Will 
Richard Spencer receive a long-term Twitter ban before 2021?", "created_at": "2020-07-08T03:17:01.530619Z", "open_time": "2020-07-19T07:00:00Z", "cp_reveal_time": "2020-07-21T07:00:00Z", "spot_scoring_time": "2020-07-21T07:00:00Z", "scheduled_resolve_time": "2021-01-31T23:00:00Z", "actual_resolve_time": "2021-01-03T10:10:00Z", "resolution_set_time": "2021-01-03T10:10:00Z", "scheduled_close_time": "2021-01-31T23:00:00Z", "actual_close_time": "2021-01-03T10:10:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "*[Parallel question about Stefan Molyneux](https://www.metaculus.com/questions/4733/will-stefan-molyneux-receive-a-long-term-twitter-ban-before-2021/).*\n\nAmerican owned social media networks are increasingly censoring their networks for political speech. Recently Youtube and Reddit combined forces to target a number of persons and [reddit communities](https://techcrunch.com/2020/06/29/trump-suspended-from-twitch-as-reddit-bans-the-the_donald-and-additional-subreddits/), including the pro-Donald Trump subreddit r/The_Donald and [Stefan Molyneux' Youtube channel](https://www.forbes.com/sites/mattperez/2020/06/29/youtube-bans-white-supremacists-stefan-molyneux-richard-spencer-david-duke/#6daab6f45ff1). [Richard Spencer is still active on Twitter, however](https://twitter.com/RichardBSpencer), and has 80.6k followers. 
Twitter has in the past also banned accounts for persons or groups with similar views (European identity politics/nationalism), such as [Jared Taylor/American Renaissance (in 2018)](https://www.cnet.com/news/white-nationalist-jared-taylor-american-renaissance-sues-twitter-for-account-suspension/) The question thus is:\n\n**Will Richard Spencer receive a long-term Twitter ban before 2021?**\n\nResolutions is positive if before by January 31st 2021 11:59EST, a credible news article or Twitter reports that Spencer's account has been suspended by Twitter citing violation of its policies, for at least a 30-day period, continuously.", "fine_print": "", "post_id": 4775, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1609667556.247012, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.038879117424790846 ] } ], "latest": { "start_time": 1609667556.247012, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": [ 0.01 ], "centers": [ 0.01 ], "interval_upper_bounds": [ 0.038879117424790846 ], "forecast_values": [ 0.99, 0.01 ], "means": [ 0.04489510997976851 ], "histogram": [ [ 0.0, 9.09432137885953, 0.877925459907288, 0.7904822613275736, 0.13505345074932254, 1.5383716169193375, 0.03952902996029374, 0.0, 0.15389407754895393, 0.0, 0.234799996366743, 0.0, 0.0, 0.0, 0.0, 0.07809616947858947, 0.0, 0.0, 0.0, 0.0, 0.03380766426233947, 0.0, 0.0, 0.0, 0.0, 0.032718567573612516, 0.0, 0.0, 0.0, 0.0, 0.09972609292496606, 0.17447624255676622, 0.0, 0.0, 0.0, 0.6719996467926511, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0017789123690750252, 0.0, 0.0011756116749470711, 0.0, 0.0, 0.002444497044844799, 0.006095433581409484, 0.008686660616741733, 0.0, 0.0, 0.005009210557615142, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003195643853332925, 0.0, 0.0, 0.0, 0.004046517214306785, 0.0, 0.0, 0.0, 0.0, 0.0, 0.010217135346786867, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 11.737536865249574, "coverage": 0.8539174967281062, "baseline_score": 22.061897538964963, "spot_peer_score": -5.421848457515353, "peer_archived_score": 11.737536865249574, "baseline_archived_score": 22.061897538964963, "spot_peer_archived_score": -5.421848457515353 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1609667556.271794, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1609667556.271794, "end_time": null, "forecaster_count": 60, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9896923069240224, 0.010307693075977543 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": -9, "user_vote": null }, "forecasts_count": 176, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4774, "title": "Will Scott Morrison be Prime Minister of Australia on 1 July 2021?", "short_title": "", "url_title": "", "slug": "will-scott-morrison-be-prime-minister-of-australia-on-1-july-2021", "author_id": 111805, "author_username": "galen", "coauthors": [], "created_at": "2020-07-08T01:40:29.196219Z", "published_at": "2020-08-12T23:00:00Z", "edited_at": "2025-09-05T17:28:58.965387Z", "curation_status": "approved", "curation_status_updated_at": "2020-08-12T23:00:00Z", "comment_count": 14, "status": "resolved", "resolved": true, "actual_close_time": "2021-05-31T14:00:00Z", "scheduled_close_time": "2021-05-31T14:00:00Z", "scheduled_resolve_time": "2021-07-01T20:37:00Z", "actual_resolve_time": "2021-07-01T20:37:00Z", "open_time": "2020-08-12T23:00:00Z", "nr_forecasters": 86, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32598, "name": "2020-2021 Leaderboard", "slug": 
"2020_2021_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4774, "title": "Will Scott Morrison be Prime Minister of Australia on 1 July 2021?", "created_at": "2020-07-08T01:40:29.196219Z", "open_time": "2020-08-12T23:00:00Z", "cp_reveal_time": "2020-08-14T23:00:00Z", "spot_scoring_time": "2020-08-14T23:00:00Z", "scheduled_resolve_time": "2021-07-01T20:37:00Z", "actual_resolve_time": "2021-07-01T20:37:00Z", "resolution_set_time": "2021-07-01T20:37:00Z", "scheduled_close_time": "2021-05-31T14:00:00Z", "actual_close_time": "2021-05-31T14:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, 
"default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Australia's parliamentary system gives members of the party in government the ability to vote on their leader, and so change the ruling Prime Minister outside of an election.\n\nAustralia's recent political history has also been characterised by frequent '[leadership spills](https://en.wikipedia.org/wiki/Leadership_spill)'. In the last 13 years, there have been four successful leadership spills by the party in government, and [five separate Prime Ministers](https://en.wikipedia.org/wiki/List_of_prime_ministers_of_Australia).\n\nIn late 2019 and early 2020, Scott Morrison (the Prime Minister of Australia at the time of writing) saw a number of challenges to his popular approval, including controversy over [a trip to Hawaii](https://www.theguardian.com/australia-news/2019/dec/21/scott-morrison-hawaii-horror-show-pr-disaster-unfolded) during a catastrophic bushfire season. Since the last election, at least one [poll](https://www.theaustralian.com.au/nation/newspoll) indicates that the two major parties have been within four points of one another. 
Given Australia's recent history of 'leadership spills', this question asks: \n\n**Will Scott Morrison be Prime Minister of Australia on 1 July 2021?**\n\nThis question will resolve as positive if, on 1 July 2021, the Australian government's official Prime Minister website '[pm.gov.au](https://www.pm.gov.au/)' lists 'Scott Morrison' as the current Prime Minister, negative if another name is listed, and ambiguous otherwise.", "fine_print": "", "post_id": 4774, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1622459587.368384, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.94 ], "centers": [ 0.96 ], "interval_upper_bounds": [ 0.98 ] } ], "latest": { "start_time": 1622459587.368384, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": [ 0.94 ], "centers": [ 0.96 ], "interval_upper_bounds": [ 0.98 ], "forecast_values": [ 0.040000000000000036, 0.96 ], "means": [ 0.9466439370828234 ], "histogram": [ [ 0.0, 0.00025516032229147665, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0008782752490162315, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0034545356851821305, 0.0, 0.07688730020434764, 0.10298867315833778, 0.0, 0.0, 0.21704473316238937, 0.0029988785395327496, 0.7602840904729573, 0.0, 0.06612687914115949, 0.0, 0.0, 0.009177111268178266, 0.04463644984976473, 0.0, 0.16690946789911046, 0.001322981755186089, 0.9505463133931304, 0.2738598963054012, 0.2791899903395875, 0.3161946272835071, 1.7805056204309724, 3.0457715249116504, 2.532698800900536, 0.9339492012248509, 1.5289222448271569, 3.9572440984177266 ] ] }, "score_data": { "peer_score": 18.673869951165763, "coverage": 0.9992684240470892, "baseline_score": 81.66581953170537, "spot_peer_score": 
-0.14486342741440597, "peer_archived_score": 18.673869951165763, "baseline_archived_score": 81.66581953170537, "spot_peer_archived_score": -0.14486342741440597 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1622459587.462491, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1622459587.462491, "end_time": null, "forecaster_count": 86, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.03145577472601935, 0.9685442252739807 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 8, "user_vote": null }, "forecasts_count": 264, "key_factors": [], "is_current_content_translated": false, "description": "" } ] }