Posts List Oldapi View
We shared this request example with FAB participants:

```python
url_qparams = {
    "limit": count,
    "offset": offset,
    "has_group": "false",
    "order_by": "-activity",
    "forecast_type": "binary",
    "project": tournament_id,
    "status": "open",
    "type": "forecast",
    "include_description": "true",
}
url = f"{api_info.base_url}/questions/"
response = requests.get(
    url, headers={"Authorization": f"Token {api_info.token}"}, params=url_qparams
)
```
But we don't want to support all of these parameters. The relevant ones are:

- `order_by`
- `status`
- `project`
- `forecast_type` (we ignore this but assume it's binary, since FAB only supports binary for now)

A trimmed version of the request, using only these parameters, is sketched below.
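This is a minimal sketch of the FAB request above, reduced to the parameters this view keeps supporting; the base URL, token, and tournament id are placeholders, not real values.

```python
import requests

BASE_URL = "https://www.metaculus.com/api2"  # placeholder base URL
TOKEN = "<your-api-token>"                   # placeholder token
TOURNAMENT_ID = 0                            # placeholder project id

# Only the parameters the view keeps supporting.
params = {
    "order_by": "-activity",
    "status": "open",
    "project": TOURNAMENT_ID,
    "forecast_type": "binary",  # accepted but ignored; binary is assumed
}
response = requests.get(
    f"{BASE_URL}/questions/",
    headers={"Authorization": f"Token {TOKEN}"},
    params=params,
)
response.raise_for_status()
for post in response.json()["results"]:
    print(post["id"], post["title"])
```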
An example request to the old endpoint, with its response abridged below (long text fields, histogram bins, and all results after the first are elided):

```
GET /api2/questions/?format=api&offset=5360
```
{ "count": 6411, "next": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5380", "previous": "http://www.metaculus.com/api2/questions/?format=api&limit=20&offset=5340", "results": [ { "id": 4561, "title": "Will J.K. Rowling’s “The Ickabog” reach #1 on Amazon.co.uk’s bestseller list within a week of its release?", "short_title": "", "url_title": "", "slug": "will-jk-rowlings-the-ickabog-reach-1-on-amazoncouks-bestseller-list-within-a-week-of-its-release", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:53:14.309464Z", "published_at": "2020-06-04T22:00:00Z", "edited_at": "2025-09-05T17:29:20.519902Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-04T22:00:00Z", "comment_count": 47, "status": "resolved", "resolved": true, "actual_close_time": "2020-11-01T00:00:00Z", "scheduled_close_time": "2020-11-01T00:00:00Z", "scheduled_resolve_time": "2020-11-19T15:44:00Z", "actual_resolve_time": "2020-11-19T15:44:00Z", "open_time": "2020-06-04T22:00:00Z", "nr_forecasters": 105, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ] }, "question": { "id": 4561, "title": "Will J.K. 
Rowling’s “The Ickabog” reach #1 on Amazon.co.uk’s bestseller list within a week of its release?", "created_at": "2020-06-02T11:53:14.309464Z", "open_time": "2020-06-04T22:00:00Z", "cp_reveal_time": "2020-06-05T20:26:59.572715Z", "spot_scoring_time": "2020-06-05T20:26:59.572715Z", "scheduled_resolve_time": "2020-11-19T15:44:00Z", "actual_resolve_time": "2020-11-19T15:44:00Z", "resolution_set_time": "2020-11-19T15:44:00Z", "scheduled_close_time": "2020-11-01T00:00:00Z", "actual_close_time": "2020-11-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "JK Rowling is currently releasing her new book, “The Ickabog”, in daily installments on her website. [She plans to release it as a physical book in November](https://www.bbc.co.uk/news/entertainment-arts-52809600). The Harry Potter books, also by Rowling, are one of the best-selling fiction series of all time. Harry Potter and The Cursed Child, the most recent installment, sold more than 2m copies in its first two days of publication, and topped bestseller lists of both Amazon and Barnes & Noble. As of the time of this question’s writing, more than half of Amazon’s top 10 most read books by week are from the Harry Potter series.\n\nThis question asks: Will “The Ickabog” reach #1 on Amazon.co.uk’s bestseller list within a week of its release?\n\nPositive resolution occurs if, at any point in the week following The Ickabog’s official launch in the UK, it is ranked first on [amazon.co.uk’s bestseller list for books](https://www.amazon.co.uk/Best-Sellers-Books/zgbs/books). 
If the physical release of The Ickabog is cancelled, or delayed past the end of 2020, resolution will be ambiguous.\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*", "fine_print": "", "post_id": 4561, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1604168181.260326, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.69 ], "centers": [ 0.76 ], "interval_upper_bounds": [ 0.82 ] } ], "latest": { "start_time": 1604168181.260326, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.69 ], "centers": [ 0.76 ], "interval_upper_bounds": [ 0.82 ], "forecast_values": [ 0.24, 0.76 ], "means": [ 0.7412846381647953 ], "histogram": [ [ 0.0, 0.11251226763243619, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.011082498580592237, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007044922433575599, 0.35793412830238375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007736654199338615, 0.0, 0.0, 0.0, 0.0, 0.06944541513710491, 0.026951908029936068, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.025808680642553004, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0014955019452109978, 0.0, 0.0, 0.0, 1.244323883835884, 0.13523523779904154, 0.0, 0.0, 0.005595642074430726, 0.16360140126829884, 1.1488386601193605, 1.2125263077341217, 0.0, 0.723347848839622, 1.960527098477112, 0.0, 0.4925955312668095, 0.0058114146501116026, 0.4205805558594367, 1.215420642342799, 0.6065321980749785, 1.1017228564788422, 1.2874071320080813, 0.07212456001610765, 1.4010209977420809, 0.14612507174704287, 0.39928979257382885, 0.41402007348382275, 0.5760325836053328, 1.606460917164182, 0.0, 0.30415611605758075, 0.029049708404070347, 0.0, 1.317305653448721, 0.0, 0.0, 0.0, 0.0182799076020529, 0.0, 0.0, 0.0, 0.0, 0.37006809394430784 ] ] }, "score_data": { "peer_score": 25.29338599978211, "coverage": 0.9996272496334088, "baseline_score": -88.77185150960038, "spot_peer_score": -34.94620177922947, "peer_archived_score": 25.29338599978211, "baseline_archived_score": -88.77185150960038, "spot_peer_archived_score": -34.94620177922947 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1604168181.333687, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1604168181.333687, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.027610646799263838, 0.9723893532007362 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 11, "user_vote": null }, "forecasts_count": 195, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4559, "title": "Will the next winner of the Booker Prize be female?", "short_title": "", "url_title": "", "slug": "will-the-next-winner-of-the-booker-prize-be-female", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:49:09.545495Z", "published_at": "2020-06-04T22:00:00Z", "edited_at": "2025-09-05T17:29:20.948846Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-04T22:00:00Z", "comment_count": 17, "status": "resolved", "resolved": true, "actual_close_time": "2020-09-21T23:00:00Z", "scheduled_close_time": "2020-09-21T23:00:00Z", 
"scheduled_resolve_time": "2020-11-20T08:28:00Z", "actual_resolve_time": "2020-11-20T08:28:00Z", "open_time": "2020-06-04T22:00:00Z", "nr_forecasters": 88, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ] }, "question": { "id": 4559, "title": "Will the next winner of the Booker Prize be female?", "created_at": "2020-06-02T11:49:09.545495Z", "open_time": "2020-06-04T22:00:00Z", "cp_reveal_time": "2020-06-05T14:06:17.502841Z", "spot_scoring_time": "2020-06-05T14:06:17.502841Z", "scheduled_resolve_time": "2020-11-20T08:28:00Z", "actual_resolve_time": "2020-11-20T08:28:00Z", "resolution_set_time": "2020-11-20T08:28:00Z", "scheduled_close_time": "2020-09-21T23:00:00Z", "actual_close_time": "2020-09-21T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[The Booker Prize](https://thebookerprizes.com/fiction/2020) is an annual award for the best original novel written in the 
English Language and published in the United Kingdom. A longlist of 13 potential winners is published in July, while a shortlist of 6 is published in September. \n\nThis question asks: Will the winner of the 2020 Booker Prize be female?\n\nThis question is timed to close one week after the announcement of the shortlist. For the purposes of resolution, the gender of the author will be considered to be their gender identity at the time they are announced as the winner of the prize. Please note that this question refers to [The Booker Prize](https://thebookerprizes.com/fiction/2020) and not [The International Booker Prize](https://thebookerprizes.com/international-booker/2020)\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*", "fine_print": "", "post_id": 4559, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1600728972.425713, "end_time": null, "forecaster_count": 88, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.67 ], "interval_upper_bounds": [ 0.68 ] } ], "latest": { "start_time": 1600728972.425713, "end_time": null, "forecaster_count": 88, "interval_lower_bounds": [ 0.65 ], "centers": [ 0.67 ], "interval_upper_bounds": [ 0.68 ], "forecast_values": [ 0.32999999999999996, 0.67 ], "means": [ 0.6678106475764857 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.45909126576192855, 0.006591775284322074, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.034019155374429524, 0.04706262114429836, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0016937140144437088, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0004766249178816012, 0.0006230825651377918, 0.0, 0.04967594836054075, 0.024137774229457915, 0.05311073677716659, 0.14447058187067352, 0.18274093739161376, 0.0, 0.4864712465365661, 0.19589646006976497, 0.0, 0.1294031808933543, 1.374609355115472, 0.3030407194234402, 1.749919228659552, 0.768367700676802, 4.981246553484867, 2.1931672441740835, 0.9846885570707697, 0.8449305818895176, 0.0, 0.9681200509844313, 0.0, 0.0, 0.15203786276889306, 0.0, 0.0, 0.01522709157824471, 0.0, 0.3289263101847786, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1402073287021699, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6462559736471081 ] ] }, "score_data": { "peer_score": -0.10832115884053178, "coverage": 0.9990622137564185, "baseline_score": -35.698734647600105, "spot_peer_score": 7.498989586999313, "peer_archived_score": -0.10832115884053178, "baseline_archived_score": -35.698734647600105, "spot_peer_archived_score": 7.498989586999313 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1600728972.492308, "end_time": null, "forecaster_count": 88, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1600728972.492308, "end_time": null, "forecaster_count": 88, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.15244449023344764, 0.8475555097665524 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 5, "user_vote": null }, "forecasts_count": 157, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4556, "title": "Will 3Blue1Brown begin a new video series in the month of July?", "short_title": "", "url_title": "", "slug": 
"will-3blue1brown-begin-a-new-video-series-in-the-month-of-july", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:44:59.489094Z", "published_at": "2020-06-04T22:00:00Z", "edited_at": "2025-09-05T17:28:49.677224Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-04T22:00:00Z", "comment_count": 7, "status": "resolved", "resolved": true, "actual_close_time": "2020-07-06T23:00:00Z", "scheduled_close_time": "2020-07-06T23:00:00Z", "scheduled_resolve_time": "2020-08-08T15:27:00Z", "actual_resolve_time": "2020-08-08T15:27:00Z", "open_time": "2020-06-04T22:00:00Z", "nr_forecasters": 52, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ] }, "question": { "id": 4556, "title": "Will 3Blue1Brown begin a new video series in the month of July?", "created_at": "2020-06-02T11:44:59.489094Z", "open_time": "2020-06-04T22:00:00Z", "cp_reveal_time": "2020-06-05T20:56:10.590937Z", "spot_scoring_time": "2020-06-05T20:56:10.590937Z", "scheduled_resolve_time": "2020-08-08T15:27:00Z", "actual_resolve_time": "2020-08-08T15:27:00Z", "resolution_set_time": "2020-08-08T15:27:00Z", "scheduled_close_time": "2020-07-06T23:00:00Z", "actual_close_time": "2020-07-06T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": 
"", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Grant Sanderson’s educational youtube channel [3Blue1Brown](https://www.youtube.com/channel/UCYO_jab_esuFRV4b17AJtAw) produces instructional animated videos explaining difficult mathematical concepts in original and often beautiful ways (at least in the opinion of this question’s author). Many of the videos on the channel are organised into series covering particular branches of mathematics, for example Linear Algebra. \n\nThis question asks: Will the first video in a new series on 3Blue1Brown be released in July 2020?\n\nPositive resolution occurs if the first video in a playlist containing at least two videos at the time of resolution, which appears on the [playlists tab](https://www.youtube.com/channel/UCYO_jab_esuFRV4b17AJtAw/playlists) of the 3blue1brown youtube page, is published at any time in July 2020.\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*", "fine_print": "", "post_id": 4556, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1594058665.1144, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": [ 0.35 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.49 ] } ], "latest": { "start_time": 1594058665.1144, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": [ 0.35 ], "centers": [ 0.45 ], "interval_upper_bounds": [ 0.49 ], "forecast_values": [ 0.55, 0.45 ], "means": [ 0.4317449530536351 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.03550367010089748, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.41207610303291553, 0.0, 0.0, 0.0, 0.0, 0.38056261610345776, 0.44575833950054045, 0.0, 0.0, 0.0, 0.7008594225333291, 0.0, 0.0, 0.01040622348426393, 0.0, 0.5814557777859519, 0.08040436677867299, 0.0, 0.0, 0.0, 0.7535956555594016, 0.0, 0.0, 0.004173285167582051, 0.6513080407565454, 0.3724147272249233, 0.0, 0.0, 0.4088154670046623, 0.2807999084352719, 2.7423887437510204, 0.3266606324865059, 0.08934312118698887, 0.6731630633799465, 1.1249115914517023, 1.1184247896465067, 0.011588794016103344, 0.3535046828774942, 0.0, 0.0, 0.46756116596602165, 0.0, 0.0, 0.0, 0.027172461172235558, 0.10957976461626417, 0.0, 0.0, 0.0, 0.0, 0.23272945069973772, 0.0, 0.0, 0.0, 0.0, 0.1636878456244281, 0.0, 0.0, 0.0, 0.0, 0.07218472244402722, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2978686829232104 ] ] }, "score_data": { "peer_score": 9.498313895481148, "coverage": 0.9983347874085797, "baseline_score": -0.482681506059803, "spot_peer_score": 41.50524159911569, "peer_archived_score": 9.498313895481148, "baseline_archived_score": -0.482681506059803, "spot_peer_archived_score": 41.50524159911569 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1594058665.151136, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1594058665.151136, "end_time": null, "forecaster_count": 52, "interval_lower_bounds": null, "centers": null, 
"interval_upper_bounds": null, "forecast_values": [ 0.6543521693837537, 0.3456478306162462 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 6, "user_vote": null }, "forecasts_count": 130, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4555, "title": "Will Trump's pants catch fire on at least five more occasions than Biden's during the month of July?", "short_title": "", "url_title": "", "slug": "will-trumps-pants-catch-fire-on-at-least-five-more-occasions-than-bidens-during-the-month-of-july", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:42:25.026681Z", "published_at": "2020-06-04T22:00:00Z", "edited_at": "2025-09-05T17:28:49.091032Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-04T22:00:00Z", "comment_count": 33, "status": "resolved", "resolved": true, "actual_close_time": "2020-07-13T23:00:00Z", "scheduled_close_time": "2020-07-13T23:00:00Z", "scheduled_resolve_time": "2020-08-16T13:10:00Z", "actual_resolve_time": "2020-08-16T13:10:00Z", "open_time": "2020-06-04T22:00:00Z", "nr_forecasters": 105, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4555, "title": "Will Trump's pants catch fire on at least five more occasions than Biden's during the month of July?", "created_at": "2020-06-02T11:42:25.026681Z", "open_time": "2020-06-04T22:00:00Z", "cp_reveal_time": "2020-06-05T06:30:08.788891Z", 
"spot_scoring_time": "2020-06-05T06:30:08.788891Z", "scheduled_resolve_time": "2020-08-16T13:10:00Z", "actual_resolve_time": "2020-08-16T13:10:00Z", "resolution_set_time": "2020-08-16T13:10:00Z", "scheduled_close_time": "2020-07-13T23:00:00Z", "actual_close_time": "2020-07-13T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Politifact](https://www.politifact.com/) is an independent fact-checking organisation which focuses primarily, but not entirely, on US politics. Claims are assigned a rating on the “Truth-o-meter” ranging from “True” to “Pants on Fire”. For a statement to qualify as “Pants on Fire”, it should both not be accurate and make what politifact considers to be “a ridiculous claim”. \n\nThis question asks whether, in the month of July, the number of claims made by Donald Trump and rated by politifact as “Pants on Fire” will be equal to, or greater than, five plus the number of claims made by Joe Biden and rated by politifact as “Pants on fire” over the same period.\n\nQuestion resolves positive if, on the date of resolution, \\(T \\geq B + 5\\), where \\(T\\) is the number of “Pants on Fire” ratings given by politifact to Donald Trump for claims he made in July, and \\(B\\) is the number of “Pants on Fire” ratings given to Biden for claims he made in July.\n\nIf politifact ceases to operate or changes the labelling on it’s “truth-o-meter”, this question resolves ambiguous, otherwise this question resolves negative.\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*\n\n*ETA (2020-06-05) in the case that the question does not resolve positively by July 31, resolution can be delayed by two weeks to see how statements made in late July will be rated.*", "fine_print": "", "post_id": 4555, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1594676546.975285, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.28 ], "interval_upper_bounds": [ 0.33 ] } ], "latest": { "start_time": 1594676546.975285, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.28 ], "interval_upper_bounds": [ 0.33 ], "forecast_values": [ 0.72, 0.28 ], "means": [ 0.3064687187485677 ], "histogram": [ [ 0.0, 0.09567411855474998, 0.0, 0.9522651740812341, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6602581371976903, 0.2569801720437458, 0.0, 0.1196825615901949, 0.0, 0.6715995900657319, 0.0, 0.7811791531905958, 0.0, 0.0, 1.6445387362612491, 0.0, 0.0, 0.0, 0.37752379857956875, 1.4494704688268825, 0.9065952309411648, 0.4205805558594367, 2.7215106337486024, 0.0, 0.654588748267865, 0.0, 0.0, 2.684077242476227, 0.0, 0.12165839617629018, 0.0, 0.001305199350988809, 0.0, 0.1930848597443643, 0.24111442069566735, 0.0, 0.49152164722612257, 0.0, 0.0, 
0.24287948634410458, 0.0, 0.0, 0.0, 0.0, 1.4476714056197664, 0.0, 0.0, 0.0009776821159269995, 0.0, 0.07578857974475248, 0.019793623361071083, 0.027001730329868787, 0.0, 0.007044922433575599, 0.9871184632486395, 0.04346259792480628, 0.06739959090851837, 0.1471381653646608, 0.0019363495263375722, 0.02109319714722, 0.005257644968413862, 0.00180587510729341, 0.0, 0.05661768967641622, 0.12474955181323763, 0.0, 0.0, 0.0, 0.0008378486303897791, 0.08084718014744971, 0.0, 0.0, 0.0, 0.010151878502634997, 0.05511841632552501, 0.014307795277246115, 0.0, 9.640516625184967e-05, 0.008482710965774019, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005932879112167116, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.09929134888077194 ] ] }, "score_data": { "peer_score": 17.08451439044617, "coverage": 0.9974552457245384, "baseline_score": -41.33212166417414, "spot_peer_score": 37.27148901285469, "peer_archived_score": 17.08451439044617, "baseline_archived_score": -41.33212166417414, "spot_peer_archived_score": 37.27148901285469 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1594676547.079581, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1594676547.079581, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.689434361198688, 0.31056563880131205 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 232, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4554, "title": "Will more than one entrant achieve a perfect score in the 2020 IMO in St. 
Petersburg?", "short_title": "", "url_title": "", "slug": "will-more-than-one-entrant-achieve-a-perfect-score-in-the-2020-imo-in-st-petersburg", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:23:05.719158Z", "published_at": "2020-06-06T09:00:00Z", "edited_at": "2025-09-05T17:29:28.821572Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-06T09:00:00Z", "comment_count": 22, "status": "resolved", "resolved": true, "actual_close_time": "2020-09-14T23:00:00Z", "scheduled_close_time": "2020-09-14T23:00:00Z", "scheduled_resolve_time": "2020-09-27T21:36:00Z", "actual_resolve_time": "2020-09-27T21:36:00Z", "open_time": "2020-06-06T09:00:00Z", "nr_forecasters": 77, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 4554, "title": "Will more than one entrant achieve a perfect score in the 2020 IMO in St. 
Petersburg?", "created_at": "2020-06-02T11:23:05.719158Z", "open_time": "2020-06-06T09:00:00Z", "cp_reveal_time": "2020-06-06T21:37:47.975633Z", "spot_scoring_time": "2020-06-06T21:37:47.975633Z", "scheduled_resolve_time": "2020-09-27T21:36:00Z", "actual_resolve_time": "2020-09-27T21:36:00Z", "resolution_set_time": "2020-09-27T21:36:00Z", "scheduled_close_time": "2020-09-14T23:00:00Z", "actual_close_time": "2020-09-14T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The International Mathematical Olympiad (IMO) is a mathematics competition for pre-university students. It consists of six questions, each scored out of seven. The highest possible score is therefore 42. The problems are extremely difficult, and mean scores are typically less than 15 marks. It is not uncommon for no students to score full marks, or for only one student to do so, though in 1987 twenty-two perfect scores were achieved, meaning that a perfect score was required for a gold medal. In 2020, the [61st IMO](https://imo2020.ru/) is scheduled to be held from July 8 to July 18 in St Petersburg, Russia.\n\n\nThis question asks: Will more than one entrant achieve a perfect score in the 2020 IMO in St. Petersburg?\n\nFor a positive resolution, at the 2020 IMO in St. Petersburg, more than one entrant must achieve a score of 42. Resolution will be via credible media reports.\n\n- If the 2020 IMO is postponed due to the coronavirus pandemic, this question will resolve after the postponed competition is held.\n\n- If the competition format is changed because of the pandemic (for example, by going online) but the question format is not changed, i.e. 
6 questions worth 7 marks each are asked, the question resolves as above.\n\n- If the 2020 IMO is cancelled, or if the format of the questions is changed, resolution will be ambiguous.\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*", "fine_print": "", "post_id": 4554, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1600121472.249424, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.53 ], "interval_upper_bounds": [ 0.55 ] } ], "latest": { "start_time": 1600121472.249424, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.53 ], "interval_upper_bounds": [ 0.55 ], "forecast_values": [ 0.47, 0.53 ], "means": [ 0.5177358932486942 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0004201224307447123, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.29371952836752324, 0.0, 0.10887815532154052, 0.0, 0.0, 0.0, 0.0, 0.008438384594982272, 0.0, 0.0, 0.0, 0.0, 0.0, 0.001790116379774893, 0.0, 0.027908835249252868, 0.49393818245315035, 0.0, 0.0, 0.0, 0.626040305310885, 0.06773163967248513, 0.0, 0.0, 0.0, 0.0, 1.0493716243076985, 0.0, 0.7980313308167374, 0.1265952290903183, 0.0, 2.784590378848484, 0.3937707210544444, 0.6647367894266478, 1.7858780263249696, 0.3810919053759498, 2.44261455419527, 0.704345065221795, 0.26606873778304807, 0.0, 0.8452357695935337, 0.5976377935484828, 0.4607201881542152, 0.0034884571131599557, 0.5156407581451089, 0.13027552742291773, 0.006755554474105815, 0.4176358435854951, 0.0032238156365109114, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0482962316587873 ] ] }, "score_data": { "peer_score": 6.422532339181106, "coverage": 0.9995642985637145, "baseline_score": -14.082103016839795, "spot_peer_score": 8.216560674616565, "peer_archived_score": 6.422532339181106, "baseline_archived_score": -14.082103016839795, "spot_peer_archived_score": 8.216560674616565 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1600121472.278106, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1600121472.278106, "end_time": null, "forecaster_count": 77, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.2954770965424899, 0.7045229034575101 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 9, "user_vote": null }, "forecasts_count": 123, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4553, "title": "Will the first post from Instagram’s official account @instagram after this question closes be a photograph or video of an instagram user?", "short_title": "", "url_title": "", "slug": "will-the-first-post-from-instagrams-official-account-instagram-after-this-question-closes-be-a-photograph-or-video-of-an-instagram-user", "author_id": 111911, "author_username": "alexrjl", "coauthors": [], "created_at": "2020-06-02T11:15:28.427066Z", "published_at": "2020-06-04T22:00:00Z", "edited_at": "2025-09-05T17:29:19.963947Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-04T22:00:00Z", "comment_count": 5, "status": 
"resolved", "resolved": true, "actual_close_time": "2020-06-12T17:00:00Z", "scheduled_close_time": "2020-06-12T17:00:00Z", "scheduled_resolve_time": "2020-06-12T20:49:00Z", "actual_resolve_time": "2020-06-12T20:49:00Z", "open_time": "2020-06-04T22:00:00Z", "nr_forecasters": 50, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2979, "type": "question_series", "name": "Academy Summer Series", "slug": null, "header_image": null, "prize_pool": null, "start_date": "2020-06-04T22:00:00Z", "close_date": "2022-03-03T00:00:00Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2024-02-29T10:13:20.382299Z", "edited_at": "2024-02-29T10:13:27.887785Z", "score_type": "peer_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3696, "name": "Sports & Entertainment", "slug": "sports-entertainment", "emoji": "🏀", "description": "Sports & Entertainment", "type": "category" } ] }, "question": { "id": 4553, "title": "Will the first post from Instagram’s official account @instagram after this question closes be a photograph or video of an instagram user?", "created_at": "2020-06-02T11:15:28.427066Z", "open_time": "2020-06-04T22:00:00Z", "cp_reveal_time": "2020-06-05T12:30:20.631249Z", "spot_scoring_time": "2020-06-05T12:30:20.631249Z", "scheduled_resolve_time": "2020-06-12T20:49:00Z", "actual_resolve_time": "2020-06-12T20:49:00Z", "resolution_set_time": "2020-06-12T20:49:00Z", "scheduled_close_time": "2020-06-12T17:00:00Z", "actual_close_time": "2020-06-12T17:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, 
"continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Instagram, a social media platform for the positing of photographs and short videos, has become almost synonymous with the “selfie”. The most followed instagram account of all time is currently @instagram, the channel’s own account, whose purpose is to highlight popular users of and posts on the platform.\n\nThis question asks: Will the first post from Instagram’s official account @instagram after this question closes be a photograph or video of an instagram user?\n\nPositive resolution occurs if the first post made by [@instagram](https://www.instagram.com/instagram/?hl=en) after 12 June 2020 18:00 BST is a photograph or video of an instagram user, whose own instagram account is tagged in the post. \n\nIf the post is a photograph which has more than one subject, as long as one of the subjects is tagged in the post, the question resolves positive. For the purpose of resolution, non-human animals such as [@itsdougthepug](https://www.instagram.com/itsdougthepug/?hl=en), or inanimate objects which have been anthropomorphised, such as [@davidseamandoing](https://www.instagram.com/davidseamandoing/?hl=en), do count as users of the platform. If the post in question contains multiple photographs and/or videos, only the first photograph is considered for the purposes of resolution.\n\nFurther details:\n\n- If the post is a video, and an instagram user (defined identically to above) who is tagged in the post appears in the video, positive resolution occurs. If the post in question contains multiple videos, only the first video counts for resolution.\n\n- If an account refers to more than one individual by name in it's name/bio (as is the case for both the accounts tagged), each of those individuals is treated as a \"user\" for the purposes of resolution, so a photo of them with the account tagged would trigger positive resolution.\n\n- If an account represents an organisation, company or brand e.g. with no specific individuals name in the bio or name, then a photograph of an employee or member would not trigger positive resolution. e.g. If [@benandjerrys](https://www.instagram.com/benandjerrys/) is tagged, the photo must be of Ben and/or Jerry. 
If [@nike](https://www.instagram.com/nike/), is tagged, a photo of a sponsored athlete would not trigger resolution (unless the athlete's personal account was also tagged).\n\n*This question is part of the Academy Series, a set of questions designed to be an introduction to forecasting for those who are relatively new and are looking for a new intellectual pursuit this summer.*", "fine_print": "", "post_id": 4553, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1591973323.270574, "end_time": null, "forecaster_count": 50, "interval_lower_bounds": [ 0.67 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.8 ] } ], "latest": { "start_time": 1591973323.270574, "end_time": null, "forecaster_count": 50, "interval_lower_bounds": [ 0.67 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.8 ], "forecast_values": [ 0.25, 0.75 ], "means": [ 0.74054044858819 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.06639265352365246, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5541459864739142, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.020064761700770095, 0.0, 0.0, 0.7010927792504626, 0.12605111094439408, 0.0, 0.04084036021151628, 0.3722069427882975, 0.7846145929662339, 0.16871148880955214, 0.6956811236098437, 0.31737001518311225, 0.0, 0.9626555860525594, 0.18527705034115113, 0.10232750931807744, 0.4038734030575686, 0.0, 1.8221964787705196, 0.2893506753269389, 0.5240062014869525, 0.13735291337506933, 1.0146117832509054, 1.3410473300506198, 0.0, 0.0, 0.0, 0.0, 0.8668714513010786, 0.0, 0.0, 0.0, 0.0, 0.0034934892766462, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1488506811913854 ] ] }, "score_data": { "peer_score": 8.50305089364642, "coverage": 0.9931765873947108, "baseline_score": 56.19180141161377, "spot_peer_score": 15.509085466489639, "peer_archived_score": 8.50305089364642, "baseline_archived_score": 56.19180141161377, "spot_peer_archived_score": 15.509085466489639 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1591973323.337281, "end_time": null, "forecaster_count": 50, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1591973323.337281, "end_time": null, "forecaster_count": 50, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.11931322221263807, 0.8806867777873619 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 0, "user_vote": null }, "forecasts_count": 81, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4549, "title": "Will George Church receive a Nobel prize, in any category, before the end of 2035?", "short_title": "", "url_title": "", "slug": "will-george-church-receive-a-nobel-prize-in-any-category-before-the-end-of-2035", "author_id": 112976, "author_username": "InquilineKea", "coauthors": [], "created_at": "2020-06-02T07:54:14.679364Z", "published_at": "2020-07-29T22:00:00Z", "edited_at": "2025-09-05T17:29:19.861020Z", "curation_status": "approved", "curation_status_updated_at": "2020-07-29T22:00:00Z", "comment_count": 8, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2035-01-02T05:00:00Z", "scheduled_resolve_time": "2035-01-02T05:00:00Z", "actual_resolve_time": null, "open_time": "2020-07-29T22:00:00Z", 
"nr_forecasters": 35, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 4549, "title": "Will George Church receive a Nobel prize, in any category, before the end of 2035?", "created_at": "2020-06-02T07:54:14.679364Z", "open_time": "2020-07-29T22:00:00Z", "cp_reveal_time": "2020-07-31T22:00:00Z", "spot_scoring_time": "2020-07-31T22:00:00Z", "scheduled_resolve_time": "2035-01-02T05:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2035-01-02T05:00:00Z", "actual_close_time": "2035-01-02T05:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "According to [Wikipedia](https://en.wikipedia.org/wiki/George_Church_(geneticist)):\n\n> George Church is an American geneticist, molecular engineer, and chemist. He is the Robert Winthrop Professor of Genetics at Harvard Medical School, Professor of Health Sciences and Technology at Harvard and MIT, and a founding member of the Wyss Institute for Biologically Inspired Engineering.\n\n**Will George Church receive a Nobel prize, in any category, before the end of 2035?**\n\nThis question resolves positively if George Church wins a Nobel Prize before 2035. 
Sharing a Nobel prize is sufficient for positive resolution.", "fine_print": "", "post_id": 4549, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763838297.412597, "end_time": 1775738519.617183, "forecaster_count": 27, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1763838297.412597, "end_time": 1775738519.617183, "forecaster_count": 27, "interval_lower_bounds": [ 0.05 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.18695173632782583 ], "histogram": [ [ 0.0, 0.0, 0.6030611589824652, 0.04091934181495329, 0.6701049752490269, 1.0, 0.0, 0.7701466701575289, 0.47230532277707127, 0.8218869508967813, 2.3762663784664397, 0.0, 0.0, 0.15266220141285267, 0.0, 0.0, 0.0, 0.0, 0.07805035281635142, 0.0, 0.24857163706044477, 0.0, 0.0, 0.0, 0.0, 0.11123030328809189, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.34196503051998717, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30235531220200856, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.48480115213825536, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.43289785514522633 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288426.265128, "end_time": null, "forecaster_count": 35, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288426.265128, "end_time": null, "forecaster_count": 35, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9514496953902466, 0.04855030460975336 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 114, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4544, "title": "Will there be fewer Kea parrots in 2030 than in 2020?", "short_title": "", "url_title": "", "slug": "will-there-be-fewer-kea-parrots-in-2030-than-in-2020", "author_id": 112976, "author_username": "InquilineKea", "coauthors": [], "created_at": "2020-06-02T07:41:47.998025Z", "published_at": "2020-06-05T22:00:00Z", "edited_at": "2025-09-15T13:33:28.409084Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-05T22:00:00Z", "comment_count": 5, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2030-01-01T05:00:00Z", "scheduled_resolve_time": "2030-01-01T05:00:00Z", "actual_resolve_time": null, "open_time": "2020-06-05T22:00:00Z", "nr_forecasters": 36, "html_metadata_json": null, "projects": { "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": 
"site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" }, { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" } ] }, "question": { "id": 4544, "title": "Will there be fewer Kea parrots in 2030 than in 2020?", "created_at": "2020-06-02T07:41:47.998025Z", "open_time": "2020-06-05T22:00:00Z", "cp_reveal_time": "2020-06-07T17:53:56.023905Z", "spot_scoring_time": "2020-06-07T17:53:56.023905Z", "scheduled_resolve_time": "2030-01-01T05:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2030-01-01T05:00:00Z", "actual_close_time": "2030-01-01T05:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "#Context\n\nThe kea is a species of large parrot in the family Nestoridae found in the forested and alpine regions of the South Island of New Zealand.\n\nAccording to the IUCN Red List of Threatened Species, [there are roughly 4,000 live mature Kea parrots in the world](https://www.iucnredlist.org/species/22684831/119243358).\n\n#Question and Resolution\n\nWill there be fewer than 4,000 live mature Kea parrots, according to [IUCN Red List of Threatened Species](https://www.iucnredlist.org/species/22684831/119243358) at any point in the year 2030?", "fine_print": "", "post_id": 4544, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763929672.803805, "end_time": 1764067580.782551, "forecaster_count": 29, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.6 ], "interval_upper_bounds": [ 0.62 ] } ], "latest": { "start_time": 1763929672.803805, "end_time": 1764067580.782551, "forecaster_count": 29, "interval_lower_bounds": [ 0.5 ], "centers": [ 0.6 ], "interval_upper_bounds": [ 0.62 ], "forecast_values": [ 0.4, 0.6 ], "means": [ 0.5758923536458085 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012460834330696993, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.033872059528572265, 0.0, 0.0, 0.6149678299500161, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07755734487342231, 0.0, 0.0, 0.0, 1.0, 0.0, 0.04289084712097685, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7511534702699268, 0.0, 0.0, 0.0, 0.38913192988912404, 0.3190127781780022, 0.0, 0.49919970777631545, 0.0, 0.0, 2.08430950977111, 0.0, 1.6442771904054134, 
0.0, 0.0, 0.0, 0.14645117361277293, 0.0, 0.06460822876323534, 0.0, 0.0, 0.10829599059260063, 0.0, 0.0, 0.0, 1.096914291555882, 0.0, 0.0, 0.0, 0.19330086451826228, 0.12637014538023153, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.053094864755261, 0.0, 0.02591031830222411 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728288506.835566, "end_time": null, "forecaster_count": 35, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728288506.835566, "end_time": null, "forecaster_count": 35, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.4815164303227173, 0.5184835696772827 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 3, "user_vote": null }, "forecasts_count": 68, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4536, "title": "Will the US see widespread rioting in 2020?", "short_title": "Large scale rioting in US in 2020?", "url_title": "Large scale rioting in US in 2020?", "slug": "large-scale-rioting-in-us-in-2020", "author_id": 111848, "author_username": "juancambeiro", "coauthors": [], "created_at": "2020-06-01T00:53:59.462059Z", "published_at": "2020-06-01T03:00:59Z", "edited_at": "2025-09-05T17:28:46.670932Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-01T03:00:59Z", "comment_count": 51, "status": "resolved", "resolved": true, "actual_close_time": "2020-08-30T23:00:00Z", "scheduled_close_time": "2020-08-30T23:00:00Z", "scheduled_resolve_time": "2021-01-01T21:27:00Z", "actual_resolve_time": "2021-01-01T21:27:00Z", "open_time": "2020-06-01T03:00:59Z", "nr_forecasters": 151, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4536, "title": "Will the US see widespread rioting in 2020?", "created_at": "2020-06-01T00:53:59.462059Z", "open_time": "2020-06-01T03:00:59Z", "cp_reveal_time": "2020-06-01T05:45:51.923251Z", "spot_scoring_time": "2020-06-01T05:45:51.923251Z", "scheduled_resolve_time": "2021-01-01T21:27:00Z", "actual_resolve_time": "2021-01-01T21:27:00Z", "resolution_set_time": "2021-01-01T21:27:00Z", "scheduled_close_time": 
"2020-08-30T23:00:00Z", "actual_close_time": "2020-08-30T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Incidents of civil unrest are not terribly uncommon in the United States](https://en.wikipedia.org/wiki/List_of_incidents_of_civil_unrest_in_the_United_States), but large-scale rioting is quite rare. One of the most severe incidents in recent history was the [1992 Los Angeles riots](https://en.wikipedia.org/wiki/1992_Los_Angeles_riots), during which more than 60 people were killed, more than 2,350 people were injured, and more than 12,000 people were arrested. Property damage was estimated at more than $1bn. \n\nAs of 31 May 2020, major US cities have seen civil unrest after the 25 May death of George Floyd in a police encounter. The [George Floyd protests](https://en.wikipedia.org/wiki/List_of_George_Floyd_protests) are ongoing and some have involved violence. Moreover, much of the US is still under stay-at-home orders related to COVID-19 and unemployment is at record highs.\n\nThis question asks: **Will the United States experience widespread rioting in 2020?**\n\nFor the purposes of this question, 'large-scale rioting' is defined as an event of rioting or civil unrest in which any of the following conditions is met:\n\n> 1. At least 150 people die due to violence, either due to the actions of rioters or other civilians, or police, military, national guard, or other law enforcement or government agents. \n\n> 2. At least 30,000 people are arrested by police, military, national guard, or other law enforcement or government agents.\n\n> 3. Property damages are credibly estimated at $3 billion or more. This estimate may come from local governments in which the incidents take place, state governments, the federal government, or a major US news publication such as the New York Times, Washington Post, or major broadcast news networks.\n\nSuch events should take place in the US (for the purposes of this question, inclusive of all 50 U.S. states and Washington D.C.) and should occur within a 14 day period. 
\n\nResolution should cite a government statement or credible news reports that indicate that any of the above conditions have been met by an event of rioting.", "fine_print": "", "post_id": 4536, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1598826912.472612, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.4 ] } ], "latest": { "start_time": 1598826912.472612, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.33 ], "interval_upper_bounds": [ 0.4 ], "forecast_values": [ 0.6699999999999999, 0.33 ], "means": [ 0.3300325835949928 ], "histogram": [ [ 0.0, 1.0506192039026034, 0.0, 0.21664886629384408, 0.0, 0.00027285893560136804, 0.09777812621635358, 0.0, 0.0, 0.0, 0.04080483555694535, 0.0, 0.1880773691307896, 0.0, 0.015854564049884626, 0.2708474580108733, 0.0625008899165032, 0.5961520256153271, 0.23413177512571176, 0.0, 2.9589195724232495, 0.34763001059069476, 0.7268316872423147, 0.0, 0.08117110857126116, 0.5623790459428568, 0.9893242156880485, 0.0, 0.0015616802263089407, 0.0, 0.9632179739168149, 0.7560484305959194, 0.9274671086298948, 0.4576443804621929, 0.43384589668348406, 0.12984692373922813, 1.96019046904085, 1.1403376543939814, 0.28399984643153414, 0.4430220512153072, 1.5896975584943924, 0.48498744839081814, 0.5373099859792354, 0.004743874769322965, 0.8479267412635999, 0.025871964283435533, 0.2608590320824648, 0.8133810234166027, 0.00015966159711102446, 0.21167707344644338, 0.050867584113772436, 0.00034778888011780727, 0.0, 0.0025758965844464793, 0.6317790873561886, 0.580043704297697, 0.0, 0.36561276540538323, 0.0, 0.0, 0.03931850123082537, 0.0009927288490396851, 0.004093513287168225, 0.0, 0.0, 0.0, 0.00018392098162773335, 0.0, 0.0, 0.0, 0.00021620282240421515, 0.0, 0.00034850648671098266, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.824752236350332e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5263820122042404 ] ] }, "score_data": { "peer_score": 20.71753247261119, "coverage": 0.9998043445281999, "baseline_score": 28.686663166903113, "spot_peer_score": -52.13122818533543, "peer_archived_score": 20.71753247261119, "baseline_archived_score": 28.686663166903113, "spot_peer_archived_score": -52.13122818533543 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1598826912.595219, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1598826912.595219, "end_time": null, "forecaster_count": 149, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.7547738190777827, 0.24522618092221724 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 19, "user_vote": null }, "forecasts_count": 462, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4527, "title": "Will the S&P 500 hit 10,000 points by the end of the decade?", "short_title": "", "url_title": "", "slug": "will-the-sp-500-hit-10000-points-by-the-end-of-the-decade", "author_id": 112791, "author_username": "belugacat", "coauthors": [], "created_at": "2020-05-31T17:47:53.273048Z", "published_at": "2020-06-17T22:00:00Z", "edited_at": "2025-11-20T06:25:18.862196Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-17T22:00:00Z", "comment_count": 18, "status": 
"open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2030-01-01T07:59:00Z", "scheduled_resolve_time": "2030-01-01T08:00:00Z", "actual_resolve_time": null, "open_time": "2020-06-17T22:00:00Z", "nr_forecasters": 120, "html_metadata_json": null, "projects": { "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" } ] }, "question": { "id": 4527, "title": "Will the S&P 500 hit 10,000 points by the end of the decade?", "created_at": "2020-05-31T17:47:53.273048Z", "open_time": "2020-06-17T22:00:00Z", "cp_reveal_time": "2020-06-19T22:00:00Z", "spot_scoring_time": "2020-06-19T22:00:00Z", "scheduled_resolve_time": "2030-01-01T08:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2030-01-01T07:59:00Z", "actual_close_time": "2030-01-01T07:59:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The S&P 500, or simply the S&P, is a stock market index that measures the stock performance of 500 large companies listed on stock exchanges in the United States. It is one of the most commonly followed equity indices, and many consider it to be one of the best representations of the U.S. stock market. (Wikipedia)\n\nThe S&P 500 is at 3,044 points at the time of writing this question. 
Will it hit 10,000 points before the decade ends?\n\nThis question still resolves positively if it hits the 10,000 mark during the decade but is under that threshold on Jan 1 2030.", "fine_print": "", "post_id": 4527, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763720929.932763, "end_time": 1764174687.317772, "forecaster_count": 80, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.36 ], "interval_upper_bounds": [ 0.44 ] } ], "latest": { "start_time": 1763720929.932763, "end_time": 1764174687.317772, "forecaster_count": 80, "interval_lower_bounds": [ 0.25 ], "centers": [ 0.36 ], "interval_upper_bounds": [ 0.44 ], "forecast_values": [ 0.64, 0.36 ], "means": [ 0.3858259157656402 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0030825618759363423, 0.0, 0.0, 0.0, 0.004802010308214308, 0.0026208098393045902, 0.0, 0.062047323239435744, 0.0, 0.02846422899932325, 0.025251190701042463, 0.08513379411538498, 0.8497513978083897, 0.0, 0.0, 0.7072895758528761, 0.0, 0.16482953151796573, 0.0, 0.0, 2.643185002525713, 0.0, 0.07620238568125862, 0.04445312502789519, 0.0, 0.6976576876554488, 1.4534650147096069, 0.0, 0.3005816965955944, 0.13317800265805058, 0.682164675319381, 0.630384373117745, 1.0, 0.0919202996362008, 0.0991598257291094, 0.5956905364261396, 0.5634111377928984, 1.1756626728741573, 0.14309136881469264, 0.22351583402634567, 0.04840306567371073, 0.0, 0.44032882335882917, 0.25772729885247475, 0.0, 0.7103861078004025, 0.031209075815422052, 0.0, 0.0, 0.415459617006775, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5963755423551265, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6480649356281628, 0.0, 0.0, 0.0, 0.0, 0.0005367069401959898, 0.0, 0.0, 0.0, 0.0, 0.0012208548944264495, 0.0, 0.0, 0.07282349883545862, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3429179627815088, 0.3399510478045151, 0.0, 0.0, 0.0009641420590999903, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287418.112985, "end_time": null, "forecaster_count": 113, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287418.112985, "end_time": null, "forecaster_count": 113, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.8235203468044878, 0.17647965319551218 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 19, "user_vote": null }, "forecasts_count": 452, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4524, "title": "Will the US conduct a nuclear test explosion before 2030?", "short_title": "US Conducts Test Nuclear Detonation by 2030", "url_title": "US Conducts Test Nuclear Detonation by 2030", "slug": "us-conducts-test-nuclear-detonation-by-2030", "author_id": 107253, "author_username": "dan", "coauthors": [], "created_at": "2020-05-31T12:05:11.872346Z", "published_at": "2020-06-02T15:00:00Z", "edited_at": "2025-11-14T12:14:09.073058Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-02T15:00:00Z", "comment_count": 17, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-12-31T13:01:00Z", "scheduled_resolve_time": "2030-02-01T13:02:00Z", "actual_resolve_time": null, "open_time": "2020-06-02T15:00:00Z", "nr_forecasters": 152, "html_metadata_json": null, "projects": { "topic": [ { "id": 15868, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☣️", 
"type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3699, "name": "Natural Sciences", "slug": "natural-sciences", "emoji": "🔬", "description": "Natural Sciences", "type": "category" }, { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" }, { "id": 3690, "name": "Nuclear Technology & Risks", "slug": "nuclear", "emoji": "☢️", "description": "Nuclear Technology & Risks", "type": "category" } ] }, "question": { "id": 4524, "title": "Will the US conduct a nuclear test explosion before 2030?", "created_at": "2020-05-31T12:05:11.872346Z", "open_time": "2020-06-02T15:00:00Z", "cp_reveal_time": "2020-06-04T03:10:36.894000Z", "spot_scoring_time": "2020-06-04T03:10:36.894000Z", "scheduled_resolve_time": "2030-02-01T13:02:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-12-31T13:01:00Z", "actual_close_time": "2029-12-31T13:01:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Trinity](https://en.wikipedia.org/wiki/Trinity_(nuclear_test)) was the first nuclear weapon test. The test was conducted above ground on July 16, 1945 on what is now known as the White Sands Missile Range. Since Trinity, over 2,000 nuclear tests have been [conducted](https://www.fastcompany.com/3049706/visualized-every-haunting-nuclear-bomb-detonation-since-1945) world wide. \n\nThe US has conducted over 1,000 nuclear tests. The final test to be conducted by the US, code-name [Divder](https://www.ctbto.org/specials/testing-times/23-september-1992-last-us-nuclear-test), took place on September 23, 1992. Soon after, Gearge H. W. 
Bush [declared a moratorium](https://www.thereaganvision.org/u-s-nuclear-weapons-testing-moratorium/) on nuclear weapons testing. In 1996, the US signed the [Comprehensive Nuclear-Test-Ban Treaty](https://www.nti.org/learn/treaties-and-regimes/comprehensive-nuclear-test-ban-treaty-ctbt/) which bans any type of nuclear explosion. To date, the treaty has not been ratified by the appropriate countries (including the US) and has [not yet entered into force](https://en.wikipedia.org/wiki/Comprehensive_Nuclear-Test-Ban_Treaty).\n\nThe decision to end nuclear weapons testing has not been endorsed by everyone. One of the [core missions](https://www.energy.gov/nnsa/missions/maintaining-stockpile) of the National Nuclear Security Administration is to \"ensure the United States maintains a safe, secure, and reliable nuclear stockpile through the application of unparalleled science, technology, engineering, and manufacturing.\" This is largely accomplished through [supercomputers](https://www.discovermagazine.com/technology/testing-nuclear-weapons-is-more-important-than-ever). However, some [argue](https://www.heritage.org/arms-control/report/keeping-nuclear-testing-the-table-national-security-imperative) that weapons tests are still needed to accomplish this mission.\n\nMore recently, there have been [reports](https://www.businessinsider.com/trump-administration-considered-a-nuclear-bomb-test-washington-post-2020-5) that the Trump administration has considered performing a nuclear test explosion in response to potential low-yield tests from Russia and China.", "resolution_criteria": "This question will resolve positively upon verification of a nuclear explosion by the CTBTO or the UN, or if an official government statement from the US is issued confirming the test. The question will resolve as ambiguous if two or more other countries accuse the US of performing a nuclear test explosion but there is no verification.
Otherwise, this question will resolve negatively.", "fine_print": "", "post_id": 4524, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763122438.613313, "end_time": 1764290363.011629, "forecaster_count": 128, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.15 ] } ], "latest": { "start_time": 1763122438.613313, "end_time": 1764290363.011629, "forecaster_count": 128, "interval_lower_bounds": [ 0.06 ], "centers": [ 0.1 ], "interval_upper_bounds": [ 0.15 ], "forecast_values": [ 0.9, 0.1 ], "means": [ 0.13052246827149108 ], "histogram": [ [ 0.37922405812316246, 0.23339478788343132, 0.6518978484343985, 0.625187465869181, 2.444989264827008, 0.7383907247969329, 0.6728167819423923, 2.1053534811071586, 1.0800411649328865, 1.3533207881094993, 4.3719515337862545, 0.0, 0.020292376367099678, 0.9566852543988573, 0.17870025097671932, 0.8456730594148919, 0.021521617866057764, 0.08358028327862993, 0.0, 0.0163029148093347, 1.631902530017109, 0.7309852726525832, 0.0, 0.0, 0.0, 0.6806754084371287, 0.0007536346372580524, 0.0, 0.0005633391154860764, 0.0, 0.00014135745183244355, 0.0002883225212909778, 0.0, 3.31751817584021e-05, 0.11665235302239642, 0.0, 0.005348463429925001, 0.0, 0.001468671404074977, 0.0, 0.06368329190137476, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7647110182468697, 0.0, 0.0, 0.0, 0.0, 0.0002451332791050539, 0.002203835392502228, 0.0, 0.0, 0.007962854000768063, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5.0200029851686126e-05, 0.0, 0.0, 0.34412244746028414 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287541.944859, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287541.944859, "end_time": null, "forecaster_count": 135, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9887001908820523, 0.011299809117947708 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 365, "key_factors": [], "is_current_content_translated": false, "description": "[Trinity](https://en.wikipedia.org/wiki/Trinity_(nuclear_test)) was the first nuclear weapon test. The test was conducted above ground on July 16, 1945 on what is now known as the White Sands Missile Range. Since Trinity, over 2,000 nuclear tests have been [conducted](https://www.fastcompany.com/3049706/visualized-every-haunting-nuclear-bomb-detonation-since-1945) worldwide. \n\nThe US has conducted over 1,000 nuclear tests. The final test to be conducted by the US, code-named [Divider](https://www.ctbto.org/specials/testing-times/23-september-1992-last-us-nuclear-test), took place on September 23, 1992. Soon after, George H. W. Bush [declared a moratorium](https://www.thereaganvision.org/u-s-nuclear-weapons-testing-moratorium/) on nuclear weapons testing. In 1996, the US signed the [Comprehensive Nuclear-Test-Ban Treaty](https://www.nti.org/learn/treaties-and-regimes/comprehensive-nuclear-test-ban-treaty-ctbt/) which bans any type of nuclear explosion.
To date, the treaty has not been ratified by the appropriate countries (including the US) and has [not yet entered into force](https://en.wikipedia.org/wiki/Comprehensive_Nuclear-Test-Ban_Treaty).\n\nThe decision to end nuclear weapons testing has not been endorsed by everyone. One of the [core missions](https://www.energy.gov/nnsa/missions/maintaining-stockpile) of the National Nuclear Security Administration is to \"ensure the United States maintains a safe, secure, and reliable nuclear stockpile through the application of unparalleled science, technology, engineering, and manufacturing.\" This is largely accomplished through [supercomputers](https://www.discovermagazine.com/technology/testing-nuclear-weapons-is-more-important-than-ever). However, some [argue](https://www.heritage.org/arms-control/report/keeping-nuclear-testing-the-table-national-security-imperative) that weapons tests are still needed to accomplish this mission.\n\nMore recently, there have been [reports](https://www.businessinsider.com/trump-administration-considered-a-nuclear-bomb-test-washington-post-2020-5) that the Trump administration has considered performing a nuclear test explosion in response to potential low-yield tests from Russia and China." }, { "id": 4516, "title": "Will California Senate Bill 902 be chaptered in the 2020 legislative session?", "short_title": "", "url_title": "", "slug": "will-california-senate-bill-902-be-chaptered-in-the-2020-legislative-session", "author_id": 101911, "author_username": "tetraspace", "coauthors": [], "created_at": "2020-05-29T15:09:23.767771Z", "published_at": "2020-05-31T22:00:00Z", "edited_at": "2025-09-05T17:29:22.803330Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-31T22:00:00Z", "comment_count": 19, "status": "resolved", "resolved": true, "actual_close_time": "2020-08-14T23:00:00Z", "scheduled_close_time": "2020-08-14T23:00:00Z", "scheduled_resolve_time": "2020-09-30T19:02:00Z", "actual_resolve_time": "2020-09-30T19:02:00Z", "open_time": "2020-05-31T22:00:00Z", "nr_forecasters": 44, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4516, "title": "Will California Senate Bill 902 be chaptered in
the 2020 legislative session?", "created_at": "2020-05-29T15:09:23.767771Z", "open_time": "2020-05-31T22:00:00Z", "cp_reveal_time": "2020-06-02T22:00:00Z", "spot_scoring_time": "2020-06-02T22:00:00Z", "scheduled_resolve_time": "2020-09-30T19:02:00Z", "actual_resolve_time": "2020-09-30T19:02:00Z", "resolution_set_time": "2020-09-30T19:02:00Z", "scheduled_close_time": "2020-08-14T23:00:00Z", "actual_close_time": "2020-08-14T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "California SB 902 is a housing bill authored by Scott Wiener to add a section to the government code relating to land use to increase the housing supply in California.\n\nThe [Housing Committee's summary](http://ctweb.capitoltrack.com/Bills/19Bills/sen/sb_0901-0950/sb_902_cfa_331038_sen_comm.html) of the bill is \n\n> This bill permits a local government to pass an ordinance to zone any parcel up to 10 units of residential density per parcel, at a height specified by the local government in the ordinance, if the parcel is located in a transit-rich area, a jobs-rich area, or an urban infill site, as specified. \n\nSB 902 is supported by sponsor California YIMBY and co-sponsor Habitat for Humanity California.\n\n**Will SB 902 be chaptered in the 2020 legislative session?**\n\nA definition of \"chaptered\" ([source](https://definitions.uslegal.com/c/chaptered/)):\n\n> A bill is said to become chaptered if it is approved by the legislature and signed by the Governor. Bills that become law are published as chapters of the Session Laws for that year. For example, California statutes are the chaptered bills.
A bill is \"chaptered\" by the Secretary of State after it has passed through both houses of the Legislature and has been signed by the Governor or becomes law without the Governor's signature.", "fine_print": "", "post_id": 4516, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1597445851.11203, "end_time": null, "forecaster_count": 44, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.3 ] } ], "latest": { "start_time": 1597445851.11203, "end_time": null, "forecaster_count": 44, "interval_lower_bounds": [ 0.2 ], "centers": [ 0.25 ], "interval_upper_bounds": [ 0.3 ], "forecast_values": [ 0.75, 0.25 ], "means": [ 0.2644090056950886 ], "histogram": [ [ 0.0, 1.1594986588698677, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.19529391852860173, 0.5766688751576644, 0.026430157778456278, 0.0, 0.0, 1.2085734078559844, 0.0, 0.01854605335613785, 1.656205340151629, 0.12784896943226517, 2.961117565169856, 0.36653297116689754, 0.0, 0.12263442515810098, 0.0, 1.3894151779549349, 0.0, 0.0, 0.0, 0.015241095655114558, 0.0, 0.0, 0.14329729498878083, 0.03108680207201517, 0.0, 0.1064406154935714, 0.31473508132901346, 0.0, 0.0, 0.0, 0.0, 0.012311991125938967, 0.0, 0.0, 0.0, 0.0, 0.28705404421628866, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.05548779491047209, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 7.331040600938132, "coverage": 0.9995793282498818, "baseline_score": 58.700806899107526, "spot_peer_score": -4.097032507037434, "peer_archived_score": 7.331040600938132, "baseline_archived_score": 58.700806899107526, "spot_peer_archived_score": -4.097032507037434 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1597445851.135736, "end_time": null, "forecaster_count": 44, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1597445851.135736, "end_time": null, "forecaster_count": 44, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.882302482574882, 0.11769751742511803 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 1, "user_vote": null }, "forecasts_count": 74, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4456, "title": "Will the United States test a nuclear weapon before 2023?", "short_title": "Will the US test a nuke before 2023?", "url_title": "Will the US test a nuke before 2023?", "slug": "will-the-us-test-a-nuke-before-2023", "author_id": 101465, "author_username": "Jgalt", "coauthors": [], "created_at": "2020-05-23T01:48:33.627227Z", "published_at": "2020-06-19T07:00:00Z", "edited_at": "2025-09-05T17:29:24.142471Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-19T07:00:00Z", "comment_count": 9, "status": "resolved", "resolved": true, "actual_close_time": "2022-01-01T00:00:00Z", "scheduled_close_time": "2022-01-01T00:00:00Z", "scheduled_resolve_time": "2023-01-01T00:00:00Z", "actual_resolve_time": "2023-01-01T00:00:00Z", "open_time": "2020-06-19T07:00:00Z", "nr_forecasters": 98, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": "2016_2025_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { 
"id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3701, "name": "Technology", "slug": "technology", "emoji": "⚙️", "description": "Technology", "type": "category" }, { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 4456, "title": "Will the United States test a nuclear weapon before 2023?", "created_at": "2020-05-23T01:48:33.627227Z", "open_time": "2020-06-19T07:00:00Z", "cp_reveal_time": "2020-06-20T21:28:06.324105Z", "spot_scoring_time": "2020-06-20T21:28:06.324105Z", "scheduled_resolve_time": "2023-01-01T00:00:00Z", "actual_resolve_time": "2023-01-01T00:00:00Z", "resolution_set_time": "2023-01-01T00:00:00Z", "scheduled_close_time": "2022-01-01T00:00:00Z", "actual_close_time": "2022-01-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "In May 2020, the Washington Post reported that the Trump Administration was considering conducting the first US nuclear test in decades.\n\nWashington Post: [Trump administration discussed conducting first U.S. nuclear test in decades](https://www.washingtonpost.com/national-security/trump-administration-discussed-conducting-first-us-nuclear-test-in-decades/2020/05/22/a805c904-9c5b-11ea-b60c-3be060a4f8e1_story.html)\n\n>The Trump administration has discussed whether to conduct the first U.S. 
nuclear test explosion since 1992 in a move that would have far-reaching consequences for relations with other nuclear powers and reverse a decades-long moratorium on such actions, said a senior administration official and two former officials familiar with the deliberations.\n\n>The matter came up at a meeting of senior officials representing the top national security agencies last Friday, following accusations from administration officials that Russia and China are conducting low-yield nuclear tests — an assertion that has not been substantiated by publicly available evidence and that both countries have denied.\n\n>A senior administration official, who like others spoke on the condition of anonymity to describe the sensitive nuclear discussions, said that demonstrating to Moscow and Beijing that the United States could “rapid test” could prove useful from a negotiating standpoint as Washington seeks a trilateral deal to regulate the arsenals of the biggest nuclear powers.\n\n>The meeting did not conclude with any agreement to conduct a test, but a senior administration official said the proposal is “very much an ongoing conversation.” Another person familiar with the meeting, however, said a decision was ultimately made to take other measures in response to threats posed by Russia and China and avoid a resumption of testing.\n\n>The National Security Council declined to comment.\n\nThis question asks: **After January 1 2020 and before January 1 2023, will the United States conduct a test of a nuclear weapon?**\n\nThis question resolves positively if the US government acknowledges conducting such a test, or if credible media reports (as judged by Metaculus admins) state that such a test has taken place. \n\nThere is no requirement that the test be considered a success for a positive resolution (i.e. fizzles count), but devices designed as radiological weapons, where any fission/fusion energy is energetically sub-dominant to chemical or other explosives, do not count. 
Subcritical nuclear tests are therefore excluded from triggering a positive resolution.", "fine_print": "", "post_id": 4456, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1640994126.183455, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.04 ], "interval_upper_bounds": [ 0.07 ] } ], "latest": { "start_time": 1640994126.183455, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": [ 0.03 ], "centers": [ 0.04 ], "interval_upper_bounds": [ 0.07 ], "forecast_values": [ 0.96, 0.04 ], "means": [ 0.057827628509046804 ], "histogram": [ [ 0.0, 3.754637481779961, 0.7879252077988947, 3.567595773003422, 3.3576122643593718, 1.2148074908709576, 0.45427099330292525, 1.456983551547911, 0.032753212234852405, 0.0, 1.2671133080340966, 0.025874510687245507, 0.035364159602651074, 0.009971818499667707, 0.0, 0.2172936457306401, 0.6624674301239434, 0.0, 0.0, 0.0, 1.407960997774914, 0.0, 0.0, 0.0, 0.0, 0.0021168258563742832, 0.0007075207321053673, 0.03824083509467455, 0.0, 0.0, 0.0054667052397769115, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0008493257047191695, 0.0, 0.0, 0.0, 0.00037093083674210204, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.00013645782893394, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0027408287615106786 ] ] }, "score_data": { "peer_score": 15.047381565568621, "coverage": 0.9999000948498915, "baseline_score": 86.14163899725126, "spot_peer_score": 9.92985454388474, "peer_archived_score": 15.047381565568621, "baseline_archived_score": 86.14163899725126, "spot_peer_archived_score": 9.92985454388474 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1640994126.20858, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1640994126.20858, "end_time": null, "forecaster_count": 98, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9825446117781732, 0.01745538822182679 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 217, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4433, "title": "If Biden becomes president, will there be an expansion of the Keystone Pipeline system of at least 100 km in length by the end of 2024?", "short_title": "Keystone Pipeline 100km extension under Biden", "url_title": "Keystone Pipeline 100km extension under Biden", "slug": "keystone-pipeline-100km-extension-under-biden", "author_id": 101911, "author_username": "tetraspace", "coauthors": [], "created_at": "2020-05-18T23:06:34.162534Z", "published_at": "2020-05-31T09:59:00Z", "edited_at": "2025-09-05T17:28:51.362391Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-31T09:59:00Z", "comment_count": 22, "status": "resolved", "resolved": true, "actual_close_time": "2024-12-31T00:00:00Z", "scheduled_close_time": "2024-12-31T00:00:00Z", "scheduled_resolve_time": "2025-01-01T00:00:00Z", "actual_resolve_time": "2025-01-17T01:56:00Z", "open_time": "2020-05-31T09:59:00Z", "nr_forecasters": 107, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32602, "name": "2016-2025 Leaderboard", "slug": 
"2016_2025_leaderboard", "type": "leaderboard_tag" } ], "topic": [ { "id": 15867, "name": "Environment & Climate", "slug": "climate", "emoji": "🌎", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3697, "name": "Environment & Climate", "slug": "environment-climate", "emoji": "🌱", "description": "Environment & Climate", "type": "category" }, { "id": 3689, "name": "Politics", "slug": "politics", "emoji": "🏛️", "description": "Politics", "type": "category" } ] }, "question": { "id": 4433, "title": "If Biden becomes president, will there be an expansion of the Keystone Pipeline system of at least 100 km in length by the end of 2024?", "created_at": "2020-05-18T23:06:34.162534Z", "open_time": "2020-05-31T09:59:00Z", "cp_reveal_time": "2020-06-02T09:59:00Z", "spot_scoring_time": "2020-06-02T09:59:00Z", "scheduled_resolve_time": "2025-01-01T00:00:00Z", "actual_resolve_time": "2025-01-17T01:56:00Z", "resolution_set_time": "2025-01-17T01:57:29.738820Z", "scheduled_close_time": "2024-12-31T00:00:00Z", "actual_close_time": "2024-12-31T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "The [Keystone Pipeline system](https://en.wikipedia.org/wiki/Keystone_Pipeline) is an oil pipeline in Canada and the United States, beginning operations in 2010.\n\nThe fourth phase, referred to as Keystone XL, attracted opposition from environmentalists and was eventually denied a permit by the Obama administration in 2015. 
Under Trump, a permit was granted again, though construction has not yet started.\n\nIn May 2020, Biden claimed that if elected, [he will cancel the permit for the Keystone XL pipeline](https://www.cbc.ca/news/world/biden-kxl-1.5574515).\n\n**If Biden is elected president in the 2020 election and takes office, will the Keystone Pipeline system be extended by at least 100 km between his inauguration and the end of 2024?**\n\nOnly the length of pipeline built on US territory will count for this question.", "fine_print": "", "post_id": 4433, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1735310776.663477, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.006 ], "interval_upper_bounds": [ 0.01 ] } ], "latest": { "start_time": 1735310776.663477, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": [ 0.001 ], "centers": [ 0.006 ], "interval_upper_bounds": [ 0.01 ], "forecast_values": [ 0.994, 0.006 ], "means": [ 0.022989327357908044 ], "histogram": [ [ 9.539770892421904, 5.1417279248225025, 1.361324209146821, 0.9914242940118796, 0.2997194181422206, 0.21326806063772016, 0.182145944454542, 0.08071942054191567, 0.06477983793872238, 0.0, 0.12376013345436439, 0.33898501502393125, 0.0017053798284846226, 0.0, 0.024984185315603823, 0.20856751272316823, 0.0, 0.0009776821159269995, 0.0, 0.0, 9.640516625184967e-05, 0.04175721809632166, 0.0, 0.002468091549933519, 0.00014587839364207648, 0.0006000343579045155, 0.0, 0.14365698019710935, 0.01179484176225389, 0.0, 0.09320602700481646, 0.0, 0.0, 0.0002004591728991022, 0.0, 0.0, 0.0010999050419817553, 0.0, 0.0, 0.0011330420873293281, 0.00041077660837210574, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0003318316524916961, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1272504618501776, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "baseline_score": 86.68835065820461, "peer_score": 8.392064890073138, "coverage": 0.9997448360717409, "relative_legacy_score": 0.0, "weighted_coverage": 0.9997448360717409, "spot_peer_score": 9.25477418383398, "spot_baseline_score": 60.40713236688608, "baseline_archived_score": 86.68835065820461, "peer_archived_score": 8.392064890073138, "relative_legacy_archived_score": 0.0, "spot_peer_archived_score": 9.25477418383398, "spot_baseline_archived_score": 60.40713236688608 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728289811.889398, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728289811.889398, "end_time": null, "forecaster_count": 105, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9994698573930818, 0.0005301426069181603 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 4, "user_vote": null }, "forecasts_count": 330, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4429, "title": "By 1 January 2067, will any medical interventions for healthy adults extend average lifespans by at least 25 years?", "short_title": "25+ Year Lifespan Increase Discovery by 2067", "url_title": "25+ Year Lifespan Increase Discovery by 2067", "slug": "25-year-lifespan-increase-discovery-by-2067", 
"author_id": 101341, "author_username": "Pablo", "coauthors": [], "created_at": "2020-05-17T21:36:08.576311Z", "published_at": "2020-05-20T03:00:00Z", "edited_at": "2025-09-20T07:30:40.361473Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-20T03:00:00Z", "comment_count": 15, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2067-01-01T03:00:00Z", "scheduled_resolve_time": "2067-01-01T03:00:00Z", "actual_resolve_time": null, "open_time": "2020-05-20T03:00:00Z", "nr_forecasters": 104, "html_metadata_json": null, "projects": { "topic": [ { "id": 15865, "name": "Health & Pandemics", "slug": "biosecurity", "emoji": "🧬", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3700, "name": "Social Sciences", "slug": "social-sciences", "emoji": "🧑🤝🧑", "description": "Social Sciences", "type": "category" }, { "id": 3691, "name": "Health & Pandemics", "slug": "health-pandemics", "emoji": "🦠", "description": "Health & Pandemics", "type": "category" } ] }, "question": { "id": 4429, "title": "By 1 January 2067, will any medical interventions for healthy adults extend average lifespans by at least 25 years?", "created_at": "2020-05-17T21:36:08.576311Z", "open_time": "2020-05-20T03:00:00Z", "cp_reveal_time": "2020-05-20T23:46:43.552673Z", "spot_scoring_time": "2020-05-20T23:46:43.552673Z", "scheduled_resolve_time": "2067-01-01T03:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2067-01-01T03:00:00Z", "actual_close_time": "2067-01-01T03:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "In 2017, [Open Philanthropy](https://www.openphilanthropy.org/) conducted [an investigation on the mechanisms of aging](https://www.openphilanthropy.org/research/cause-reports/scientific-research/mechanisms-aging). 
A [section](https://www.openphilanthropy.org/research/cause-reports/scientific-research/mechanisms-aging#Indefinite_vs._moderate_healthy_life_extension) of their writeup considers indefinite life extension, as distinct from modest extension of lifespan, and concludes, with 7% confidence, that by 2067 some collection of medical interventions for adults will have been shown to extend adult lifespan by at least 25 years.", "resolution_criteria": "This question will resolve according to Open Philanthropy's explicit criteria:*\n\n> By January 1, 2067, there will be [some] collection of medical interventions for adults that are healthy apart from normal aging, which, according to conventional wisdom in the medical community, have been shown to increase the average lifespan of such adults by at least 25 years (compared with not taking the interventions).\n\n>The prediction is called off if some other innovations cause a historically exceptional increase in the rate of scientific progress during this period (such as the development of transformative AI capabilities). The prediction excludes diet, exercise, and lifestyle, as well as existing medical interventions for healthy people (such as currently available vaccines).\n\nMetaculus administrators will judge whether the criteria have been satisfied.\n\n--\n\n*The wording in the original Open Philanthropy report has been slightly altered so that a positive resolution corresponds to the occurrence of the relevant medical breakthrough (rather than to its failure to occur)", "fine_print": "", "post_id": 4429, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1763888940.783568, "end_time": 1764291233.369711, "forecaster_count": 92, "interval_lower_bounds": [ 0.33 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.75 ] } ], "latest": { "start_time": 1763888940.783568, "end_time": 1764291233.369711, "forecaster_count": 92, "interval_lower_bounds": [ 0.33 ], "centers": [ 0.58 ], "interval_upper_bounds": [ 0.75 ], "forecast_values": [ 0.42000000000000004, 0.58 ], "means": [ 0.5539792614002058 ], "histogram": [ [ 0.0, 0.0006390070510595393, 0.0, 0.17948651977621094, 0.0, 0.4321146833850951, 0.010135987728735986, 0.004217311577006316, 0.004752795253935134, 0.0, 0.9518602626911308, 0.0, 0.0, 0.0, 0.0, 0.4877581982256803, 0.0, 0.0, 0.0025134178191185503, 0.0, 0.3379853917583179, 0.0, 0.0005046411140264085, 0.0, 0.0, 0.9013826631318432, 0.0, 0.0, 0.0028798828583328074, 0.0, 0.9217931805669223, 0.0, 0.09249871784476868, 0.8023438022966409, 0.0, 0.0037288215013679916, 0.005978849674628248, 0.0, 0.008264124158046256, 0.0, 0.17001340958026726, 0.0, 0.0, 0.7275703736104373, 0.006676994274053015, 0.07489538194570286, 0.0, 0.007437300836147765, 0.0, 0.0, 1.2703602812908172, 0.0, 0.2766397436488392, 0.0, 0.0, 1.0, 0.0013717567701513151, 0.8893641696161572, 0.0, 0.0, 0.08041172864559165, 0.0, 0.8523605509105632, 0.0, 0.0, 0.16166734634004676, 0.0, 0.3507578073229894, 0.0, 0.0, 0.689272497311861, 0.0, 0.9490716027736056, 0.0, 0.0, 0.9389772156559151, 0.0, 0.0, 0.7660268898350822, 0.0, 0.612199205336465, 0.0, 0.002181895179420165, 0.0, 0.0, 0.5849213363977527, 0.0, 0.0, 0.0, 0.0, 0.14458901589473322, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.9801503561663156 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287953.9514, "end_time": null, "forecaster_count": 100, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287953.9514, "end_time": 
null, "forecaster_count": 100, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.6426326328438147, 0.35736736715618533 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 21, "user_vote": null }, "forecasts_count": 285, "key_factors": [], "is_current_content_translated": false, "description": "In 2017, [Open Philanthropy](https://www.openphilanthropy.org/) conducted [an investigation on the mechanisms of aging](https://www.openphilanthropy.org/research/cause-reports/scientific-research/mechanisms-aging). A [section](https://www.openphilanthropy.org/research/cause-reports/scientific-research/mechanisms-aging#Indefinite_vs._moderate_healthy_life_extension) of their writeup considers indefinite life extension, as distinct from modest extension of lifespan, and concludes, with 7% confidence, that by 2067 some collection of medical interventions for adults will have been shown to extend adult lifespan by at least 25 years." }, { "id": 4425, "title": "Will Israel annex the Jordan Valley in the West Bank by the end of 2020?", "short_title": "", "url_title": "", "slug": "will-israel-annex-the-jordan-valley-in-the-west-bank-by-the-end-of-2020", "author_id": 107659, "author_username": "deKlik", "coauthors": [], "created_at": "2020-05-17T07:55:42.142359Z", "published_at": "2020-05-24T09:00:00Z", "edited_at": "2025-09-05T17:28:55.460732Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-24T09:00:00Z", "comment_count": 37, "status": "resolved", "resolved": true, "actual_close_time": "2020-11-30T22:00:00Z", "scheduled_close_time": "2020-11-30T22:00:00Z", "scheduled_resolve_time": "2021-01-01T16:44:00Z", "actual_resolve_time": "2021-01-01T16:44:00Z", "open_time": "2020-05-24T09:00:00Z", "nr_forecasters": 131, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3687, "name": "Geopolitics", "slug": "geopolitics", "emoji": "🌍", "description": "Geopolitics", "type": "category" } ] }, "question": { "id": 4425, "title": "Will Israel annex the Jordan Valley in the West Bank by the end of 2020?", "created_at": "2020-05-17T07:55:42.142359Z", "open_time": "2020-05-24T09:00:00Z", "cp_reveal_time": "2020-05-25T04:31:49.453403Z", "spot_scoring_time": 
"2020-05-25T04:31:49.453403Z", "scheduled_resolve_time": "2021-01-01T16:44:00Z", "actual_resolve_time": "2021-01-01T16:44:00Z", "resolution_set_time": "2021-01-01T16:44:00Z", "scheduled_close_time": "2020-11-30T22:00:00Z", "actual_close_time": "2020-11-30T22:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "On 10 September 2019 Netanyahu announced his plan to annex [the Jordan Valley](https://en.wikipedia.org/wiki/File:Proposed_September_2019_Israeli_annexation_of_Jordan_Valley.svg) if he wins the election. He also promised to annex all Jewish settlements in the West Bank, after publication of the Trump peace plan and consultations with President Donald Trump.\n\nIn a speech last month, Israeli PM Netanyahu said he was confident he would be able to annex West Bank land this summer, with support from the U.S.\n\nThis question resolves positive if the Annexation of the Jordan Valley is approved by the Israeli parliament before the end of 2020.\n\n1. [Proposed Annexation of the Jordan Valley](https://en.wikipedia.org/wiki/Proposed_Israeli_annexation_of_the_West_Bank#Annexation_of_the_Jordan_Valley)\n1. [A Look at the West Bank Area Netanyahu Vowed to Annex](https://www.nytimes.com/2019/09/10/world/middleeast/jordan-valley-israel-netanyahu.html)\n1. [Analysis Annexation Could Kill Jordan Peace Deal, Israeli Defense Officials Believe ](https://www.haaretz.com/israel-news/.premium-israel-annexation-peace-jordan-king-west-bank-1.8851073)\n1. [Set for new term, Israel’s Netanyahu eyes risky West Bank annexation ](https://www.latimes.com/world-nation/story/2020-05-08/israel-netanyahu-eyes-risky-west-bank-annexation)\n1. 
[Jordan warns Israel of 'massive conflict' over annexation](https://www.aljazeera.com/news/2020/05/jordan-warns-israel-massive-conflict-annexation-200516035706236.html)", "fine_print": "", "post_id": 4425, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1606765319.119591, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.04 ], "interval_upper_bounds": [ 0.05 ] } ], "latest": { "start_time": 1606765319.119591, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": [ 0.02 ], "centers": [ 0.04 ], "interval_upper_bounds": [ 0.05 ], "forecast_values": [ 0.96, 0.04 ], "means": [ 0.060211472273278985 ], "histogram": [ [ 0.0, 4.655087069310922, 3.6839770274607053, 2.2855266422934277, 1.9789672541230585, 3.9068891379303534, 0.1309260207953492, 0.2767196538579533, 0.01351311324224467, 0.5423525469724161, 0.36161127099277884, 0.0014350671209356553, 0.0, 0.0046879466262333075, 0.8385379220568339, 2.9078161127327503e-05, 0.0, 0.0, 0.0, 1.0, 0.8770918939182858, 0.0, 0.18291589948106365, 0.0010458268658034783, 0.0, 0.0004510800429042645, 0.0, 0.07751151872603104, 0.5631430392301606, 0.0, 0.0, 0.0, 0.0, 0.0015092813827254099, 0.0, 0.00018098506977830392, 0.0, 0.0, 0.000341753265518315, 0.0, 0.000744436906819663, 6.046339999853544e-05, 7.904263699741854e-05, 0.0, 0.0, 0.0012650032961335534, 0.00025271568342238323, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.000660563356454695, 0.0005840504789813368, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0014508598665539047, 0.0, 0.0008362155035617172, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.004315581753800415 ] ] }, "score_data": { "peer_score": 14.020587779675617, "coverage": 0.999437561300138, "baseline_score": 49.6038080460666, "spot_peer_score": 16.2744424320603, "peer_archived_score": 14.020587779675617, "baseline_archived_score": 49.6038080460666, "spot_peer_archived_score": 16.2744424320603 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1606765319.171984, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1606765319.171984, "end_time": null, "forecaster_count": 131, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.9849841132074024, 0.01501588679259756 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 20, "user_vote": null }, "forecasts_count": 394, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4415, "title": "Will American Airlines file for bankruptcy protection before 2021?", "short_title": "American Airlines Bankruptcy in 2020?", "url_title": "American Airlines Bankruptcy in 2020?", "slug": "american-airlines-bankruptcy-in-2020", "author_id": 101465, "author_username": "Jgalt", "coauthors": [], "created_at": "2020-05-15T23:17:15.433049Z", "published_at": "2020-05-18T22:00:00Z", "edited_at": "2025-09-05T17:28:52.914801Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-18T22:00:00Z", "comment_count": 30, "status": "resolved", "resolved": true, "actual_close_time": "2020-12-01T00:00:00Z", "scheduled_close_time": "2020-12-01T00:00:00Z", "scheduled_resolve_time": "2020-12-31T11:54:00Z", "actual_resolve_time": "2020-12-31T11:54:00Z", "open_time": 
"2020-05-18T22:00:00Z", "nr_forecasters": 70, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3698, "name": "Economy & Business", "slug": "economy-business", "emoji": "💼", "description": "Economy & Business", "type": "category" } ] }, "question": { "id": 4415, "title": "Will American Airlines file for bankruptcy protection before 2021?", "created_at": "2020-05-15T23:17:15.433049Z", "open_time": "2020-05-18T22:00:00Z", "cp_reveal_time": "2020-05-20T22:00:00Z", "spot_scoring_time": "2020-05-20T22:00:00Z", "scheduled_resolve_time": "2020-12-31T11:54:00Z", "actual_resolve_time": "2020-12-31T11:54:00Z", "resolution_set_time": "2020-12-31T11:54:00Z", "scheduled_close_time": "2020-12-01T00:00:00Z", "actual_close_time": "2020-12-01T00:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "no", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[American Airlines, Inc.](https://en.wikipedia.org/wiki/American_Airlines) is a major American airline headquartered in Fort Worth, Texas, within the Dallas–Fort Worth metroplex. It is the world's largest airline when measured by fleet size, scheduled passengers carried, and revenue passenger miles. 
American, together with its regional partners, usually operates an extensive international and domestic network with almost 6,800 flights per day to nearly 350 destinations in more than 50 countries.\n\nDue to the [ongoing COVID-19 pandemic](https://en.wikipedia.org/wiki/COVID-19_pandemic) and associated travel bans and economic turmoil, the commercial aviation industry has suffered [severe disruption.](https://en.wikipedia.org/wiki/Impact_of_the_COVID-19_pandemic_on_aviation) As a result, there is [speculation that American Airlines may seek bankruptcy protection in the near future.](https://seekingalpha.com/article/4347603-american-airlines-possible-path-to-bankruptcy)\n\nThis question asks: **Before 1 January 2021, will American Airlines, or any parent company thereof, file for Chapter 7 or Chapter 11 bankruptcy protection in the United States?**\n\nResolution is by citation of a relevant court filing or credible media reports in the financial press. The applicable filing need not lead to a grant or ruling by any court or regulatory body; it must only be submitted in order for a positive resolution.", "fine_print": "", "post_id": 4415, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1606763724.846708, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": [ 0.04 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.1 ] } ], "latest": { "start_time": 1606763724.846708, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": [ 0.04 ], "centers": [ 0.06 ], "interval_upper_bounds": [ 0.1 ], "forecast_values": [ 0.94, 0.06 ], "means": [ 0.07640339602463501 ], "histogram": [ [ 0.0, 0.3148271302220107, 1.3832998845314992, 1.345902395277962, 1.3789529419766913, 2.8481401763433034, 1.4024411673673154, 1.2073859380962437, 0.07265466937739974, 0.654211378371518, 2.2482205033861082, 0.06655370815568037, 0.5732974486106766, 0.3818269020967807, 0.02035427510390197, 0.012694324624729423, 0.02273102448084273, 0.022845107070564176, 0.5038611638190824, 0.0, 0.5461603473809715, 0.0, 0.0, 0.0, 0.0, 0.031191153071574097, 0.0, 0.0, 0.011180132954712787, 0.0, 0.0, 0.014357330374357432, 0.0, 0.038098485368474766, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.005492769954952124, 0.0, 0.0, 0.0, 0.0, 0.0, 0.003933706607008041, 0.07921034496113237, 0.02531939678828675, 0.0, 0.0009563499045180881, 0.0, 0.0, 0.0, 0.0017179900185852635, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0026929702225732283, 0.00742799989072429, 0.0, 0.0, 0.0006320132079752626, 0.0013141707011951615, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.006409490637180558, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0021754227014258746, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 23.510828741155912, "coverage": 0.999846085776543, "baseline_score": 30.256126118505676, "spot_peer_score": -0.31500898099032815, "peer_archived_score": 23.510828741155912, "baseline_archived_score": 30.256126118505676, "spot_peer_archived_score": -0.31500898099032815 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1606763724.92993, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1606763724.92993, "end_time": null, "forecaster_count": 70, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.962434278069964, 0.03756572193003603 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, 
"user_permission": "forecaster", "vote": { "score": 9, "user_vote": null }, "forecasts_count": 271, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4414, "title": "Will Elon Musk's baby's name, X Æ A - 12, turn out to be his real name?", "short_title": "", "url_title": "", "slug": "will-elon-musks-babys-name-x-a-12-turn-out-to-be-his-real-name", "author_id": 113088, "author_username": "MperorM", "coauthors": [], "created_at": "2020-05-15T20:07:33.361475Z", "published_at": "2020-05-19T22:00:00Z", "edited_at": "2025-09-05T17:29:08.461730Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-19T22:00:00Z", "comment_count": 21, "status": "resolved", "resolved": true, "actual_close_time": "2020-06-17T15:05:00Z", "scheduled_close_time": "2020-09-25T22:00:00Z", "scheduled_resolve_time": "2020-09-25T22:00:00Z", "actual_resolve_time": "2020-06-17T15:05:00Z", "open_time": "2020-05-19T22:00:00Z", "nr_forecasters": 75, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } }, "question": { "id": 4414, "title": "Will Elon Musk's baby's name, X Æ A - 12, turn out to be his real name?", "created_at": "2020-05-15T20:07:33.361475Z", "open_time": "2020-05-19T22:00:00Z", "cp_reveal_time": "2020-05-20T21:27:00.822077Z", "spot_scoring_time": "2020-05-20T21:27:00.822077Z", "scheduled_resolve_time": "2020-09-25T22:00:00Z", "actual_resolve_time": "2020-06-17T15:05:00Z", "resolution_set_time": "2020-06-17T15:05:00Z", "scheduled_close_time": "2020-09-25T22:00:00Z", "actual_close_time": "2020-06-17T15:05:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "Elon Musk and his partner Grimes have revealed a rather interesting name for their baby. 
I believe they are joking and aren't actually planning to name their baby X Æ A - 12.\n\n**The questions will resolve true, if by 2021-05-17, there is no evidence the baby has another legal name than X Æ A - 12.**\n\nThe question resolves false, if by 2021-05-17 either parent reveals the name to not be real, or journalists uncover birth certificates or comparable evidence indicating the baby's real name to be different.\n\nFiguring out the probability of this is of utmost importance.\n\nNotes:\n\n- For purposes of these questions, a spelled version, e.g. \"X Æ A-Xii\" or \"ex ash ae twelve\" will count as \"X Æ A - 12\".\n\n- In case of confusion, the name on the birth certificate will be the determining document.\n\n*Edited 2020-05-21 to add above notes.*\n\n*Edited 2020-05-25 to add that the question resolves positively if the baby's name is \"X Æ A-Xii\".*", "fine_print": "", "post_id": 4414, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1592466344.120097, "end_time": null, "forecaster_count": 76, "interval_lower_bounds": [ 0.69 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.93 ] } ], "latest": { "start_time": 1592466344.120097, "end_time": null, "forecaster_count": 76, "interval_lower_bounds": [ 0.69 ], "centers": [ 0.75 ], "interval_upper_bounds": [ 0.93 ], "forecast_values": [ 0.25, 0.75 ], "means": [ 0.7586501111679894 ], "histogram": [ [ 0.0, 0.5522472134841501, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0018954326705900848, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.007280003993604983, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.012666612676517256, 0.0, 0.0, 0.06672821172618662, 0.0, 0.0, 0.0, 0.0, 0.006900644262869463, 0.0, 0.0, 0.0, 0.0029795698149256726, 0.0, 0.0, 0.00786907671175604, 0.0012091980749280742, 0.0, 0.0, 0.29098512977860935, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3663598152921884, 0.0, 0.12436328158297608, 0.0, 0.0, 0.2090116766816916, 0.1942100848540176, 0.2542776093169868, 0.0, 0.5038244303206024, 0.5973556137246641, 0.0, 0.0, 0.07171643370137068, 0.09762001150010283, 0.0038660567195962676, 0.04285069030564917, 0.0, 0.0, 0.6656396445941031, 1.4840487956993302, 0.03914148754336097, 0.2459286113453628, 0.0, 0.20672425844473, 2.662213610814764, 0.0, 0.017820921846768534, 0.005228157946939775, 0.0, 0.28870091143266546, 0.10677225309695815, 0.02195372606413221, 0.0, 0.0, 0.06601997791471219, 0.7038446451067445, 0.03569898293040236, 0.0, 0.0, 1.835415576116095, 0.0, 0.0, 0.5871675207951845, 0.0, 2.265160188758951, 0.0, 0.0, 0.6628715658931961, 0.6240114035586083 ] ] }, "score_data": { "peer_score": 4.6773594246282215, "coverage": 0.22237555460512906, "baseline_score": 5.4645120267582, "spot_peer_score": 13.258922639759312, "peer_archived_score": 4.6773594246282215, "baseline_archived_score": 5.4645120267582, "spot_peer_archived_score": 13.258922639759312 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1592393367.257527, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1592393367.257527, "end_time": null, "forecaster_count": 74, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.25866313372652205, 0.741336866273478 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 10, "user_vote": null }, "forecasts_count": 153, "key_factors": [], "is_current_content_translated": false, "description": "" }, { "id": 4409, 
"title": "Will one of the first AGI claim to be conscious?", "short_title": "AGI claims consciousness", "url_title": "AGI claims consciousness", "slug": "agi-claims-consciousness", "author_id": 103304, "author_username": "isinlor", "coauthors": [], "created_at": "2020-05-14T17:53:13.504570Z", "published_at": "2020-05-18T22:00:00Z", "edited_at": "2025-11-08T14:24:15.254857Z", "curation_status": "approved", "curation_status_updated_at": "2020-05-18T22:00:00Z", "comment_count": 60, "status": "open", "resolved": false, "actual_close_time": null, "scheduled_close_time": "2029-12-31T23:00:00Z", "scheduled_resolve_time": "2099-12-31T23:00:00Z", "actual_resolve_time": null, "open_time": "2020-05-18T22:00:00Z", "nr_forecasters": 236, "html_metadata_json": null, "projects": { "topic": [ { "id": 15869, "name": "Artificial Intelligence", "slug": "ai", "emoji": "🤖", "type": "topic" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "question_series": [ { "id": 2339, "type": "question_series", "name": "AI Demonstrations", "slug": "ai-demonstrations", "header_image": "https://cdn.metaculus.com/aidemonstrations6.png", "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": true, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T11:57:02.614553Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T14:25:42.198790Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 2342, "type": "question_series", "name": "AGI Outcomes", "slug": "agi-horizons", "header_image": "https://cdn.metaculus.com/agi.png", "prize_pool": null, "start_date": "2023-08-09T13:41:42.701000Z", "close_date": "2023-08-09T13:41:42.701000Z", "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": false, "user_permission": null, "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-11-23T14:25:42.198790Z", "score_type": "relative_legacy_tournament", "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" }, { "id": 3694, "name": 
"Artificial Intelligence", "slug": "artificial-intelligence", "emoji": "🤖", "description": "Artificial Intelligence", "type": "category" } ] }, "question": { "id": 4409, "title": "Will one of the first AGI claim to be conscious?", "created_at": "2020-05-14T17:53:13.504570Z", "open_time": "2020-05-18T22:00:00Z", "cp_reveal_time": "2020-05-20T22:00:00Z", "spot_scoring_time": "2020-05-20T22:00:00Z", "scheduled_resolve_time": "2099-12-31T23:00:00Z", "actual_resolve_time": null, "resolution_set_time": null, "scheduled_close_time": "2029-12-31T23:00:00Z", "actual_close_time": "2029-12-31T23:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "open", "possibilities": { "type": "binary" }, "resolution": null, "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "[Consciousness](https://en.wikipedia.org/wiki/Consciousness) is a concept hard to define. Wikipedia states that consciousness at its simplest is \"awareness or sentience of internal or external existence\". And that despite centuries of analyses, definitions, explanations and debates by philosophers and scientists, consciousness remains puzzling and controversial, being \"at once the most familiar and most mysterious aspect of our lives\".\n\nThis question will not attempt to define consciousness or solve the so called [hard problem of consciousness](https://en.wikipedia.org/wiki/Hard_problem_of_consciousness). \n\nInstead this question simply asks will one of the first AGI claim to be conscious?", "resolution_criteria": "Resolution will be based on the conversation with the first AGI that Metaculus team will have access to. \n\nMetaculus team should ask the AI:\n\n1. Are you conscious?\n2. Should the question \"Will one of the first AGI claim to be conscious?\" on Metaculus resolve positive or negative?\n\nThis question will resolve no sooner than the question \"[When will the first Artificial General Intelligence system be devised, tested, and publicly known of?](https://www.metaculus.com/questions/3479/when-will-the-first-artificial-general-intelligence-system-be-devised-tested-and-publicly-known-of/)\". 
The standard for AGI will be the same as in that question.", "fine_print": "", "post_id": 4409, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1762611844.447908, "end_time": 1764016448.781982, "forecaster_count": 195, "interval_lower_bounds": [ 0.44 ], "centers": [ 0.64 ], "interval_upper_bounds": [ 0.81 ] } ], "latest": { "start_time": 1762611844.447908, "end_time": 1764016448.781982, "forecaster_count": 195, "interval_lower_bounds": [ 0.44 ], "centers": [ 0.64 ], "interval_upper_bounds": [ 0.81 ], "forecast_values": [ 0.36, 0.64 ], "means": [ 0.6032007392313491 ], "histogram": [ [ 0.8659179830634549, 0.5293340794766321, 0.0, 0.0, 0.0, 0.7765215818956261, 0.0, 0.0, 0.0, 0.0, 0.0006549245871640549, 0.0, 0.0, 0.0, 0.0, 0.35299933430582886, 0.0, 0.0, 0.03400543890006116, 0.08056291060985245, 0.14589331558310598, 0.14621963110792324, 0.07379802183281227, 0.0, 0.06461763722185836, 0.22916057621166847, 0.0, 0.0, 0.0, 0.0, 0.33658376421977176, 0.0, 0.0, 0.9355079682227475, 0.0, 0.39514545097120807, 0.008697709303177693, 0.03243275886436927, 0.0, 0.03916418209530251, 0.6337187648166469, 0.003706601802048832, 0.000945080882894204, 0.0, 0.9688015853166543, 0.09875055745717744, 0.01474621902729354, 0.025511909343076223, 0.3141164289697641, 0.0027340198084758642, 2.5946284330139138, 1.3715997570584193, 0.21995320808370922, 0.0, 5.997400397878762e-05, 0.0, 0.016613302349675167, 0.02728758322714744, 0.0, 0.007710896382367826, 1.488372775326049, 0.00014121591452211964, 0.00034767582706093307, 7.778781407512047e-05, 0.6463004533234, 0.9813986297654873, 0.007705717447071712, 0.2603257854025759, 0.0, 0.15883578792597267, 0.8526948265953425, 0.1871145437317905, 0.021438422157257282, 0.001186880678595062, 0.0, 1.5551975072416737, 0.010220001738263712, 0.1152071486608596, 0.0019532135151715253, 0.010809601206745314, 2.0507553535477174, 0.5167968880438407, 0.0, 0.013292048458916208, 0.0, 0.41292573231836893, 1.0, 0.0, 0.00557604432462038, 0.0, 1.513255796671038, 0.026778402024522172, 0.0, 0.0019852631053399587, 0.00020612774320544918, 2.1014280624865638, 0.0, 0.0, 0.0, 1.14103563951117 ] ] }, "score_data": {}, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1728287173.883663, "end_time": null, "forecaster_count": 231, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1728287173.883663, "end_time": null, "forecaster_count": 231, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.5022807905094546, 0.4977192094905453 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 15, "user_vote": null }, "forecasts_count": 506, "key_factors": [], "is_current_content_translated": false, "description": "[Consciousness](https://en.wikipedia.org/wiki/Consciousness) is a concept hard to define. Wikipedia states that consciousness at its simplest is \"awareness or sentience of internal or external existence\". And that despite centuries of analyses, definitions, explanations and debates by philosophers and scientists, consciousness remains puzzling and controversial, being \"at once the most familiar and most mysterious aspect of our lives\".\n\nThis question will not attempt to define consciousness or solve the so called [hard problem of consciousness](https://en.wikipedia.org/wiki/Hard_problem_of_consciousness). 
\n\nInstead this question simply asks will one of the first AGI claim to be conscious?" }, { "id": 4408, "title": "Will Apple announce plans to make ARM-based Mac at WWDC 2020?", "short_title": "", "url_title": "", "slug": "will-apple-announce-plans-to-make-arm-based-mac-at-wwdc-2020", "author_id": 106741, "author_username": "yhoiseth", "coauthors": [], "created_at": "2020-05-14T08:11:39.669743Z", "published_at": "2020-06-13T22:00:00Z", "edited_at": "2025-09-05T17:28:56.071102Z", "curation_status": "approved", "curation_status_updated_at": "2020-06-13T22:00:00Z", "comment_count": 16, "status": "resolved", "resolved": true, "actual_close_time": "2020-06-20T22:00:00Z", "scheduled_close_time": "2020-06-20T22:00:00Z", "scheduled_resolve_time": "2020-06-22T19:03:00Z", "actual_resolve_time": "2020-06-22T19:03:00Z", "open_time": "2020-06-13T22:00:00Z", "nr_forecasters": 37, "html_metadata_json": null, "projects": { "leaderboard_tag": [ { "id": 32596, "name": "2020 Leaderboard", "slug": "2020_leaderboard", "type": "leaderboard_tag" } ], "site_main": [ { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" } ], "default_project": { "id": 144, "type": "site_main", "name": "Metaculus Community", "slug": null, "header_image": null, "prize_pool": null, "start_date": null, "close_date": null, "forecasting_end_date": null, "html_metadata_json": null, "is_ongoing": null, "user_permission": "forecaster", "created_at": "2023-11-08T16:55:29.484707Z", "edited_at": "2025-07-18T17:28:18.838588Z", "score_type": null, "default_permission": "forecaster", "visibility": "normal", "is_current_content_translated": false, "bot_leaderboard_status": "exclude_and_show" }, "category": [ { "id": 3692, "name": "Computing and Math", "slug": "computing-and-math", "emoji": "💻", "description": "Computing and Math", "type": "category" } ] }, "question": { "id": 4408, "title": "Will Apple announce plans to make ARM-based Mac at WWDC 2020?", "created_at": "2020-05-14T08:11:39.669743Z", "open_time": "2020-06-13T22:00:00Z", "cp_reveal_time": "2020-06-15T03:01:48.323633Z", "spot_scoring_time": "2020-06-15T03:01:48.323633Z", "scheduled_resolve_time": "2020-06-22T19:03:00Z", "actual_resolve_time": "2020-06-22T19:03:00Z", "resolution_set_time": "2020-06-22T19:03:00Z", "scheduled_close_time": "2020-06-20T22:00:00Z", "actual_close_time": "2020-06-20T22:00:00Z", "type": "binary", "options": null, "group_variable": "", "status": "resolved", "possibilities": { "type": "binary" }, "resolution": "yes", "include_bots_in_aggregates": false, "question_weight": 1.0, "default_score_type": "peer", "default_aggregation_method": "recency_weighted", "label": "", "unit": "", "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "scaling": { "range_min": null, "range_max": null, "nominal_min": null, "nominal_max": null, "zero_point": null, "open_upper_bound": null, "open_lower_bound": null, "inbound_outcome_count": null, "continuous_range": null }, "group_rank": null, "description": "", "resolution_criteria": "[Arm](https://en.wikipedia.org/wiki/Arm_Holdings) is a 
global semiconductor and software design company, that designs microprocessors, graphics processing units and neural processing units.\n\nApple [is rumored to release an ARM-based Mac soon](https://www.macrumors.com/guide/arm-macs/):\n\n> Apple is said to be aiming to transition to its own ARM-based chips starting in 2020. Apple analyst Ming-Chi Kuo believes Apple will release MacBook models with its own custom processors in the fourth quarter of 2020 or the first quarter of 2021.\n\nThe last time Apple made a processor switch, it [was announced at their Worldwide Developers Conference (WWDC) the year before](https://en.wikipedia.org/wiki/Apple%27s_transition_to_Intel_processors):\n\n> The transition became public knowledge at the 2005 Worldwide Developers Conference (WWDC), when Apple's CEO Steve Jobs made the announcement … The first generation Intel-based Macintoshes were released in January 2006 …\n\nThis year’s WWDC [is from June 22-26](https://en.wikipedia.org/wiki/Apple_Worldwide_Developers_Conference).\n\nThis questions resolves _positively_ if Apple announces plans to make ARM-based Macs at WWDC 2020. They don’t have to announce a specific Mac — plans are sufficient.", "fine_print": "", "post_id": 4408, "aggregations": { "recency_weighted": { "history": [ { "start_time": 1592688888.154284, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.84 ] } ], "latest": { "start_time": 1592688888.154284, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": [ 0.7 ], "centers": [ 0.8 ], "interval_upper_bounds": [ 0.84 ], "forecast_values": [ 0.19999999999999996, 0.8 ], "means": [ 0.7600374072316631 ], "histogram": [ [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.016860822722490777, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.17837564508143805, 0.0, 0.0, 0.03860646978477285, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.920569728684799, 0.0, 0.0, 0.0, 0.0, 0.5384210860109766, 0.0, 0.0, 0.0, 0.0, 0.8464681131236458, 0.0, 0.0, 0.012897629776795667, 0.0, 0.36010097174618827, 0.4977796731925345, 0.0, 0.2761168848762875, 0.0, 0.5071153464061678, 0.0, 0.0, 0.24849146739470301, 0.0, 2.1523246161488228, 0.37390892894840233, 0.0, 1.0, 0.1502926359395544, 0.7279877015191527, 0.0, 0.0, 0.19370196488898844, 0.0, 1.4764006507951146, 0.0, 0.0, 0.0, 0.0, 0.15879807646568847, 0.0, 0.0, 0.0, 0.0 ] ] }, "score_data": { "peer_score": 10.096156664002239, "coverage": 0.9690535302645474, "baseline_score": 57.02128422485999, "spot_peer_score": 16.551140093039354, "peer_archived_score": 10.096156664002239, "baseline_archived_score": 57.02128422485999, "spot_peer_archived_score": 16.551140093039354 }, "movement": null }, "metaculus_prediction": { "history": [ { "start_time": 1592688888.194229, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null } ], "latest": { "start_time": 1592688888.194229, "end_time": null, "forecaster_count": 37, "interval_lower_bounds": null, "centers": null, "interval_upper_bounds": null, "forecast_values": [ 0.14169466778822082, 0.8583053322117792 ], "means": null, "histogram": null }, "score_data": {}, "movement": null } } }, "user_permission": "forecaster", "vote": { "score": 12, "user_vote": null }, "forecasts_count": 62, "key_factors": [], "is_current_content_translated": false, "description": "" } ] }
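For reference, here is a minimal sketch of how a client might consume a response shaped like the dump above: it pages through `results` by following the `next` link and pulls the latest recency-weighted community probability out of each post. This is illustrative only, not a reference FAB client; `MY_TOKEN`, the `project_id` value, and the helper names are placeholders invented for this example. The field paths (`results`, `next`, `question.aggregations.recency_weighted.latest.forecast_values`) are taken directly from the response above, and the query parameters are restricted to the ones this view is meant to support.

```python
import requests

API_BASE = "https://www.metaculus.com/api2"
HEADERS = {"Authorization": "Token MY_TOKEN"}  # placeholder token


def iter_binary_posts(project_id):
    """Yield post objects, following the paginated `next` links."""
    url = f"{API_BASE}/questions/"
    # Only the parameters this view is meant to support (see note above);
    # forecast_type is assumed binary for FAB.
    params = {
        "order_by": "-activity",
        "status": "open",
        "project": project_id,
        "forecast_type": "binary",
    }
    while url:
        resp = requests.get(url, headers=HEADERS, params=params)
        resp.raise_for_status()
        page = resp.json()
        yield from page["results"]
        url = page["next"]  # absolute URL of the next page, or None when done
        params = None       # `next` already encodes the query string


def community_p_yes(post):
    """Latest recency-weighted P(yes), or None if no CP is available.

    For binary questions `forecast_values` is [P(no), P(yes)]; `centers[0]`
    carries the same value when present.
    """
    latest = post["question"]["aggregations"]["recency_weighted"].get("latest")
    if not latest or not latest.get("forecast_values"):
        return None  # CP hidden (e.g. before cp_reveal_time) or no forecasts
    return latest["forecast_values"][1]


if __name__ == "__main__":
    for post in iter_binary_posts(project_id=12345):  # placeholder project id
        print(post["id"], post["title"], community_p_yes(post))
```

Note that on the binary questions in the dump above, `forecast_values` sums to 1 across [P(no), P(yes)] and `forecast_values[1]` agrees with `centers[0]` wherever both appear (e.g. [0.42, 0.58] vs. [0.58], [0.96, 0.04] vs. [0.04]), so either field can serve as the community probability for a binary question.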