|
1 | 1 | # -*- coding: utf-8 -*- |
2 | 2 | """controllers_ajax.py |
3 | | -
|
4 | 3 | Created by Alan D. Snow, Curtis Rae, Shawn Crawley 2015. |
5 | 4 | License: BSD 3-Clause |
6 | 5 | """ |
|
18 | 17 | from sqlalchemy.orm.exc import ObjectDeletedError |
19 | 18 | import xarray |
20 | 19 |
|
| 20 | +from .functions import ecmwf_find_most_current_files |
| 21 | + |
21 | 22 | # django imports |
22 | 23 | from django.contrib.auth.decorators import user_passes_test, login_required |
23 | 24 | from django.core.exceptions import PermissionDenied |
|
45 | 46 | get_return_period_dict, |
46 | 47 | get_return_period_ploty_info) |
47 | 48 | from .controllers_validators import (validate_historical_data, |
48 | | - validate_watershed_info) |
| 49 | + validate_watershed_info, |
| 50 | + validate_rivid_info) |
49 | 51 | from .functions import (delete_from_database, |
50 | 52 | format_name, |
51 | 53 | get_units_title, |
@@ -408,6 +410,7 @@ def get_ecmwf_hydrograph_plot(request): |
408 | 410 | forecast_statistics, watershed_name, subbasin_name, river_id, units = \ |
409 | 411 | get_ecmwf_forecast_statistics(request) |
410 | 412 |
|
| 413 | + |
411 | 414 | # ensure lower std dev values limited by the min |
412 | 415 | std_dev_lower_df = \ |
413 | 416 | forecast_statistics['std_dev_range_lower'] |
@@ -953,7 +956,6 @@ def get_monthly_seasonal_streamflow_chart(request): |
953 | 956 | def get_flow_duration_curve(request): |
954 | 957 | """ |
955 | 958 | Generate flow duration curve for hydrologic time series data |
956 | | -
|
957 | 959 | Based on: http://earthpy.org/flow.html |
958 | 960 | """ |
959 | 961 | historical_data_file, river_id, watershed_name, subbasin_name = \ |
@@ -1800,3 +1802,89 @@ def watershed_group_update(request): |
1800 | 1802 | return JsonResponse({ |
1801 | 1803 | 'success': "Watershed group successfully updated." |
1802 | 1804 | }) |
| 1805 | + |
| 1806 | + |
@require_GET
@login_required
@exceptions_to_http_status
def get_ecmwf_forecast_probabilities(request):
    """
    Return return-period exceedance probabilities for the ECMWF forecast.

    For each forecast date, computes the percentage of the 51 regular
    ensemble members (the high-resolution member, ``*_52.nc``, is skipped)
    whose flow exceeds the 2-, 10-, and 20-year return-period thresholds,
    reporting the daily maximum percentage per threshold.

    GET parameters (validated by the helper functions): watershed/subbasin
    info, a river id, and an optional ``forecast_folder`` (defaults to
    ``'most_recent'``).

    Returns a JsonResponse of the form::

        {'success': True,
         'percentages': {'two': [(date, pct), ...],
                         'ten': [...],
                         'twenty': [...],
                         'dates': ['MM-DD', ...]}}

    Raises SettingsError when the forecast folder setting is invalid, and
    NotFoundError when no forecast files exist for the watershed.
    """
    path_to_rapid_output = app.get_custom_setting('ecmwf_forecast_folder')
    if not os.path.exists(path_to_rapid_output):
        raise SettingsError('Location of ECMWF forecast files faulty. '
                            'Please check settings.')

    # get/check information from AJAX request
    get_info = request.GET
    watershed_name, subbasin_name = validate_watershed_info(get_info)
    river_id = validate_rivid_info(get_info)

    # empty string or missing parameter both fall back to 'most_recent'
    forecast_folder = get_info.get('forecast_folder') or 'most_recent'

    # find/check current output datasets
    path_to_output_files = \
        os.path.join(path_to_rapid_output,
                     "{0}-{1}".format(watershed_name, subbasin_name))
    forecast_nc_list, start_date = \
        ecmwf_find_most_current_files(path_to_output_files, forecast_folder)
    if not forecast_nc_list or not start_date:
        raise NotFoundError('ECMWF forecast for %s (%s).'
                            % (watershed_name, subbasin_name))

    # combine the 51 regular ensemble members; the 52nd (high-resolution)
    # member is skipped so all stacked members share the same shape
    qout_datasets = []
    ensemble_index_list = []
    with rivid_exception_handler("ECMWF Forecast", river_id):
        for forecast_nc in forecast_nc_list:
            if forecast_nc.endswith("52.nc"):
                continue
            # ensemble number parsed from the filename: '..._<N>.nc'
            ensemble_index_list.append(
                int(os.path.basename(forecast_nc)[:-3].split("_")[-1])
            )
            # NOTE(review): ``autoclose`` was deprecated and later removed
            # in newer xarray releases -- confirm the pinned xarray version
            qout_datasets.append(
                xarray.open_dataset(forecast_nc, autoclose=True)
                .sel(rivid=river_id).Qout
            )

    merged_ds = xarray.concat(qout_datasets,
                              pd.Index(ensemble_index_list, name='ensemble'))

    # return-period flow thresholds for this river
    return_period_data = get_return_period_dict(request)
    return_periods = {
        'two': float(return_period_data["two"]),
        'ten': float(return_period_data["ten"]),
        'twenty': float(return_period_data["twenty"]),
    }

    # unique forecast dates, in chronological order, plus 'MM-DD' labels
    date_list = []
    short_dates = []
    probabilities = {'two': [], 'ten': [], 'twenty': []}
    for timestep in merged_ds.time.values:
        date_str = str(pd.to_datetime(timestep).date())
        if date_str not in date_list:
            date_list.append(date_str)
            short_dates.append(date_str[-5:])

    for date in date_list:
        daily_flows = merged_ds.sel(time=date)
        for period, threshold in return_periods.items():
            # percent of the 51 members exceeding the threshold, taking
            # the maximum over the day's time steps
            exceedance = (
                xarray.where(daily_flows > threshold, 1, 0)
                .sum(dim='ensemble').astype(float) / 51.0 * 100.
            )
            peak = exceedance.max().round(2).values
            # .item() replaces np.asscalar(), which was removed in
            # NumPy >= 1.23 (deprecated since 1.16)
            probabilities[period].append((date, peak.item()))

    probabilities['dates'] = short_dates

    # drop the last 5 entries of every series (the original wrote
    # ``[:-5 or None]``; ``or None`` was dead code since -5 is truthy)
    # -- presumably trims a trailing portion of the forecast horizon;
    # TODO(review): confirm the intent of the fixed cutoff of 5
    for key in probabilities:
        probabilities[key] = probabilities[key][:-5]

    return JsonResponse({'success': True, 'percentages': probabilities})
0 commit comments