diff --git a/docs/Users_Guide/series-analysis.rst b/docs/Users_Guide/series-analysis.rst
index 0be681585f..ed9f5578ab 100644
--- a/docs/Users_Guide/series-analysis.rst
+++ b/docs/Users_Guide/series-analysis.rst
@@ -33,6 +33,7 @@ The usage statement for the Series-Analysis tool is shown below:
-fcst file_1 ... file_n | fcst_file_list
-obs file_1 ... file_n | obs_file_list
[-both file_1 ... file_n | both_file_list]
+ [-aggr file]
[-paired]
-out file
-config file
@@ -58,13 +59,17 @@ Optional Arguments for series_analysis
5. To set both the forecast and observations to the same set of files, use the optional -both file_1 ... file_n | both_file_list option to the same set of files. This is useful when reading the NetCDF matched pair output of the Grid-Stat tool which contains both forecast and observation data.
-6. The -paired option indicates that the -fcst and -obs file lists are already paired, meaning there is a one-to-one correspondence between the files in those lists. This option affects how missing data is handled. When -paired is not used, missing or incomplete files result in a runtime error with no output file being created. When -paired is used, missing or incomplete files result in a warning with output being created using the available data.
+6. The -aggr option specifies the path to an existing Series-Analysis output file. When computing statistics for the input forecast and observation data, Series-Analysis aggregates the partial sums (SL1L2, SAL1L2 line types) and contingency table counts (CTC, MCTC, and PCT line types) with data provided in the aggregate file. This option enables Series-Analysis to run iteratively and update existing partial sums, counts, and statistics with new data.
-7. The -log file outputs log messages to the specified file.
+.. note:: When the -aggr option is used, only statistics that are derivable from partial sums and contingency table counts can be requested. Runtimes are generally much slower when aggregating data, since many additional NetCDF variables containing the scalar partial sums and contingency table counts must be read and written. A conceptual sketch of combining partial sums across runs is shown below.
-8. The -v level overrides the default level of logging (2).
+7. The -paired option indicates that the -fcst and -obs file lists are already paired, meaning there is a one-to-one correspondence between the files in those lists. This option affects how missing data is handled. When -paired is not used, missing or incomplete files result in a runtime error with no output file being created. When -paired is used, missing or incomplete files result in a warning with output being created using the available data.
-9. The -compress level option indicates the desired level of compression (deflate level) for NetCDF variables. The valid level is between 0 and 9. The value of "level" will override the default setting of 0 from the configuration file or the environment variable MET_NC_COMPRESS. Setting the compression level to 0 will make no compression for the NetCDF output. Lower number is for fast compression and higher number is for better compression.
+8. The -log file outputs log messages to the specified file.
+
+9. The -v level overrides the default level of logging (2).
+
+10. The -compress level option indicates the desired level of compression (deflate level) for NetCDF variables. Valid levels range from 0 to 9. The value of "level" overrides the default setting of 0 from the configuration file or the environment variable MET_NC_COMPRESS. Setting the compression level to 0 disables compression for the NetCDF output. Lower numbers provide faster compression, while higher numbers provide better compression.
An example of the series_analysis calling sequence is shown below:
@@ -179,3 +184,5 @@ The output_stats array controls the type of output that the Series-Analysis tool
11. PJC for Joint and Conditional factorization for Probabilistic forecasts (See :numref:`table_PS_format_info_PJC`)
12. PRC for Receiver Operating Characteristic for Probabilistic forecasts (See :numref:`table_PS_format_info_PRC`)
+
+.. note:: When the -aggr option is used, all partial sum and contingency table count columns are required in order to aggregate statistics across multiple runs. To facilitate this, the output_stats entries for the CTC, MCTC, SL1L2, SAL1L2, and PCT line types can be set to "ALL" to indicate that all available columns for those line types should be written.
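
To make the -aggr aggregation step concrete, below is a minimal standalone C++ sketch, not MET's actual implementation, of how SL1L2-style partial sums from a previous run can be combined with sums from the current run. All type and function names in the sketch are hypothetical. Since each partial sum is stored as a mean over its pair count, aggregation reduces to a count-weighted average of the two means:

    #include <stdexcept>

    // Hypothetical SL1L2-style partial sums: each field is a mean over n pairs.
    struct Sl1l2Sums {
       long   n     = 0;
       double fbar  = 0.0;   // mean forecast
       double obar  = 0.0;   // mean observation
       double fobar = 0.0;   // mean of f*o
       double ffbar = 0.0;   // mean of f*f
       double oobar = 0.0;   // mean of o*o
    };

    // Combine sums from a prior run (e.g. the file passed to -aggr) with sums
    // from the current run by re-weighting each mean by its pair count.
    Sl1l2Sums aggregate(const Sl1l2Sums &prev, const Sl1l2Sums &cur) {
       Sl1l2Sums out;
       out.n = prev.n + cur.n;
       if(out.n == 0) throw std::runtime_error("no pairs to aggregate");
       auto wavg = [&](double p, double c) {
          return (p * prev.n + c * cur.n) / out.n;
       };
       out.fbar  = wavg(prev.fbar,  cur.fbar);
       out.obar  = wavg(prev.obar,  cur.obar);
       out.fobar = wavg(prev.fobar, cur.fobar);
       out.ffbar = wavg(prev.ffbar, cur.ffbar);
       out.oobar = wavg(prev.oobar, cur.oobar);
       return out;
    }

Contingency table counts (CTC, MCTC, PCT) aggregate even more simply, by adding the corresponding cell counts. The derived statistics (RMSE, ANOM_CORR, and so on) are then recomputed from the combined sums and counts, which is why only statistics derivable from partial sums and contingency table counts can be requested with -aggr.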
diff --git a/internal/scripts/installation/config/install_met_env.wcoss2 b/internal/scripts/installation/config/install_met_env.wcoss2
index 2d02be1f87..68e7ef2872 100644
--- a/internal/scripts/installation/config/install_met_env.wcoss2
+++ b/internal/scripts/installation/config/install_met_env.wcoss2
@@ -4,7 +4,7 @@ module load ve/evs/2.0
module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304
module load netcdf/4.7.4
module load hdf5/1.10.6
-module load bufr/11.5.0
+module load bufr/11.6.0
module load zlib/1.2.11
module load jasper/2.0.25
module load libpng/1.6.37
diff --git a/internal/test_unit/config/SeriesAnalysisConfig_climo b/internal/test_unit/config/SeriesAnalysisConfig_climo
index 3728482541..f19bac7a20 100644
--- a/internal/test_unit/config/SeriesAnalysisConfig_climo
+++ b/internal/test_unit/config/SeriesAnalysisConfig_climo
@@ -132,13 +132,13 @@ vld_thresh = 0.5;
//
output_stats = {
fho = [ "TOTAL", "F_RATE", "H_RATE", "O_RATE" ];
- ctc = [ ];
+ ctc = [ "ALL" ];
cts = [ ];
mctc = [ ];
- mcts = [ "ACC" ];
- cnt = [ "TOTAL", "RMSE", "ANOM_CORR" ];
- sl1l2 = [ ];
- sal1l2 = [ ];
+ mcts = [ ];
+ cnt = [ "TOTAL", "RMSE", "ANOM_CORR", "RMSFA", "RMSOA" ];
+ sl1l2 = [ "ALL" ];
+ sal1l2 = [ "ALL" ];
pct = [ ];
pstd = [ ];
pjc = [ ];
diff --git a/internal/test_unit/config/SeriesAnalysisConfig_climo_prob b/internal/test_unit/config/SeriesAnalysisConfig_climo_prob
index 8b55c508d3..149062dc41 100644
--- a/internal/test_unit/config/SeriesAnalysisConfig_climo_prob
+++ b/internal/test_unit/config/SeriesAnalysisConfig_climo_prob
@@ -148,7 +148,7 @@ output_stats = {
cnt = [ ];
sl1l2 = [ ];
sal1l2 = [ ];
- pct = [ ];
+ pct = [ "ALL" ];
pstd = [ "TOTAL", "ROC_AUC", "BRIER", "BRIERCL", "BSS", "BSS_SMPL" ];
pjc = [ ];
prc = [ ];
diff --git a/internal/test_unit/xml/unit_climatology_1.0deg.xml b/internal/test_unit/xml/unit_climatology_1.0deg.xml
index a07d47ff6e..fcd6b59668 100644
--- a/internal/test_unit/xml/unit_climatology_1.0deg.xml
+++ b/internal/test_unit/xml/unit_climatology_1.0deg.xml
@@ -154,20 +154,18 @@
&OUTPUT_DIR;/climatology_1.0deg/stat_analysis_MPR_to_PSTD.stat
-
+--!>
&MET_BIN;/series_analysis
CLIMO_MEAN_FILE_LIST
"&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590409",
- "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590410",
- "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590411"
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590410"
CLIMO_STDEV_FILE_LIST
"&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590409",
- "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590410",
- "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590411"
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590410"
@@ -175,11 +173,9 @@
-fcst &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \
&DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F024.grib2 \
&DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F036.grib2 \
- &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F048.grib2 \
-obs &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1200_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_1200_000.grb2 \
- &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120411_0000_000.grb2 \
-paired \
-out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_GFS_CLIMO_1.0DEG.nc \
-config &CONFIG_DIR;/SeriesAnalysisConfig_climo \
@@ -190,25 +186,84 @@
+
+ &MET_BIN;/series_analysis
+
+ CLIMO_MEAN_FILE_LIST
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590411"
+
+
+ CLIMO_STDEV_FILE_LIST
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590411"
+
+
+
+ \
+ -fcst &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F048.grib2 \
+ -obs &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120411_0000_000.grb2 \
+ -paired \
+ -aggr &OUTPUT_DIR;/climatology_1.0deg/series_analysis_GFS_CLIMO_1.0DEG.nc \
+ -out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_GFS_CLIMO_1.0DEG_AGGR.nc \
+ -config &CONFIG_DIR;/SeriesAnalysisConfig_climo \
+ -v 2
+
+
+
+
echo "&DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F003.grib2 \
&DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F009.grib2 \
&DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F015.grib2 \
- &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F021.grib2 \
- &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F027.grib2 \
- &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F033.grib2 \
- &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F039.grib2 \
- &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F045.grib2" \
- > &OUTPUT_DIR;/climatology_1.0deg/input_fcst_file_list; \
+ &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F021.grib2" \
+ > &OUTPUT_DIR;/climatology_1.0deg/20120409_fcst_file_list; \
echo "&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_0000_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_0600_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1200_000.grb2 \
- &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1800_000.grb2 \
- &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \
+ &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120409_1800_000.grb2" \
+ > &OUTPUT_DIR;/climatology_1.0deg/20120409_obs_file_list; \
+ &MET_BIN;/series_analysis
+
+ DAY_INTERVAL 1
+ HOUR_INTERVAL 6
+ CLIMO_MEAN_FILE_LIST
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590409",
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590410",
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cmean_1d.19590411"
+
+
+ CLIMO_STDEV_FILE_LIST
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590409",
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590410",
+ "&DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg/cstdv_1d.19590411"
+
+
+
+ \
+ -fcst &OUTPUT_DIR;/climatology_1.0deg/20120409_fcst_file_list \
+ -obs &OUTPUT_DIR;/climatology_1.0deg/20120409_obs_file_list \
+ -paired \
+ -out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_PROB_CLIMO_1.0DEG.nc \
+ -config &CONFIG_DIR;/SeriesAnalysisConfig_climo_prob \
+ -v 2
+
+
+
+
+
+ echo "&DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F027.grib2 \
+ &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F033.grib2 \
+ &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F039.grib2 \
+ &DATA_DIR_MODEL;/grib2/sref_pr/sref_prob_2012040821_F045.grib2" \
+ > &OUTPUT_DIR;/climatology_1.0deg/20120410_fcst_file_list; \
+ echo "&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0600_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_1200_000.grb2 \
&DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_1800_000.grb2" \
- > &OUTPUT_DIR;/climatology_1.0deg/input_obs_file_list; \
+ > &OUTPUT_DIR;/climatology_1.0deg/20120410_obs_file_list; \
&MET_BIN;/series_analysis
DAY_INTERVAL 1
@@ -227,15 +282,16 @@
\
- -fcst &OUTPUT_DIR;/climatology_1.0deg/input_fcst_file_list \
- -obs &OUTPUT_DIR;/climatology_1.0deg/input_obs_file_list \
+ -fcst &OUTPUT_DIR;/climatology_1.0deg/20120410_fcst_file_list \
+ -obs &OUTPUT_DIR;/climatology_1.0deg/20120410_obs_file_list \
-paired \
- -out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_PROB_CLIMO_1.0DEG.nc \
+ -aggr &OUTPUT_DIR;/climatology_1.0deg/series_analysis_PROB_CLIMO_1.0DEG.nc \
+ -out &OUTPUT_DIR;/climatology_1.0deg/series_analysis_PROB_CLIMO_1.0DEG_AGGR.nc \
-config &CONFIG_DIR;/SeriesAnalysisConfig_climo_prob \
-v 2
diff --git a/internal/test_unit/xml/unit_series_analysis.xml b/internal/test_unit/xml/unit_series_analysis.xml
index c1e64416b3..96cda729dd 100644
--- a/internal/test_unit/xml/unit_series_analysis.xml
+++ b/internal/test_unit/xml/unit_series_analysis.xml
@@ -29,12 +29,12 @@
OBS_FIELD { name = "APCP"; level = [ "A06" ]; }
MASK_POLY
FHO_STATS "F_RATE", "O_RATE"
- CTC_STATS "FY_OY", "FN_ON"
+ CTC_STATS "ALL"
CTS_STATS "CSI", "GSS"
- MCTC_STATS "F1_O1", "F2_O2", "F3_O3"
+ MCTC_STATS "ALL"
MCTS_STATS "ACC", "ACC_NCL", "ACC_NCU"
CNT_STATS "TOTAL", "ME", "ME_NCL", "ME_NCU"
- SL1L2_STATS "FBAR", "OBAR"
+ SL1L2_STATS "ALL"
SAL1L2_STATS
PCT_STATS
PSTD_STATS
@@ -46,22 +46,56 @@
&DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F012.grib \
&DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F018.grib \
&DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F024.grib \
- &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F030.grib \
- &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F036.grib \
- &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F042.grib \
-obs &DATA_DIR_OBS;/stage4_hmt/stage4_2012040906_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012040912_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012040918_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012041000_06h.grib \
- &DATA_DIR_OBS;/stage4_hmt/stage4_2012041006_06h.grib \
+ -out &OUTPUT_DIR;/series_analysis/series_analysis_CMD_LINE_APCP_06_2012040900_to_2012041000.nc \
+ -config &CONFIG_DIR;/SeriesAnalysisConfig \
+ -v 1
+
+
+
+
+
+ &MET_BIN;/series_analysis
+
+ MODEL GFS
+ OBTYPE STAGE4
+ FCST_CAT_THRESH >0.0, >5.0
+ FCST_FIELD { name = "APCP"; level = [ "A06" ]; }
+ OBS_CAT_THRESH >0.0, >5.0
+ OBS_FIELD { name = "APCP"; level = [ "A06" ]; }
+ MASK_POLY
+ FHO_STATS "F_RATE", "O_RATE"
+ CTC_STATS "ALL"
+ CTS_STATS "CSI", "GSS"
+ MCTC_STATS "ALL"
+ MCTS_STATS "ACC", "ACC_NCL", "ACC_NCU"
+ CNT_STATS "TOTAL", "ME", "ME_NCL", "ME_NCU"
+ SL1L2_STATS "ALL"
+ SAL1L2_STATS
+ PCT_STATS
+ PSTD_STATS
+ PJC_STATS
+ PRC_STATS
+
+ \
+ -fcst &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F030.grib \
+ &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F036.grib \
+ &DATA_DIR_MODEL;/grib1/gfs_hmt/gfs_2012040900_F042.grib \
+ -obs &DATA_DIR_OBS;/stage4_hmt/stage4_2012041006_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012041012_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012041018_06h.grib \
- -out &OUTPUT_DIR;/series_analysis/series_analysis_CMD_LINE_APCP_06_2012040900_to_2012041100.nc \
+ -aggr &OUTPUT_DIR;/series_analysis/series_analysis_CMD_LINE_APCP_06_2012040900_to_2012041000.nc \
+ -out &OUTPUT_DIR;/series_analysis/series_analysis_AGGR_CMD_LINE_APCP_06_2012040900_to_2012041018.nc \
-config &CONFIG_DIR;/SeriesAnalysisConfig \
-v 1
@@ -69,18 +103,12 @@
echo "&DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F009.grib \
&DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F015.grib \
&DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F021.grib \
- &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F027.grib \
- &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F033.grib \
- &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F039.grib \
- &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F045.grib" \
+ &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F027.grib" \
> &OUTPUT_DIR;/series_analysis/input_fcst_file_list; \
echo "&DATA_DIR_OBS;/stage4_hmt/stage4_2012040906_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012040912_06h.grib \
&DATA_DIR_OBS;/stage4_hmt/stage4_2012040918_06h.grib \
- &DATA_DIR_OBS;/stage4_hmt/stage4_2012041000_06h.grib \
- &DATA_DIR_OBS;/stage4_hmt/stage4_2012041006_06h.grib \
- &DATA_DIR_OBS;/stage4_hmt/stage4_2012041012_06h.grib \
- &DATA_DIR_OBS;/stage4_hmt/stage4_2012041018_06h.grib" \
+ &DATA_DIR_OBS;/stage4_hmt/stage4_2012041000_06h.grib" \
> &OUTPUT_DIR;/series_analysis/input_obs_file_list; \
&MET_BIN;/series_analysis
@@ -99,7 +127,7 @@
CNT_STATS
SL1L2_STATS
SAL1L2_STATS
- PCT_STATS "OY_1", "ON_1"
+ PCT_STATS "ALL"
PSTD_STATS "TOTAL", "ROC_AUC", "BRIER", "BRIER_NCL", "BRIER_NCU"
PJC_STATS "CALIBRATION_1", "REFINEMENT_1"
PRC_STATS "PODY_1", "POFD_1"
@@ -107,12 +135,56 @@
\
-fcst &OUTPUT_DIR;/series_analysis/input_fcst_file_list \
-obs &OUTPUT_DIR;/series_analysis/input_obs_file_list \
- -out &OUTPUT_DIR;/series_analysis/series_analysis_FILE_LIST_PROB_APCP_06_2012040900_to_2012041100.nc \
+ -out &OUTPUT_DIR;/series_analysis/series_analysis_FILE_LIST_PROB_APCP_06_2012040900_to_2012041000.nc \
+ -config &CONFIG_DIR;/SeriesAnalysisConfig \
+ -v 1
+
+
+
+
+
+ echo "&DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F033.grib \
+ &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F039.grib \
+ &DATA_DIR_MODEL;/grib1/sref/sref_2012040821_F045.grib" \
+ > &OUTPUT_DIR;/series_analysis/aggregate_fcst_file_list; \
+ echo "&DATA_DIR_OBS;/stage4_hmt/stage4_2012041006_06h.grib \
+ &DATA_DIR_OBS;/stage4_hmt/stage4_2012041012_06h.grib \
+ &DATA_DIR_OBS;/stage4_hmt/stage4_2012041018_06h.grib" \
+ > &OUTPUT_DIR;/series_analysis/aggregate_obs_file_list; \
+ &MET_BIN;/series_analysis
+
+ MODEL SREF
+ OBTYPE STAGE4
+ FCST_CAT_THRESH >=0.00, >=0.25, >=0.50, >=0.75, >=1.00
+ FCST_FIELD { name = "PROB"; level = "A06"; prob = { name = "APCP"; thresh_lo = 0.25; }; }
+ OBS_CAT_THRESH >0.25
+ OBS_FIELD { name = "APCP"; level = "A06"; }
+ MASK_POLY
+ FHO_STATS
+ CTC_STATS
+ CTS_STATS
+ MCTC_STATS
+ MCTS_STATS
+ CNT_STATS
+ SL1L2_STATS
+ SAL1L2_STATS
+ PCT_STATS "ALL"
+ PSTD_STATS "TOTAL", "ROC_AUC", "BRIER", "BRIER_NCL", "BRIER_NCU"
+ PJC_STATS "CALIBRATION_1", "REFINEMENT_1"
+ PRC_STATS "PODY_1", "POFD_1"
+
+ \
+ -fcst &OUTPUT_DIR;/series_analysis/aggregate_fcst_file_list \
+ -obs &OUTPUT_DIR;/series_analysis/aggregate_obs_file_list \
+ -aggr &OUTPUT_DIR;/series_analysis/series_analysis_FILE_LIST_PROB_APCP_06_2012040900_to_2012041000.nc \
+ -out &OUTPUT_DIR;/series_analysis/series_analysis_AGGR_FILE_LIST_PROB_APCP_06_2012040900_to_2012041018.nc \
-config &CONFIG_DIR;/SeriesAnalysisConfig \
-v 1
diff --git a/src/basic/vx_util/crc_array.h b/src/basic/vx_util/crc_array.h
index 02805d1bf5..6a8841afce 100644
--- a/src/basic/vx_util/crc_array.h
+++ b/src/basic/vx_util/crc_array.h
@@ -108,6 +108,7 @@ class CRC_Array {
void add(const T &);
void add(const CRC_Array &);
+ void add_uniq(const T &, bool forward=true);
void add_css_sec(const char *);
void set(const T & val);
@@ -509,6 +510,22 @@ return;
////////////////////////////////////////////////////////////////////////
+template <typename T>
+
+void CRC_Array<T>::add_uniq(const T & k, bool forward)
+
+{
+
+if ( !has(k, forward) ) add(k);
+
+return;
+
+}
+
+
+////////////////////////////////////////////////////////////////////////
+
+
template <typename T>
void CRC_Array<T>::add_css_sec(const char * text)
diff --git a/src/libcode/vx_stat_out/stat_columns.cc b/src/libcode/vx_stat_out/stat_columns.cc
index 51992d7b50..f4ccb6354b 100644
--- a/src/libcode/vx_stat_out/stat_columns.cc
+++ b/src/libcode/vx_stat_out/stat_columns.cc
@@ -122,32 +122,18 @@ void write_header_row(const char * const * cols, int n_cols, int hdr_flag,
void write_mctc_header_row(int hdr_flag, int n_cat, AsciiTable &at,
int r, int c) {
- int i, j, col;
- ConcatString cs;
// Write the header column names if requested
if(hdr_flag) {
- for(i=0; i= 1) {
- tmp_str.format("%s%i", pct_columns[2], n_thresh);
- at.set_entry(r, col, tmp_str); // Threshold
- }
+ StringArray sa(get_pct_columns(n_thresh));
+ for(int i=0; i= 1) {
+ cs.format("%s%i", pct_columns[2], n_thresh);
+ sa.add(cs);
+ }
+
+ return sa;
+}
+
+
////////////////////////////////////////////////////////////////////////
void write_fho_row(StatHdrColumns &shc, const CTSInfo &cts_info,
@@ -4608,7 +4630,7 @@ void write_ssvar_cols(const PairDataEnsemble *pd_ptr, int i,
//
cnt_info.allocate_n_alpha(1);
cnt_info.alpha[0] = alpha;
- compute_cntinfo(pd_ptr->ssvar_bins[i].sl1l2_info, 0, cnt_info);
+ compute_cntinfo(pd_ptr->ssvar_bins[i].sl1l2_info, cnt_info);
//
// Ensemble spread/skill variance bins
diff --git a/src/libcode/vx_stat_out/stat_columns.h b/src/libcode/vx_stat_out/stat_columns.h
index 8af99ad730..7eb21c6fa0 100644
--- a/src/libcode/vx_stat_out/stat_columns.h
+++ b/src/libcode/vx_stat_out/stat_columns.h
@@ -49,6 +49,9 @@ extern void write_phist_header_row (int, int, AsciiTable &, int, int);
extern void write_orank_header_row (int, int, AsciiTable &, int, int);
extern void write_relp_header_row (int, int, AsciiTable &, int, int);
+extern StringArray get_mctc_columns (int);
+extern StringArray get_pct_columns (int);
+
extern void write_fho_row (StatHdrColumns &, const CTSInfo &, STATOutputType,
AsciiTable &, int &, AsciiTable &, int &);
extern void write_ctc_row (StatHdrColumns &, const CTSInfo &, STATOutputType,
diff --git a/src/libcode/vx_statistics/compute_stats.cc b/src/libcode/vx_statistics/compute_stats.cc
index bbc9e0ac1a..c0d988efd3 100644
--- a/src/libcode/vx_statistics/compute_stats.cc
+++ b/src/libcode/vx_statistics/compute_stats.cc
@@ -27,112 +27,97 @@ using namespace std;
const int detailed_debug_level = 5;
-
////////////////////////////////////////////////////////////////////////
-void compute_cntinfo(const SL1L2Info &s, bool aflag, CNTInfo &cnt_info) {
- double fbar, obar, ffbar, fobar, oobar, den;
- int n;
+void compute_cntinfo(const SL1L2Info &s, CNTInfo &cnt_info) {
+
+ // Initialize statistics
+ cnt_info.zero_out();
- // Set the quantities that can't be derived from SL1L2Info to bad data
- cnt_info.sp_corr.set_bad_data();
- cnt_info.kt_corr.set_bad_data();
- cnt_info.e10.set_bad_data();
- cnt_info.e25.set_bad_data();
- cnt_info.e50.set_bad_data();
- cnt_info.e75.set_bad_data();
- cnt_info.e90.set_bad_data();
- cnt_info.eiqr.set_bad_data();
- cnt_info.mad.set_bad_data();
- cnt_info.n_ranks = 0;
- cnt_info.frank_ties = 0;
- cnt_info.orank_ties = 0;
-
- // Get partial sums
- n = (aflag ? s.sacount : s.scount);
- fbar = (aflag ? s.fabar : s.fbar);
- obar = (aflag ? s.oabar : s.obar);
- fobar = (aflag ? s.foabar : s.fobar);
- ffbar = (aflag ? s.ffabar : s.ffbar);
- oobar = (aflag ? s.ooabar : s.oobar);
+ // Check for consistent counts
+ if(s.scount > 0 && s.sacount > 0 &&
+ s.scount != s.sacount) {
+ mlog << Error << "\ncompute_cntinfo() -> "
+ << "the scalar partial sum and scalar anomaly partial sum "
+ << "counts are both non-zero but do not match ("
+ << s.scount << " != " << s.sacount << ").\n\n";
+ exit(1);
+ }
// Number of matched pairs
+ int n = max(s.scount, s.sacount);
cnt_info.n = n;
- // Forecast mean and standard deviation
- cnt_info.fbar.v = fbar;
- cnt_info.fstdev.v = compute_stdev(fbar*n, ffbar*n, n);
-
- // Observation mean and standard deviation
- cnt_info.obar.v = obar;
- cnt_info.ostdev.v = compute_stdev(obar*n, oobar*n, n);
-
- // Multiplicative bias
- cnt_info.mbias.v = (is_eq(obar, 0.0) ? bad_data_double : fbar/obar);
-
- // Correlation coefficient
-
- // Handle SAL1L2 data
- if(aflag) {
- cnt_info.pr_corr.v = bad_data_double;
- cnt_info.anom_corr.v = compute_corr( fbar*n, obar*n,
- ffbar*n, oobar*n,
- fobar*n, n);
- cnt_info.rmsfa.v = sqrt(ffbar);
- cnt_info.rmsoa.v = sqrt(oobar);
- cnt_info.anom_corr_uncntr.v = compute_anom_corr_uncntr(ffbar, oobar,
- fobar);
- }
- // Handle SL1L2 data
- else {
- cnt_info.pr_corr.v = compute_corr( fbar*n, obar*n,
- ffbar*n, oobar*n,
- fobar*n, n);
- cnt_info.anom_corr.v = bad_data_double;
- cnt_info.rmsfa.v = bad_data_double;
- cnt_info.rmsoa.v = bad_data_double;
- cnt_info.anom_corr_uncntr.v = bad_data_double;
- }
+ // Process scalar partial sum statistics
+ if(s.scount > 0) {
- // Compute mean error
- cnt_info.me.v = fbar - obar;
+ // Forecast mean and standard deviation
+ cnt_info.fbar.v = s.fbar;
+ cnt_info.fstdev.v = compute_stdev(s.fbar*n, s.ffbar*n, n);
- // Compute mean error squared
- cnt_info.me2.v = cnt_info.me.v * cnt_info.me.v;
+ // Observation mean and standard deviation
+ cnt_info.obar.v = s.obar;
+ cnt_info.ostdev.v = compute_stdev(s.obar*n, s.oobar*n, n);
- // Compute mean absolute error
- cnt_info.mae.v = s.smae;
+ // Multiplicative bias
+ cnt_info.mbias.v = (is_eq(s.obar, 0.0) ? bad_data_double : s.fbar/s.obar);
- // Compute mean squared error
- cnt_info.mse.v = ffbar + oobar - 2.0*fobar;
+ // Correlation coefficient
+ cnt_info.pr_corr.v = compute_corr( s.fbar*n, s.obar*n,
+ s.ffbar*n, s.oobar*n,
+ s.fobar*n, n);
- // Compute mean squared error skill score
- den = cnt_info.ostdev.v * cnt_info.ostdev.v;
- if(!is_eq(den, 0.0)) {
- cnt_info.msess.v = 1.0 - cnt_info.mse.v / den;
- }
- else {
- cnt_info.msess.v = bad_data_double;
- }
+ // Compute mean error
+ cnt_info.me.v = s.fbar - s.obar;
- // Compute standard deviation of the mean error
- cnt_info.estdev.v = compute_stdev(cnt_info.me.v*n,
- cnt_info.mse.v*n, n);
+ // Compute mean error squared
+ cnt_info.me2.v = cnt_info.me.v * cnt_info.me.v;
- // Compute bias corrected mean squared error (decomposition of MSE)
- cnt_info.bcmse.v = cnt_info.mse.v - (fbar - obar)*(fbar - obar);
+ // Compute mean absolute error
+ cnt_info.mae.v = s.smae;
- // Compute root mean squared error
- cnt_info.rmse.v = sqrt(cnt_info.mse.v);
+ // Compute mean squared error
+ cnt_info.mse.v = s.ffbar + s.oobar - 2.0*s.fobar;
- // Compute Scatter Index (SI)
- if(!is_eq(cnt_info.obar.v, 0.0)) {
- cnt_info.si.v = cnt_info.rmse.v / cnt_info.obar.v;
+ // Compute mean squared error skill score
+ double den = cnt_info.ostdev.v * cnt_info.ostdev.v;
+ if(!is_eq(den, 0.0)) {
+ cnt_info.msess.v = 1.0 - cnt_info.mse.v / den;
+ }
+ else {
+ cnt_info.msess.v = bad_data_double;
+ }
+
+ // Compute standard deviation of the mean error
+ cnt_info.estdev.v = compute_stdev(cnt_info.me.v*n,
+ cnt_info.mse.v*n, n);
+
+ // Compute bias corrected mean squared error (decomposition of MSE)
+ cnt_info.bcmse.v = cnt_info.mse.v - (s.fbar - s.obar)*(s.fbar - s.obar);
+
+ // Compute root mean squared error
+ cnt_info.rmse.v = sqrt(cnt_info.mse.v);
+
+ // Compute Scatter Index (SI)
+ if(!is_eq(cnt_info.obar.v, 0.0)) {
+ cnt_info.si.v = cnt_info.rmse.v / cnt_info.obar.v;
+ }
+ else {
+ cnt_info.si.v = bad_data_double;
+ }
}
- else {
- cnt_info.si.v = bad_data_double;
+
+ // Process scalar anomaly partial sum statistics
+ if(s.sacount > 0) {
+ cnt_info.anom_corr.v = compute_corr( s.fabar*n, s.oabar*n,
+ s.ffabar*n, s.ooabar*n,
+ s.foabar*n, n);
+ cnt_info.rmsfa.v = sqrt(s.ffabar);
+ cnt_info.rmsoa.v = sqrt(s.ooabar);
+ cnt_info.anom_corr_uncntr.v = compute_anom_corr_uncntr(s.ffabar, s.ooabar,
+ s.foabar);
}
-
+
// Compute normal confidence intervals
cnt_info.compute_ci();
@@ -772,10 +757,23 @@ void compute_pctinfo(const PairDataPoint &pd, bool pstd_flag,
// Use input climatological probabilities or derive them
if(cmn_flag) {
- if(cprob_in) climo_prob = *cprob_in;
- else climo_prob = derive_climo_prob(pd.cdf_info_ptr,
- pd.ocmn_na, pd.ocsd_na,
- pct_info.othresh);
+
+      // Use climatological probabilities directly, if supplied
+ if(cprob_in) {
+ climo_prob = *cprob_in;
+ }
+ // Use observation climatology data, if available
+ else if(pd.ocmn_na.n() > 0) {
+ climo_prob = derive_climo_prob(pd.cdf_info_ptr,
+ pd.ocmn_na, pd.ocsd_na,
+ pct_info.othresh);
+ }
+ // Otherwise, try using forecast climatology data
+ else {
+ climo_prob = derive_climo_prob(pd.cdf_info_ptr,
+ pd.fcmn_na, pd.fcsd_na,
+ pct_info.othresh);
+ }
}
//
diff --git a/src/libcode/vx_statistics/compute_stats.h b/src/libcode/vx_statistics/compute_stats.h
index 556afcd1e8..1649cdcec2 100644
--- a/src/libcode/vx_statistics/compute_stats.h
+++ b/src/libcode/vx_statistics/compute_stats.h
@@ -24,8 +24,7 @@
//
////////////////////////////////////////////////////////////////////////
-extern void compute_cntinfo(const SL1L2Info &, bool, CNTInfo &);
-
+extern void compute_cntinfo(const SL1L2Info &, CNTInfo &);
extern void compute_cntinfo(const PairDataPoint &, const NumArray &,
bool, bool, bool, CNTInfo &);
extern void compute_i_cntinfo(const PairDataPoint &, int,
diff --git a/src/libcode/vx_statistics/contable.h b/src/libcode/vx_statistics/contable.h
index cae46f880a..8c7823c4d7 100644
--- a/src/libcode/vx_statistics/contable.h
+++ b/src/libcode/vx_statistics/contable.h
@@ -193,6 +193,13 @@ class Nx2ContingencyTable : public ContingencyTable {
double threshold(int index) const; // 0 <= index <= Nrows
+ //
+ // set counts
+ //
+
+ void set_event(int row, int count);
+ void set_nonevent(int row, int count);
+
//
// increment counts
//
diff --git a/src/libcode/vx_statistics/contable_nx2.cc b/src/libcode/vx_statistics/contable_nx2.cc
index 9af0358511..c69ec38545 100644
--- a/src/libcode/vx_statistics/contable_nx2.cc
+++ b/src/libcode/vx_statistics/contable_nx2.cc
@@ -187,7 +187,8 @@ void Nx2ContingencyTable::set_size(int NR, int NC)
if ( NC != 2 ) {
- mlog << Error << "\nNx2ContingencyTable::set_size(int, int) -> must have 2 columns!\n\n";
+ mlog << Error << "\nNx2ContingencyTable::set_size(int, int) -> "
+ << "must have 2 columns!\n\n";
exit ( 1 );
@@ -209,7 +210,8 @@ int Nx2ContingencyTable::value_to_row(double t) const
if ( !Thresholds ) {
- mlog << Error << "\nNx2ContingencyTable::value_to_row(double) const -> thresholds array not set!\n\n";
+ mlog << Error << "\nNx2ContingencyTable::value_to_row(double) const -> "
+ << "thresholds array not set!\n\n";
exit ( 1 );
@@ -246,7 +248,8 @@ void Nx2ContingencyTable::set_thresholds(const double * Values)
if ( E->empty() ) {
- mlog << Error << "\nNx2ContingencyTable::set_thresholds(const double *) -> table empty!\n\n";
+ mlog << Error << "\nNx2ContingencyTable::set_thresholds(const double *) -> "
+ << "table empty!\n\n";
exit ( 1 );
@@ -272,7 +275,8 @@ double Nx2ContingencyTable::threshold(int k) const
if ( !Thresholds ) {
- mlog << Error << "\nNx2ContingencyTable::threshold(int) const -> no thresholds set!\n\n";
+ mlog << Error << "\nNx2ContingencyTable::threshold(int) const -> "
+ << "no thresholds set!\n\n";
exit ( 1 );
@@ -280,7 +284,8 @@ if ( !Thresholds ) {
if ( (k < 0) || (k > Nrows) ) { // there are Nrows + 1 thresholds
- mlog << Error << "\nNx2ContingencyTable::threshold(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::threshold(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -290,6 +295,51 @@ return Thresholds[k];
}
+////////////////////////////////////////////////////////////////////////
+
+
+void Nx2ContingencyTable::set_event(int row, int value)
+
+{
+
+if ( row < 0 || row >= Nrows ) {
+
+   mlog << Error << "\nNx2ContingencyTable::set_event(int, int) -> "
+ << "bad row index ... " << row << "\n\n";
+
+ exit ( 1 );
+
+}
+
+set_entry(row, nx2_event_column, value);
+
+return;
+
+}
+
+
+////////////////////////////////////////////////////////////////////////
+
+
+void Nx2ContingencyTable::set_nonevent(int row, int value)
+
+{
+
+if ( row < 0 || row >= Nrows ) {
+
+   mlog << Error << "\nNx2ContingencyTable::set_nonevent(int, int) -> "
+ << "bad row index ... " << row << "\n\n";
+
+ exit ( 1 );
+
+}
+
+set_entry(row, nx2_nonevent_column, value);
+
+return;
+
+}
+
////////////////////////////////////////////////////////////////////////
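
The set_event() and set_nonevent() methods added above exist so that contingency table counts read back from an earlier output file can be loaded into an Nx2ContingencyTable before new matched pairs are counted. A minimal usage sketch follows; the include path, bin count, thresholds, and restored counts are made up for illustration:

    #include "contable.h"   // assumes MET's vx_statistics headers are on the include path

    // Illustrative only: restore PCT counts from a prior run, then keep counting.
    void restore_and_update_pct() {
       Nx2ContingencyTable pct;
       double thresh[5] = { 0.0, 0.25, 0.50, 0.75, 1.00 };   // Nrows + 1 thresholds

       pct.set_size(4, 2);           // 4 probability bins x (event, nonevent)
       pct.set_thresholds(thresh);

       // Counts previously written by an earlier run (values are made up)
       int prev_oy[4] = { 10, 8, 5, 2 };
       int prev_on[4] = { 40, 20, 9, 1 };
       for(int i=0; i<4; i++) {
          pct.set_event(i, prev_oy[i]);
          pct.set_nonevent(i, prev_on[i]);
       }

       // New matched pairs update the restored counts in place
       pct.inc_event(0.30);      // forecast probability 0.30, event observed
       pct.inc_nonevent(0.10);   // forecast probability 0.10, no event observed
    }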
@@ -304,7 +354,8 @@ r = value_to_row(t);
if ( r < 0 ) {
- mlog << Error << "\nNx2ContingencyTable::inc_event(double) -> bad value ... " << t << "\n\n";
+ mlog << Error << "\nNx2ContingencyTable::inc_event(double) -> "
+ << "bad value ... " << t << "\n\n";
exit ( 1 );
@@ -330,7 +381,8 @@ r = value_to_row(t);
if ( r < 0 ) {
- mlog << Error << "\nNx2ContingencyTable::inc_nonevent(double) -> bad value ... " << t << "\n\n";
+ mlog << Error << "\nNx2ContingencyTable::inc_nonevent(double) -> "
+ << "bad value ... " << t << "\n\n";
exit ( 1 );
@@ -356,7 +408,8 @@ r = value_to_row(t);
if ( r < 0 ) {
- mlog << Error << "\nNx2ContingencyTable::event_count_by_thresh(double) -> bad value ... " << t << "\n\n";
+ mlog << Error << "\nNx2ContingencyTable::event_count_by_thresh(double) -> "
+ << "bad value ... " << t << "\n\n";
exit ( 1 );
@@ -384,7 +437,8 @@ r = value_to_row(t);
if ( r < 0 ) {
- mlog << Error << "\nNx2ContingencyTable::nonevent_count_by_thresh(double) -> bad value ... " << t << "\n\n";
+ mlog << Error << "\nNx2ContingencyTable::nonevent_count_by_thresh(double) -> "
+ << "bad value ... " << t << "\n\n";
exit ( 1 );
@@ -446,7 +500,8 @@ double Nx2ContingencyTable::row_proby(int row) const
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::row_proby(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::row_proby(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -693,7 +748,8 @@ double Nx2ContingencyTable::row_calibration(int row) const
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::row_calibration(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::row_calibration(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -723,7 +779,8 @@ double Nx2ContingencyTable::row_refinement(int row) const
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::row_refinement(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::row_refinement(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -755,7 +812,8 @@ double Nx2ContingencyTable::row_event_likelihood(int row) const
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::row_event_likelihood(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::row_event_likelihood(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -784,7 +842,8 @@ double Nx2ContingencyTable::row_nonevent_likelihood(int row) const
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::row_nonevent_likelihood(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::row_nonevent_likelihood(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
@@ -815,7 +874,8 @@ TTContingencyTable tt;
if ( (row < 0) || (row >= Nrows) ) {
- mlog << Error << "\nNx2ContingencyTable::ctc_by_row(int) const -> range check error\n\n";
+ mlog << Error << "\nNx2ContingencyTable::ctc_by_row(int) const -> "
+ << "range check error\n\n";
exit ( 1 );
diff --git a/src/libcode/vx_statistics/met_stats.cc b/src/libcode/vx_statistics/met_stats.cc
index 4c679aed83..0ab9a05188 100644
--- a/src/libcode/vx_statistics/met_stats.cc
+++ b/src/libcode/vx_statistics/met_stats.cc
@@ -425,41 +425,208 @@ void CTSInfo::compute_ci() {
////////////////////////////////////////////////////////////////////////
-double CTSInfo::get_stat(const char *stat_name) {
+void CTSInfo::set_stat_ctc(const string &stat_name, double v) {
+
+ if(stat_name == "FY_OY") cts.set_fy_oy(nint(v));
+ else if(stat_name == "FY_ON") cts.set_fy_on(nint(v));
+ else if(stat_name == "FN_OY") cts.set_fn_oy(nint(v));
+ else if(stat_name == "FN_ON") cts.set_fn_on(nint(v));
+ else if(stat_name == "EC_VALUE") cts.set_ec_value(v);
+
+ return;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double CTSInfo::get_stat(STATLineType lt, const string &stat_name, int i_alpha) const {
double v = bad_data_double;
- // Find the statistic by name
- if(strcmp(stat_name, "TOTAL" ) == 0) v = cts.n();
- else if(strcmp(stat_name, "BASER" ) == 0) v = cts.baser();
- else if(strcmp(stat_name, "FMEAN" ) == 0) v = cts.fmean();
- else if(strcmp(stat_name, "ACC" ) == 0) v = cts.accuracy();
- else if(strcmp(stat_name, "FBIAS" ) == 0) v = cts.fbias();
- else if(strcmp(stat_name, "PODY" ) == 0) v = cts.pod_yes();
- else if(strcmp(stat_name, "PODN" ) == 0) v = cts.pod_no();
- else if(strcmp(stat_name, "POFD" ) == 0) v = cts.pofd();
- else if(strcmp(stat_name, "FAR" ) == 0) v = cts.far();
- else if(strcmp(stat_name, "CSI" ) == 0) v = cts.csi();
- else if(strcmp(stat_name, "GSS" ) == 0) v = cts.gss();
- else if(strcmp(stat_name, "HK" ) == 0) v = cts.hk();
- else if(strcmp(stat_name, "HSS" ) == 0) v = cts.hss();
- else if(strcmp(stat_name, "HSS_EC") == 0) v = cts.gheidke_ec(cts.ec_value());
- else if(strcmp(stat_name, "ODDS" ) == 0) v = cts.odds();
- else if(strcmp(stat_name, "LODDS" ) == 0) v = cts.lodds();
- else if(strcmp(stat_name, "ORSS" ) == 0) v = cts.orss();
- else if(strcmp(stat_name, "EDS" ) == 0) v = cts.eds();
- else if(strcmp(stat_name, "SEDS" ) == 0) v = cts.seds();
- else if(strcmp(stat_name, "EDI" ) == 0) v = cts.edi();
- else if(strcmp(stat_name, "SEDI" ) == 0) v = cts.sedi();
- else if(strcmp(stat_name, "BAGSS" ) == 0) v = cts.bagss();
+ // Get statistic by line type
+ if(lt == STATLineType::fho) v = get_stat_fho(stat_name);
+ else if(lt == STATLineType::ctc) v = get_stat_ctc(stat_name);
+ else if(lt == STATLineType::cts) v = get_stat_cts(stat_name, i_alpha);
else {
mlog << Error << "\nCTSInfo::get_stat() -> "
+ << "unexpected line type \"" << statlinetype_to_string(lt)
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double CTSInfo::get_stat_fho(const string &stat_name) const {
+ double v = bad_data_double;
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = cts.n();
+ else if(stat_name == "F_RATE") v = cts.f_rate();
+ else if(stat_name == "H_RATE") v = cts.h_rate();
+ else if(stat_name == "O_RATE") v = cts.o_rate();
+ else {
+ mlog << Error << "\nCTSInfo::get_stat_fho() -> "
+ << "unknown categorical statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(cts.n() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double CTSInfo::get_stat_ctc(const string &stat_name) const {
+ double v = bad_data_double;
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = cts.n();
+ else if(stat_name == "FY_OY" ) v = cts.fy_oy();
+ else if(stat_name == "FY_ON" ) v = cts.fy_on();
+ else if(stat_name == "FN_OY" ) v = cts.fn_oy();
+ else if(stat_name == "FN_ON" ) v = cts.fn_on();
+ else if(stat_name == "EC_VALUE") v = cts.ec_value();
+ else {
+ mlog << Error << "\nCTSInfo::get_stat_ctc() -> "
+ << "unknown categorical statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(cts.n() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double CTSInfo::get_stat_cts(const string &stat_name, int i_alpha) const {
+ double v = bad_data_double;
+
+ // Range check alpha index
+ if(i_alpha >= n_alpha && is_ci_stat_name(stat_name)) {
+ mlog << Error << "\nCTSInfo::get_stat_cts() -> "
+ << "alpha index out of range (" << i_alpha << " >= "
+ << n_alpha << ")!\n\n";
+ exit(1);
+ }
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = (double) cts.n();
+ else if(stat_name == "BASER" ) v = baser.v;
+ else if(stat_name == "BASER_NCL" ) v = baser.v_ncl[i_alpha];
+ else if(stat_name == "BASER_NCU" ) v = baser.v_ncu[i_alpha];
+ else if(stat_name == "BASER_BCL" ) v = baser.v_bcl[i_alpha];
+ else if(stat_name == "BASER_BCU" ) v = baser.v_bcu[i_alpha];
+ else if(stat_name == "FMEAN" ) v = fmean.v;
+ else if(stat_name == "FMEAN_NCL" ) v = fmean.v_ncl[i_alpha];
+ else if(stat_name == "FMEAN_NCU" ) v = fmean.v_ncu[i_alpha];
+ else if(stat_name == "FMEAN_BCL" ) v = fmean.v_bcl[i_alpha];
+ else if(stat_name == "FMEAN_BCU" ) v = fmean.v_bcu[i_alpha];
+ else if(stat_name == "ACC" ) v = acc.v;
+ else if(stat_name == "ACC_NCL" ) v = acc.v_ncl[i_alpha];
+ else if(stat_name == "ACC_NCU" ) v = acc.v_ncu[i_alpha];
+ else if(stat_name == "ACC_BCL" ) v = acc.v_bcl[i_alpha];
+ else if(stat_name == "ACC_BCU" ) v = acc.v_bcu[i_alpha];
+ else if(stat_name == "FBIAS" ) v = fbias.v;
+ else if(stat_name == "FBIAS_BCL" ) v = fbias.v_bcl[i_alpha];
+ else if(stat_name == "FBIAS_BCU" ) v = fbias.v_bcu[i_alpha];
+ else if(stat_name == "PODY" ) v = pody.v;
+ else if(stat_name == "PODY_NCL" ) v = pody.v_ncl[i_alpha];
+ else if(stat_name == "PODY_NCU" ) v = pody.v_ncu[i_alpha];
+ else if(stat_name == "PODY_BCL" ) v = pody.v_bcl[i_alpha];
+ else if(stat_name == "PODY_BCU" ) v = pody.v_bcu[i_alpha];
+ else if(stat_name == "PODN" ) v = podn.v;
+ else if(stat_name == "PODN_NCL" ) v = podn.v_ncl[i_alpha];
+ else if(stat_name == "PODN_NCU" ) v = podn.v_ncu[i_alpha];
+ else if(stat_name == "PODN_BCL" ) v = podn.v_bcl[i_alpha];
+ else if(stat_name == "PODN_BCU" ) v = podn.v_bcu[i_alpha];
+ else if(stat_name == "POFD" ) v = pofd.v;
+ else if(stat_name == "POFD_NCL" ) v = pofd.v_ncl[i_alpha];
+ else if(stat_name == "POFD_NCU" ) v = pofd.v_ncu[i_alpha];
+ else if(stat_name == "POFD_BCL" ) v = pofd.v_bcl[i_alpha];
+ else if(stat_name == "POFD_BCU" ) v = pofd.v_bcu[i_alpha];
+ else if(stat_name == "FAR" ) v = far.v;
+ else if(stat_name == "FAR_NCL" ) v = far.v_ncl[i_alpha];
+ else if(stat_name == "FAR_NCU" ) v = far.v_ncu[i_alpha];
+ else if(stat_name == "FAR_BCL" ) v = far.v_bcl[i_alpha];
+ else if(stat_name == "FAR_BCU" ) v = far.v_bcu[i_alpha];
+ else if(stat_name == "CSI" ) v = csi.v;
+ else if(stat_name == "CSI_NCL" ) v = csi.v_ncl[i_alpha];
+ else if(stat_name == "CSI_NCU" ) v = csi.v_ncu[i_alpha];
+ else if(stat_name == "CSI_BCL" ) v = csi.v_bcl[i_alpha];
+ else if(stat_name == "CSI_BCU" ) v = csi.v_bcu[i_alpha];
+ else if(stat_name == "GSS" ) v = gss.v;
+ else if(stat_name == "GSS_BCL" ) v = gss.v_bcl[i_alpha];
+ else if(stat_name == "GSS_BCU" ) v = gss.v_bcu[i_alpha];
+ else if(stat_name == "HK" ) v = hk.v;
+ else if(stat_name == "HK_NCL" ) v = hk.v_ncl[i_alpha];
+ else if(stat_name == "HK_NCU" ) v = hk.v_ncu[i_alpha];
+ else if(stat_name == "HK_BCL" ) v = hk.v_bcl[i_alpha];
+ else if(stat_name == "HK_BCU" ) v = hk.v_bcu[i_alpha];
+ else if(stat_name == "HSS" ) v = hss.v;
+ else if(stat_name == "HSS_BCL" ) v = hss.v_bcl[i_alpha];
+ else if(stat_name == "HSS_BCU" ) v = hss.v_bcu[i_alpha];
+ else if(stat_name == "ODDS" ) v = odds.v;
+ else if(stat_name == "ODDS_NCL" ) v = odds.v_ncl[i_alpha];
+ else if(stat_name == "ODDS_NCU" ) v = odds.v_ncu[i_alpha];
+ else if(stat_name == "ODDS_BCL" ) v = odds.v_bcl[i_alpha];
+ else if(stat_name == "ODDS_BCU" ) v = odds.v_bcu[i_alpha];
+ else if(stat_name == "LODDS" ) v = lodds.v;
+ else if(stat_name == "LODDS_NCL" ) v = lodds.v_ncl[i_alpha];
+ else if(stat_name == "LODDS_NCU" ) v = lodds.v_ncu[i_alpha];
+ else if(stat_name == "LODDS_BCL" ) v = lodds.v_bcl[i_alpha];
+ else if(stat_name == "LODDS_BCU" ) v = lodds.v_bcu[i_alpha];
+ else if(stat_name == "ORSS" ) v = orss.v;
+ else if(stat_name == "ORSS_NCL" ) v = orss.v_ncl[i_alpha];
+ else if(stat_name == "ORSS_NCU" ) v = orss.v_ncu[i_alpha];
+ else if(stat_name == "ORSS_BCL" ) v = orss.v_bcl[i_alpha];
+ else if(stat_name == "ORSS_BCU" ) v = orss.v_bcu[i_alpha];
+ else if(stat_name == "EDS" ) v = eds.v;
+ else if(stat_name == "EDS_NCL" ) v = eds.v_ncl[i_alpha];
+ else if(stat_name == "EDS_NCU" ) v = eds.v_ncu[i_alpha];
+ else if(stat_name == "EDS_BCL" ) v = eds.v_bcl[i_alpha];
+ else if(stat_name == "EDS_BCU" ) v = eds.v_bcu[i_alpha];
+ else if(stat_name == "SEDS" ) v = seds.v;
+ else if(stat_name == "SEDS_NCL" ) v = seds.v_ncl[i_alpha];
+ else if(stat_name == "SEDS_NCU" ) v = seds.v_ncu[i_alpha];
+ else if(stat_name == "SEDS_BCL" ) v = seds.v_bcl[i_alpha];
+ else if(stat_name == "SEDS_BCU" ) v = seds.v_bcu[i_alpha];
+ else if(stat_name == "EDI" ) v = edi.v;
+ else if(stat_name == "EDI_NCL" ) v = edi.v_ncl[i_alpha];
+ else if(stat_name == "EDI_NCU" ) v = edi.v_ncu[i_alpha];
+ else if(stat_name == "EDI_BCL" ) v = edi.v_bcl[i_alpha];
+ else if(stat_name == "EDI_BCU" ) v = edi.v_bcu[i_alpha];
+ else if(stat_name == "SEDI" ) v = sedi.v;
+ else if(stat_name == "SEDI_NCL" ) v = sedi.v_ncl[i_alpha];
+ else if(stat_name == "SEDI_NCU" ) v = sedi.v_ncu[i_alpha];
+ else if(stat_name == "SEDI_BCL" ) v = sedi.v_bcl[i_alpha];
+ else if(stat_name == "SEDI_BCU" ) v = sedi.v_bcu[i_alpha];
+ else if(stat_name == "BAGSS" ) v = bagss.v;
+ else if(stat_name == "BAGSS_BCL" ) v = bagss.v_bcl[i_alpha];
+ else if(stat_name == "BAGSS_BCU" ) v = bagss.v_bcu[i_alpha];
+ else if(stat_name == "HSS_EC" ) v = hss_ec.v;
+ else if(stat_name == "HSS_EC_BCL") v = hss_ec.v_bcl[i_alpha];
+ else if(stat_name == "HSS_EC_BCU") v = hss_ec.v_bcu[i_alpha];
+ else if(stat_name == "EC_VALUE" ) v = cts.ec_value();
+ else {
+ mlog << Error << "\nCTSInfo::get_stat_cts() -> "
<< "unknown categorical statistic name \"" << stat_name
<< "\"!\n\n";
exit(1);
}
// Return bad data for 0 pairs
- if(cts.n() == 0 && strcmp(stat_name, "TOTAL") != 0) {
+ if(cts.n() == 0 && stat_name != "TOTAL") {
v = bad_data_double;
}
@@ -653,6 +820,122 @@ void MCTSInfo::compute_ci() {
return;
}
+////////////////////////////////////////////////////////////////////////
+
+double MCTSInfo::get_stat(STATLineType lt,
+ const string &stat_name,
+ ConcatString &col_name,
+ int i_alpha) const {
+ double v = bad_data_double;
+
+ // Initialize
+ col_name = stat_name;
+
+ // Get statistic by line type
+ if(lt == STATLineType::mctc) v = get_stat_mctc(stat_name, col_name);
+ else if(lt == STATLineType::mcts) v = get_stat_mcts(stat_name, i_alpha);
+ else {
+ mlog << Error << "\nMCTSInfo::get_stat() -> "
+ << "unexpected line type \"" << statlinetype_to_string(lt)
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double MCTSInfo::get_stat_mctc(const string &stat_name,
+ ConcatString &col_name) const {
+ double v = bad_data_double;
+ col_name = stat_name;
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = (double) cts.total();
+ else if(stat_name == "N_CAT" ) v = (double) cts.nrows();
+ else if(stat_name == "EC_VALUE") v = cts.ec_value();
+ else if(check_reg_exp("F[0-9]*_O[0-9]*", stat_name.c_str())) {
+
+ col_name = "FI_OJ";
+
+ // Parse column name to retrieve index values
+ ConcatString cs(stat_name);
+ StringArray sa = cs.split("_");
+ int i = atoi(sa[0].c_str()+1) - 1;
+ int j = atoi(sa[1].c_str()+1) - 1;
+
+ // Range check
+ if(i < 0 || i >= cts.nrows() ||
+ j < 0 || j >= cts.ncols()) {
+         mlog << Error << "\nMCTSInfo::get_stat_mctc() -> "
+              << "range check error for the requested column name \"" << stat_name
+              << "\"\n\n";
+ exit(1);
+ }
+
+ // Retrieve the value
+ v = (double) cts.entry(i, j);
+ }
+ else {
+ mlog << Error << "\nMCTSInfo::get_stat_mctc() -> "
+ << "unknown multi-category statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double MCTSInfo::get_stat_mcts(const string &stat_name, int i_alpha) const {
+ double v = bad_data_double;
+
+ // Range check alpha index
+ if(i_alpha >= n_alpha && is_ci_stat_name(stat_name)) {
+ mlog << Error << "\nMCTSInfo::get_stat_mcts() -> "
+ << "alpha index out of range (" << i_alpha << " >= "
+ << n_alpha << ")!\n\n";
+ exit(1);
+ }
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = (double) cts.total();
+ else if(stat_name == "N_CAT" ) v = (double) cts.nrows();
+ else if(stat_name == "ACC" ) v = acc.v;
+ else if(stat_name == "ACC_NCL" ) v = acc.v_ncl[i_alpha];
+ else if(stat_name == "ACC_NCU" ) v = acc.v_ncu[i_alpha];
+ else if(stat_name == "ACC_BCL" ) v = acc.v_bcl[i_alpha];
+ else if(stat_name == "ACC_BCU" ) v = acc.v_bcu[i_alpha];
+ else if(stat_name == "HK" ) v = hk.v;
+ else if(stat_name == "HK_BCL" ) v = hk.v_bcl[i_alpha];
+ else if(stat_name == "HK_BCU" ) v = hk.v_bcu[i_alpha];
+ else if(stat_name == "HSS" ) v = hss.v;
+ else if(stat_name == "HSS_BCL" ) v = hss.v_bcl[i_alpha];
+ else if(stat_name == "HSS_BCU" ) v = hss.v_bcu[i_alpha];
+ else if(stat_name == "GER" ) v = ger.v;
+ else if(stat_name == "GER_BCL" ) v = ger.v_bcl[i_alpha];
+ else if(stat_name == "GER_BCU" ) v = ger.v_bcu[i_alpha];
+ else if(stat_name == "HSS_EC" ) v = hss_ec.v;
+ else if(stat_name == "HSS_EC_BCL") v = hss_ec.v_bcl[i_alpha];
+ else if(stat_name == "HSS_EC_BCU") v = hss_ec.v_bcu[i_alpha];
+ else if(stat_name == "EC_VALUE" ) v = cts.ec_value();
+ else {
+ mlog << Error << "\nMCTSInfo::get_stat_mcts() -> "
+ << "unknown multi-category statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(cts.total() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
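
For reference, the FI_OJ handling above can be exercised as follows; the helper function, its include, and the MCTSInfo argument are hypothetical and assume a table that has already been populated:

    #include "met_stats.h"   // assumes MET's vx_statistics headers are available

    // Hypothetical helper: pull one MCTC cell count out of a populated MCTSInfo.
    double f2_o3_count(const MCTSInfo &mcts_info) {
       ConcatString col_name;
       // "F2_O3" selects forecast category 2 / observation category 3,
       // i.e. the zero-based table entry (1, 2); col_name is set to "FI_OJ".
       return mcts_info.get_stat(STATLineType::mctc, "F2_O3", col_name, 0);
    }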
////////////////////////////////////////////////////////////////////////
//
// Code for class CNTInfo
@@ -702,6 +985,44 @@ void CNTInfo::init_from_scratch() {
////////////////////////////////////////////////////////////////////////
+void CNTInfo::zero_out() {
+
+ fbar.set_bad_data();
+ fstdev.set_bad_data();
+ obar.set_bad_data();
+ ostdev.set_bad_data();
+ pr_corr.set_bad_data();
+ sp_corr.set_bad_data();
+ kt_corr.set_bad_data();
+ anom_corr.set_bad_data();
+ rmsfa.set_bad_data();
+ rmsoa.set_bad_data();
+ anom_corr_uncntr.set_bad_data();
+ me.set_bad_data();
+ me2.set_bad_data();
+ estdev.set_bad_data();
+ mbias.set_bad_data();
+ mae.set_bad_data();
+ mse.set_bad_data();
+ msess.set_bad_data();
+ bcmse.set_bad_data();
+ rmse.set_bad_data();
+ si.set_bad_data();
+ e10.set_bad_data();
+ e25.set_bad_data();
+ e50.set_bad_data();
+ e75.set_bad_data();
+ e90.set_bad_data();
+ eiqr.set_bad_data();
+ mad.set_bad_data();
+
+ n_ranks = frank_ties = orank_ties = 0;
+
+ return;
+}
+
+////////////////////////////////////////////////////////////////////////
+
void CNTInfo::clear() {
n = 0;
@@ -1023,42 +1344,121 @@ void CNTInfo::compute_ci() {
////////////////////////////////////////////////////////////////////////
-double CNTInfo::get_stat(const char *stat_name) {
+double CNTInfo::get_stat(const string &stat_name, int i_alpha) const {
double v = bad_data_double;
+ // Range check alpha index
+ if(i_alpha >= n_alpha && is_ci_stat_name(stat_name)) {
+ mlog << Error << "\nCNTInfo::get_stat() -> "
+ << "alpha index out of range (" << i_alpha << " >= "
+ << n_alpha << ")!\n\n";
+ exit(1);
+ }
+
// Find the statistic by name
- if(strcmp(stat_name, "TOTAL" ) == 0) v = n;
- else if(strcmp(stat_name, "FBAR" ) == 0) v = fbar.v;
- else if(strcmp(stat_name, "FSTDEV" ) == 0) v = fstdev.v;
- else if(strcmp(stat_name, "OBAR" ) == 0) v = obar.v;
- else if(strcmp(stat_name, "OSTDEV" ) == 0) v = ostdev.v;
- else if(strcmp(stat_name, "PR_CORR" ) == 0) v = pr_corr.v;
- else if(strcmp(stat_name, "SP_CORR" ) == 0) v = sp_corr.v;
- else if(strcmp(stat_name, "KT_CORR" ) == 0) v = kt_corr.v;
- else if(strcmp(stat_name, "RANKS" ) == 0) v = n_ranks;
- else if(strcmp(stat_name, "FRANK_TIES" ) == 0) v = frank_ties;
- else if(strcmp(stat_name, "ORANK_TIES" ) == 0) v = orank_ties;
- else if(strcmp(stat_name, "ME" ) == 0) v = me.v;
- else if(strcmp(stat_name, "ESTDEV" ) == 0) v = estdev.v;
- else if(strcmp(stat_name, "MBIAS" ) == 0) v = mbias.v;
- else if(strcmp(stat_name, "MAE" ) == 0) v = mae.v;
- else if(strcmp(stat_name, "MSE" ) == 0) v = mse.v;
- else if(strcmp(stat_name, "BCMSE" ) == 0) v = bcmse.v;
- else if(strcmp(stat_name, "RMSE" ) == 0) v = rmse.v;
- else if(strcmp(stat_name, "SI" ) == 0) v = si.v;
- else if(strcmp(stat_name, "E10" ) == 0) v = e10.v;
- else if(strcmp(stat_name, "E25" ) == 0) v = e25.v;
- else if(strcmp(stat_name, "E50" ) == 0) v = e50.v;
- else if(strcmp(stat_name, "E75" ) == 0) v = e75.v;
- else if(strcmp(stat_name, "E90" ) == 0) v = e90.v;
- else if(strcmp(stat_name, "EIQR" ) == 0) v = eiqr.v;
- else if(strcmp(stat_name, "MAD " ) == 0) v = mad.v;
- else if(strcmp(stat_name, "ANOM_CORR" ) == 0) v = anom_corr.v;
- else if(strcmp(stat_name, "ME2" ) == 0) v = me2.v;
- else if(strcmp(stat_name, "MSESS" ) == 0) v = msess.v;
- else if(strcmp(stat_name, "RMSFA" ) == 0) v = rmsfa.v;
- else if(strcmp(stat_name, "RMSOA" ) == 0) v = rmsoa.v;
- else if(strcmp(stat_name, "ANOM_CORR_UNCNTR") == 0) v = anom_corr_uncntr.v;
+ if(stat_name == "TOTAL" ) v = (double) n;
+ else if(stat_name == "FBAR" ) v = fbar.v;
+ else if(stat_name == "FBAR_NCL" ) v = fbar.v_ncl[i_alpha];
+ else if(stat_name == "FBAR_NCU" ) v = fbar.v_ncu[i_alpha];
+ else if(stat_name == "FBAR_BCL" ) v = fbar.v_bcl[i_alpha];
+ else if(stat_name == "FBAR_BCU" ) v = fbar.v_bcu[i_alpha];
+ else if(stat_name == "FSTDEV" ) v = fstdev.v;
+ else if(stat_name == "FSTDEV_NCL" ) v = fstdev.v_ncl[i_alpha];
+ else if(stat_name == "FSTDEV_NCU" ) v = fstdev.v_ncu[i_alpha];
+ else if(stat_name == "FSTDEV_BCL" ) v = fstdev.v_bcl[i_alpha];
+ else if(stat_name == "FSTDEV_BCU" ) v = fstdev.v_bcu[i_alpha];
+ else if(stat_name == "OBAR" ) v = obar.v;
+ else if(stat_name == "OBAR_NCL" ) v = obar.v_ncl[i_alpha];
+ else if(stat_name == "OBAR_NCU" ) v = obar.v_ncu[i_alpha];
+ else if(stat_name == "OBAR_BCL" ) v = obar.v_bcl[i_alpha];
+ else if(stat_name == "OBAR_BCU" ) v = obar.v_bcu[i_alpha];
+ else if(stat_name == "OSTDEV" ) v = ostdev.v;
+ else if(stat_name == "OSTDEV_NCL" ) v = ostdev.v_ncl[i_alpha];
+ else if(stat_name == "OSTDEV_NCU" ) v = ostdev.v_ncu[i_alpha];
+ else if(stat_name == "OSTDEV_BCL" ) v = ostdev.v_bcl[i_alpha];
+ else if(stat_name == "OSTDEV_BCU" ) v = ostdev.v_bcu[i_alpha];
+ else if(stat_name == "PR_CORR" ) v = pr_corr.v;
+ else if(stat_name == "PR_CORR_NCL" ) v = pr_corr.v_ncl[i_alpha];
+ else if(stat_name == "PR_CORR_NCU" ) v = pr_corr.v_ncu[i_alpha];
+ else if(stat_name == "PR_CORR_BCL" ) v = pr_corr.v_bcl[i_alpha];
+ else if(stat_name == "PR_CORR_BCU" ) v = pr_corr.v_bcu[i_alpha];
+ else if(stat_name == "SP_CORR" ) v = sp_corr.v;
+ else if(stat_name == "KT_CORR" ) v = kt_corr.v;
+ else if(stat_name == "RANKS" ) v = n_ranks;
+ else if(stat_name == "FRANK_TIES" ) v = frank_ties;
+ else if(stat_name == "ORANK_TIES" ) v = orank_ties;
+ else if(stat_name == "ME" ) v = me.v;
+ else if(stat_name == "ME_NCL" ) v = me.v_ncl[i_alpha];
+ else if(stat_name == "ME_NCU" ) v = me.v_ncu[i_alpha];
+ else if(stat_name == "ME_BCL" ) v = me.v_bcl[i_alpha];
+ else if(stat_name == "ME_BCU" ) v = me.v_bcu[i_alpha];
+ else if(stat_name == "ESTDEV" ) v = estdev.v;
+ else if(stat_name == "ESTDEV_NCL" ) v = estdev.v_ncl[i_alpha];
+ else if(stat_name == "ESTDEV_NCU" ) v = estdev.v_ncu[i_alpha];
+ else if(stat_name == "ESTDEV_BCL" ) v = estdev.v_bcl[i_alpha];
+ else if(stat_name == "ESTDEV_BCU" ) v = estdev.v_bcu[i_alpha];
+ else if(stat_name == "MBIAS" ) v = mbias.v;
+ else if(stat_name == "MBIAS_BCL" ) v = mbias.v_bcl[i_alpha];
+ else if(stat_name == "MBIAS_BCU" ) v = mbias.v_bcu[i_alpha];
+ else if(stat_name == "MAE" ) v = mae.v;
+ else if(stat_name == "MAE_BCL" ) v = mae.v_bcl[i_alpha];
+ else if(stat_name == "MAE_BCU" ) v = mae.v_bcu[i_alpha];
+ else if(stat_name == "MSE" ) v = mse.v;
+ else if(stat_name == "MSE_BCL" ) v = mse.v_bcl[i_alpha];
+ else if(stat_name == "MSE_BCU" ) v = mse.v_bcu[i_alpha];
+ else if(stat_name == "BCMSE" ) v = bcmse.v;
+ else if(stat_name == "BCMSE_BCL" ) v = bcmse.v_bcl[i_alpha];
+ else if(stat_name == "BCMSE_BCU" ) v = bcmse.v_bcu[i_alpha];
+ else if(stat_name == "RMSE" ) v = rmse.v;
+ else if(stat_name == "RMSE_BCL" ) v = rmse.v_bcl[i_alpha];
+ else if(stat_name == "RMSE_BCU" ) v = rmse.v_bcu[i_alpha];
+ else if(stat_name == "SI" ) v = si.v;
+ else if(stat_name == "SI_BCL" ) v = si.v_bcl[i_alpha];
+ else if(stat_name == "SI_BCU" ) v = si.v_bcu[i_alpha];
+ else if(stat_name == "E10" ) v = e10.v;
+ else if(stat_name == "E10_BCL" ) v = e10.v_bcl[i_alpha];
+ else if(stat_name == "E10_BCU" ) v = e10.v_bcu[i_alpha];
+ else if(stat_name == "E25" ) v = e25.v;
+ else if(stat_name == "E25_BCL" ) v = e25.v_bcl[i_alpha];
+ else if(stat_name == "E25_BCU" ) v = e25.v_bcu[i_alpha];
+ else if(stat_name == "E50" ) v = e50.v;
+ else if(stat_name == "E50_BCL" ) v = e50.v_bcl[i_alpha];
+ else if(stat_name == "E50_BCU" ) v = e50.v_bcu[i_alpha];
+ else if(stat_name == "E75" ) v = e75.v;
+ else if(stat_name == "E75_BCL" ) v = e75.v_bcl[i_alpha];
+ else if(stat_name == "E75_BCU" ) v = e75.v_bcu[i_alpha];
+ else if(stat_name == "E90" ) v = e90.v;
+ else if(stat_name == "E90_BCL" ) v = e90.v_bcl[i_alpha];
+ else if(stat_name == "E90_BCU" ) v = e90.v_bcu[i_alpha];
+ else if(stat_name == "EIQR" ) v = eiqr.v;
+ else if(stat_name == "EIQR_BCL" ) v = eiqr.v_bcl[i_alpha];
+ else if(stat_name == "EIQR_BCU" ) v = eiqr.v_bcu[i_alpha];
+ else if(stat_name == "MAD" ) v = mad.v;
+ else if(stat_name == "MAD_BCL" ) v = mad.v_bcl[i_alpha];
+ else if(stat_name == "MAD_BCU" ) v = mad.v_bcu[i_alpha];
+ else if(stat_name == "ANOM_CORR" ) v = anom_corr.v;
+ else if(stat_name == "ANOM_CORR_NCL" ) v = anom_corr.v_ncl[i_alpha];
+ else if(stat_name == "ANOM_CORR_NCU" ) v = anom_corr.v_ncu[i_alpha];
+ else if(stat_name == "ANOM_CORR_BCL" ) v = anom_corr.v_bcl[i_alpha];
+ else if(stat_name == "ANOM_CORR_BCU" ) v = anom_corr.v_bcu[i_alpha];
+ else if(stat_name == "ME2" ) v = me2.v;
+ else if(stat_name == "ME2_BCL" ) v = me2.v_bcl[i_alpha];
+ else if(stat_name == "ME2_BCU" ) v = me2.v_bcu[i_alpha];
+ else if(stat_name == "MSESS" ) v = msess.v;
+ else if(stat_name == "MSESS_BCL" ) v = msess.v_bcl[i_alpha];
+ else if(stat_name == "MSESS_BCU" ) v = msess.v_bcu[i_alpha];
+ else if(stat_name == "RMSFA" ) v = rmsfa.v;
+ else if(stat_name == "RMSFA_BCL" ) v = rmsfa.v_bcl[i_alpha];
+ else if(stat_name == "RMSFA_BCU" ) v = rmsfa.v_bcu[i_alpha];
+ else if(stat_name == "RMSOA" ) v = rmsoa.v;
+ else if(stat_name == "RMSOA_BCL" ) v = rmsoa.v_bcl[i_alpha];
+ else if(stat_name == "RMSOA_BCU" ) v = rmsoa.v_bcu[i_alpha];
+ else if(stat_name == "ANOM_CORR_UNCNTR" ) v = anom_corr_uncntr.v;
+ else if(stat_name == "ANOM_CORR_UNCNTR_BCL") v = anom_corr_uncntr.v_bcl[i_alpha];
+ else if(stat_name == "ANOM_CORR_UNCNTR_BCU") v = anom_corr_uncntr.v_bcu[i_alpha];
else {
mlog << Error << "\nCNTInfo::get_stat() -> "
<< "unknown continuous statistic name \"" << stat_name
@@ -1067,7 +1467,7 @@ double CNTInfo::get_stat(const char *stat_name) {
}
// Return bad data for 0 pairs
- if(n == 0 && strcmp(stat_name, "TOTAL") != 0) {
+ if(n == 0 && stat_name != "TOTAL") {
v = bad_data_double;
}
@@ -1113,7 +1513,8 @@ SL1L2Info & SL1L2Info::operator=(const SL1L2Info &c) {
////////////////////////////////////////////////////////////////////////
SL1L2Info & SL1L2Info::operator+=(const SL1L2Info &c) {
- SL1L2Info s_info;
+ SL1L2Info s_info = *this;
+ s_info.zero_out();
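+ // The copy retains the settings of *this; the zeroed sums are re-accumulated from both operands below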
s_info.scount = scount + c.scount;
@@ -1305,6 +1706,110 @@ void SL1L2Info::set(const PairDataPoint &pd_all) {
return;
}
+////////////////////////////////////////////////////////////////////////
+
+void SL1L2Info::set_stat_sl1l2(const string &stat_name, double v) {
+
+ if(stat_name == "TOTAL") scount = nint(v);
+ else if(stat_name == "FBAR" ) fbar = v;
+ else if(stat_name == "OBAR" ) obar = v;
+ else if(stat_name == "FOBAR") fobar = v;
+ else if(stat_name == "FFBAR") ffbar = v;
+ else if(stat_name == "OOBAR") oobar = v;
+ else if(stat_name == "MAE" ) smae = v;
+ else {
+ mlog << Error << "\nSL1L2Info::set_stat_sl1l2() -> "
+ << "unknown scalar partial sum statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+void SL1L2Info::set_stat_sal1l2(const string &stat_name, double v) {
+
+ if(stat_name == "TOTAL" ) sacount = nint(v);
+ else if(stat_name == "FABAR" ) fabar = v;
+ else if(stat_name == "OABAR" ) oabar = v;
+ else if(stat_name == "FOABAR") foabar = v;
+ else if(stat_name == "FFABAR") ffabar = v;
+ else if(stat_name == "OOABAR") ooabar = v;
+ else if(stat_name == "MAE" ) samae = v;
+ else {
+ mlog << Error << "\nSL1L2Info::set_stat_sal1l2() -> "
+ << "unknown scalar anomaly partial sum statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double SL1L2Info::get_stat(STATLineType lt, const string &stat_name) const {
+ double v = bad_data_double;
+
+ // Get statistic by line type
+ if(lt == STATLineType::sl1l2) v = get_stat_sl1l2(stat_name);
+ else if(lt == STATLineType::sal1l2) v = get_stat_sal1l2(stat_name);
+ else {
+ mlog << Error << "\nSL1L2Info::get_stat() -> "
+ << "unexpected line type \"" << statlinetype_to_string(lt)
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double SL1L2Info::get_stat_sl1l2(const string &stat_name) const {
+ double v = bad_data_double;
+
+ if(stat_name == "TOTAL") v = (double) scount;
+ else if(stat_name == "FBAR" ) v = fbar;
+ else if(stat_name == "OBAR" ) v = obar;
+ else if(stat_name == "FOBAR") v = fobar;
+ else if(stat_name == "FFBAR") v = ffbar;
+ else if(stat_name == "OOBAR") v = oobar;
+ else if(stat_name == "MAE" ) v = smae;
+ else {
+ mlog << Error << "\nSL1L2Info::get_stat_sl1l2() -> "
+ << "unknown scalar partial sum statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double SL1L2Info::get_stat_sal1l2(const string &stat_name) const {
+ double v = bad_data_double;
+
+ if(stat_name == "TOTAL" ) v = (double) sacount;
+ else if(stat_name == "FABAR" ) v = fabar;
+ else if(stat_name == "OABAR" ) v = oabar;
+ else if(stat_name == "FOABAR") v = foabar;
+ else if(stat_name == "FFABAR") v = ffabar;
+ else if(stat_name == "OOABAR") v = ooabar;
+ else if(stat_name == "MAE" ) v = samae;
+ else {
+ mlog << Error << "\nSL1L2Info::get_stat_sal1l2() -> "
+ << "unknown scalar anomaly partial sum statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
////////////////////////////////////////////////////////////////////////
//
// Code for class VL1L2Info
@@ -1344,11 +1849,8 @@ VL1L2Info & VL1L2Info::operator=(const VL1L2Info &c) {
////////////////////////////////////////////////////////////////////////
VL1L2Info & VL1L2Info::operator+=(const VL1L2Info &c) {
- VL1L2Info v_info;
-
- // Store alpha values
- v_info.allocate_n_alpha(n_alpha);
- for(int i=0; i "
+ << "unknown vector partial sums statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
////////////////////////////////////////////////////////////////////////
-double VL1L2Info::get_stat(const char *stat_name) {
+double VL1L2Info::get_stat_val1l2(const string &stat_name) const {
double v = bad_data_double;
- if(strcmp(stat_name, "TOTAL" ) == 0) v = vcount;
- else if(strcmp(stat_name, "FBAR" ) == 0) v = FBAR.v;
- else if(strcmp(stat_name, "OBAR" ) == 0) v = OBAR.v;
- else if(strcmp(stat_name, "FS_RMS" ) == 0) v = FS_RMS.v;
- else if(strcmp(stat_name, "OS_RMS" ) == 0) v = OS_RMS.v;
- else if(strcmp(stat_name, "MSVE" ) == 0) v = MSVE.v;
- else if(strcmp(stat_name, "RMSVE" ) == 0) v = RMSVE.v;
- else if(strcmp(stat_name, "FSTDEV" ) == 0) v = FSTDEV.v;
- else if(strcmp(stat_name, "OSTDEV" ) == 0) v = OSTDEV.v;
- else if(strcmp(stat_name, "FDIR" ) == 0) v = FDIR.v;
- else if(strcmp(stat_name, "ODIR" ) == 0) v = ODIR.v;
- else if(strcmp(stat_name, "FBAR_SPEED" ) == 0) v = FBAR_SPEED.v;
- else if(strcmp(stat_name, "OBAR_SPEED" ) == 0) v = OBAR_SPEED.v;
- else if(strcmp(stat_name, "VDIFF_SPEED" ) == 0) v = VDIFF_SPEED.v;
- else if(strcmp(stat_name, "VDIFF_DIR" ) == 0) v = VDIFF_DIR.v;
- else if(strcmp(stat_name, "SPEED_ERR" ) == 0) v = SPEED_ERR.v;
- else if(strcmp(stat_name, "SPEED_ABSERR" ) == 0) v = SPEED_ABSERR.v;
- else if(strcmp(stat_name, "DIR_ERR" ) == 0) v = DIR_ERR.v;
- else if(strcmp(stat_name, "DIR_ABSERR" ) == 0) v = DIR_ABSERR.v;
- else if(strcmp(stat_name, "ANOM_CORR" ) == 0) v = ANOM_CORR.v;
- else if(strcmp(stat_name, "ANOM_CORR_UNCNTR") == 0) v = ANOM_CORR_UNCNTR.v;
- else if(strcmp(stat_name, "DIR_ME" ) == 0) v = DIR_ME.v;
- else if(strcmp(stat_name, "DIR_MAE" ) == 0) v = DIR_MAE.v;
- else if(strcmp(stat_name, "DIR_MSE" ) == 0) v = DIR_MSE.v;
- else if(strcmp(stat_name, "DIR_RMSE" ) == 0) v = DIR_RMSE.v;
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = vacount;
+ else if(stat_name == "UFABAR" ) v = ufa_bar;
+ else if(stat_name == "VFABAR" ) v = vfa_bar;
+ else if(stat_name == "UOABAR" ) v = uoa_bar;
+ else if(stat_name == "VOABAR" ) v = voa_bar;
+ else if(stat_name == "UVFOABAR" ) v = uvfoa_bar;
+ else if(stat_name == "UVFFABAR" ) v = uvffa_bar;
+ else if(stat_name == "UVOOABAR" ) v = uvooa_bar;
+ else if(stat_name == "FA_SPEED_BAR") v = fa_speed_bar;
+ else if(stat_name == "OA_SPEED_BAR") v = oa_speed_bar;
+ else if(stat_name == "TOTAL_DIR" ) v = dacount;
+ else if(stat_name == "DIRA_ME" ) v = dira_bar;
+ else if(stat_name == "DIRA_MAE" ) v = absdira_bar;
+ else if(stat_name == "DIRA_MSE" ) v = dira2_bar;
else {
mlog << Error << "\nVL1L2Info::get_stat() -> "
- << "unknown continuous statistic name \"" << stat_name
+ << "unknown vector anomaly partial sums statistic name \"" << stat_name
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double VL1L2Info::get_stat_vcnt(const string &stat_name) const {
+ double v = bad_data_double;
+
+ if(stat_name == "TOTAL" ) v = vcount;
+ else if(stat_name == "FBAR" ) v = FBAR.v;
+ else if(stat_name == "OBAR" ) v = OBAR.v;
+ else if(stat_name == "FS_RMS" ) v = FS_RMS.v;
+ else if(stat_name == "OS_RMS" ) v = OS_RMS.v;
+ else if(stat_name == "MSVE" ) v = MSVE.v;
+ else if(stat_name == "RMSVE" ) v = RMSVE.v;
+ else if(stat_name == "FSTDEV" ) v = FSTDEV.v;
+ else if(stat_name == "OSTDEV" ) v = OSTDEV.v;
+ else if(stat_name == "FDIR" ) v = FDIR.v;
+ else if(stat_name == "ODIR" ) v = ODIR.v;
+ else if(stat_name == "FBAR_SPEED" ) v = FBAR_SPEED.v;
+ else if(stat_name == "OBAR_SPEED" ) v = OBAR_SPEED.v;
+ else if(stat_name == "VDIFF_SPEED" ) v = VDIFF_SPEED.v;
+ else if(stat_name == "VDIFF_DIR" ) v = VDIFF_DIR.v;
+ else if(stat_name == "SPEED_ERR" ) v = SPEED_ERR.v;
+ else if(stat_name == "SPEED_ABSERR" ) v = SPEED_ABSERR.v;
+ else if(stat_name == "DIR_ERR" ) v = DIR_ERR.v;
+ else if(stat_name == "DIR_ABSERR" ) v = DIR_ABSERR.v;
+ else if(stat_name == "ANOM_CORR" ) v = ANOM_CORR.v;
+ else if(stat_name == "ANOM_CORR_UNCNTR") v = ANOM_CORR_UNCNTR.v;
+ else if(stat_name == "DIR_ME" ) v = DIR_ME.v;
+ else if(stat_name == "DIR_MAE" ) v = DIR_MAE.v;
+ else if(stat_name == "DIR_MSE" ) v = DIR_MSE.v;
+ else if(stat_name == "DIR_RMSE" ) v = DIR_RMSE.v;
+ else {
+ mlog << Error << "\nVL1L2Info::get_stat_vcnt() -> "
+ << "unknown vector continuous statistic name \"" << stat_name
<< "\"!\n\n";
exit(1);
}
@@ -2128,7 +2689,8 @@ NBRCNTInfo & NBRCNTInfo::operator=(const NBRCNTInfo &c) {
////////////////////////////////////////////////////////////////////////
NBRCNTInfo & NBRCNTInfo::operator+=(const NBRCNTInfo &c) {
- NBRCNTInfo n_info;
+ NBRCNTInfo n_info = *this;
+ n_info.sl1l2_info.zero_out();
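+ // Reset the nested partial sums in the copy before re-accumulating them from both operands below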
double den;
n_info.sl1l2_info.scount = sl1l2_info.scount + c.sl1l2_info.scount;
@@ -2756,6 +3318,285 @@ void PCTInfo::compute_ci() {
return;
}
+////////////////////////////////////////////////////////////////////////
+
+double PCTInfo::get_stat(STATLineType lt,
+ const string &stat_name,
+ ConcatString &col_name,
+ int i_alpha) const {
+ double v = bad_data_double;
+
+ // Get statistic by line type
+ if(lt == STATLineType::pct) v = get_stat_pct(stat_name, col_name);
+ else if(lt == STATLineType::pjc) v = get_stat_pjc(stat_name, col_name);
+ else if(lt == STATLineType::prc) v = get_stat_prc(stat_name, col_name);
+ else if(lt == STATLineType::pstd) v = get_stat_pstd(stat_name, col_name, i_alpha);
+ else {
+ mlog << Error << "\nPCTInfo::get_stat() -> "
+ << "unexpected line type \"" << statlinetype_to_string(lt)
+ << "\"!\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double PCTInfo::get_stat_pct(const string &stat_name,
+ ConcatString &col_name) const {
+ int i = 0;
+ double v = bad_data_double;
+ col_name = stat_name;
+
+ // Get index value for variable column numbers
+ if(check_reg_exp("_[0-9]", stat_name.c_str())) {
+
+ // Parse the index value from the column name
+ i = atoi(strrchr(stat_name.c_str(), '_') + 1) - 1;
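+ // e.g. "THRESH_3" yields a 0-based index of 2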
+
+ // Range check (allow THRESH_N for N == nrows)
+ if(i < 0 || i > pct.nrows()) {
+ mlog << Error << "\nPCTInfo::get_stat_pct() -> "
+ << "range check error for column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+ } // end if
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL") {
+ v = (double) pct.n();
+ }
+ else if(stat_name == "N_THRESH") {
+ v = (double) pct.nrows() + 1;
+ }
+ else if(check_reg_exp("THRESH_[0-9]", stat_name.c_str())) {
+ v = pct.threshold(i);
+ col_name = "THRESH_I";
+ }
+ else if(check_reg_exp("OY_[0-9]", stat_name.c_str())){
+ v = (double) pct.event_count_by_row(i);
+ col_name = "OY_I";
+ }
+ else if(check_reg_exp("ON_[0-9]", stat_name.c_str())) {
+ v = (double) pct.nonevent_count_by_row(i);
+ col_name = "ON_I";
+ }
+ else {
+ mlog << Error << "\nPCTInfo::get_stat_pct() -> "
+ << "unsupported column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double PCTInfo::get_stat_pjc(const string &stat_name,
+ ConcatString &col_name) const {
+ int i = 0;
+ double v = bad_data_double;
+ col_name = stat_name;
+
+ // Get index value for variable column numbers
+ if(check_reg_exp("_[0-9]", stat_name.c_str())) {
+
+ // Parse the index value from the column name
+ i = atoi(strrchr(stat_name.c_str(), '_') + 1) - 1;
+
+ // Range check
+ if(i < 0 || i >= pct.nrows()) {
+ mlog << Error << "\nPCTInfo::get_stat_pjc() -> "
+ << "range check error for column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+ } // end if
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL") {
+ v = (double) pct.n();
+ }
+ else if(stat_name == "N_THRESH") {
+ v = (double) pct.nrows() + 1;
+ }
+ else if(check_reg_exp("THRESH_[0-9]", stat_name.c_str())) {
+ v = pct.threshold(i);
+ col_name = "THRESH_I";
+ }
+ else if(check_reg_exp("OY_TP_[0-9]", stat_name.c_str())) {
+ v = pct.event_count_by_row(i)/(double) pct.n();
+ col_name = "OY_TP_I";
+ }
+ else if(check_reg_exp("ON_TP_[0-9]", stat_name.c_str())) {
+ v = pct.nonevent_count_by_row(i)/(double) pct.n();
+ col_name = "ON_TP_I";
+ }
+ else if(check_reg_exp("CALIBRATION_[0-9]", stat_name.c_str())) {
+ v = pct.row_calibration(i);
+ col_name = "CALIBRATION_I";
+ }
+ else if(check_reg_exp("REFINEMENT_[0-9]", stat_name.c_str())) {
+ v = pct.row_refinement(i);
+ col_name = "REFINEMENT_I";
+ }
+ else if(check_reg_exp("LIKELIHOOD_[0-9]", stat_name.c_str())) {
+ v = pct.row_event_likelihood(i);
+ col_name = "LIKELIHOOD_I";
+ }
+ else if(check_reg_exp("BASER_[0-9]", stat_name.c_str())) {
+ v = pct.row_obar(i);
+ col_name = "BASER_I";
+ }
+ else {
+ mlog << Error << "\nPCTInfo::get_stat_pjc() -> "
+ << "unsupported column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(pct.n() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double PCTInfo::get_stat_prc(const string &stat_name,
+ ConcatString &col_name) const {
+ int i = 0;
+ double v = bad_data_double;
+ col_name = stat_name;
+ TTContingencyTable ct;
+
+ // Get index value for variable column numbers
+ if(check_reg_exp("_[0-9]", stat_name.c_str())) {
+
+ // Parse the index value from the column name
+ i = atoi(strrchr(stat_name.c_str(), '_') + 1) - 1;
+
+ // Range check
+ if(i < 0 || i >= pct.nrows()) {
+ mlog << Error << "\nPCTInfo::get_stat_prc() -> "
+ << "range check error for column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+
+ // Get the 2x2 contingency table for this row
+ ct = pct.ctc_by_row(i);
+
+ } // end if
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL") {
+ v = (double) pct.n();
+ }
+ else if(stat_name == "N_THRESH") {
+ v = (double) pct.nrows() + 1;
+ }
+ else if(check_reg_exp("THRESH_[0-9]", stat_name.c_str())) {
+ v = pct.threshold(i);
+ col_name = "THRESH_I";
+ }
+ else if(check_reg_exp("PODY_[0-9]", stat_name.c_str())) {
+ v = ct.pod_yes();
+ col_name = "PODY_I";
+ }
+ else if(check_reg_exp("POFD_[0-9]", stat_name.c_str())) {
+ v = ct.pofd();
+ col_name = "POFD_I";
+ }
+ else {
+ mlog << Error << "\nPCTInfo::get_stat_prc() -> "
+ << "unsupported column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(pct.n() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+double PCTInfo::get_stat_pstd(const string &stat_name,
+ ConcatString &col_name,
+ int i_alpha) const {
+ int i = 0;
+ double v = bad_data_double;
+ col_name = stat_name;
+
+ // Range check alpha index
+ if(i_alpha >= n_alpha && is_ci_stat_name(stat_name)) {
+ mlog << Error << "\nPCTInfo::get_stat_pstd() -> "
+ << "alpha index out of range (" << i_alpha << " >= "
+ << n_alpha << ")!\n\n";
+ exit(1);
+ }
+
+ // Get index value for variable column numbers
+ if(check_reg_exp("_[0-9]", stat_name.c_str())) {
+
+ // Parse the index value from the column name
+ i = atoi(strrchr(stat_name.c_str(), '_') + 1) - 1;
+
+ // Range check
+ if(i < 0 || i >= pct.nrows()) {
+ mlog << Error << "\nPCTInfo::get_stat_pstd() -> "
+ << "range check error for column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+ } // end if
+
+ // Find the statistic by name
+ if(stat_name == "TOTAL" ) v = (double) pct.n();
+ else if(stat_name == "N_THRESH" ) v = (double) pct.nrows() + 1;
+ else if(stat_name == "BASER" ) v = baser.v;
+ else if(stat_name == "BASER_NCL" ) v = baser.v_ncl[i_alpha];
+ else if(stat_name == "BASER_NCU" ) v = baser.v_ncu[i_alpha];
+ else if(stat_name == "RELIABILITY") v = pct.reliability();
+ else if(stat_name == "RESOLUTION" ) v = pct.resolution();
+ else if(stat_name == "UNCERTAINTY") v = pct.uncertainty();
+ else if(stat_name == "ROC_AUC" ) v = pct.roc_auc();
+ else if(stat_name == "BRIER" ) v = brier.v;
+ else if(stat_name == "BRIER_NCL" ) v = brier.v_ncl[i_alpha];
+ else if(stat_name == "BRIER_NCU" ) v = brier.v_ncu[i_alpha];
+ else if(stat_name == "BRIERCL" ) v = briercl.v;
+ else if(stat_name == "BRIERCL_NCL") v = briercl.v_ncl[i_alpha];
+ else if(stat_name == "BRIERCL_NCU") v = briercl.v_ncu[i_alpha];
+ else if(stat_name == "BSS" ) v = bss;
+ else if(stat_name == "BSS_SMPL" ) v = bss_smpl;
+ else if(check_reg_exp("THRESH_[0-9]", stat_name.c_str())) {
+ v = pct.threshold(i);
+ col_name = "THRESH_I";
+ }
+ else {
+ mlog << Error << "\nPCTInfo::get_stat_pstd() -> "
+ << "unsupported column name requested \"" << stat_name
+ << "\"\n\n";
+ exit(1);
+ }
+
+ // Return bad data for 0 pairs
+ if(pct.n() == 0 && stat_name != "TOTAL") {
+ v = bad_data_double;
+ }
+
+ return v;
+}
+
////////////////////////////////////////////////////////////////////////
//
// Code for class GRADInfo
@@ -3627,3 +4468,10 @@ int compute_rank(const DataPlane &dp, DataPlane &dp_rank, double *data_rank, int
}
////////////////////////////////////////////////////////////////////////
+
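+// Check whether a statistic name refers to a normal (_NCL/_NCU) or bootstrap (_BCL/_BCU) confidence limit column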
+bool is_ci_stat_name(const string &stat_name) {
+ return (stat_name.find("_NC") != string::npos ||
+ stat_name.find("_BC") != string::npos);
+}
+
+////////////////////////////////////////////////////////////////////////
diff --git a/src/libcode/vx_statistics/met_stats.h b/src/libcode/vx_statistics/met_stats.h
index f3bef1a90c..c968697dd7 100644
--- a/src/libcode/vx_statistics/met_stats.h
+++ b/src/libcode/vx_statistics/met_stats.h
@@ -97,7 +97,12 @@ class CTSInfo {
void compute_stats();
void compute_ci();
- double get_stat(const char *);
+ void set_stat_ctc(const std::string &, double);
+
+ double get_stat(STATLineType, const std::string &, int i_alpha=0) const;
+ double get_stat_fho(const std::string &) const;
+ double get_stat_ctc(const std::string &) const;
+ double get_stat_cts(const std::string &, int i_alpha=0) const;
};
////////////////////////////////////////////////////////////////////////
@@ -136,6 +141,10 @@ class MCTSInfo {
void add(double, double, const ClimoPntInfo *cpi = nullptr);
void compute_stats();
void compute_ci();
+
+ double get_stat(STATLineType, const std::string &, ConcatString &, int i_alpha=0) const;
+ double get_stat_mctc(const std::string &, ConcatString &) const;
+ double get_stat_mcts(const std::string &, int i_alpha=0) const;
};
////////////////////////////////////////////////////////////////////////
@@ -188,11 +197,13 @@ class CNTInfo {
int n_ranks, frank_ties, orank_ties;
+ void zero_out();
void clear();
+
void allocate_n_alpha(int);
void compute_ci();
- double get_stat(const char *);
+ double get_stat(const std::string &, int i_alpha=0) const;
};
////////////////////////////////////////////////////////////////////////
@@ -239,6 +250,13 @@ class SL1L2Info {
void zero_out();
void clear();
+
+ void set_stat_sl1l2(const std::string &, double);
+ void set_stat_sal1l2(const std::string &, double);
+
+ double get_stat(STATLineType, const std::string &) const;
+ double get_stat_sl1l2(const std::string &) const;
+ double get_stat_sal1l2(const std::string &) const;
};
////////////////////////////////////////////////////////////////////////
@@ -354,15 +372,17 @@ class VL1L2Info {
// Compute sums
void set(const PairDataPoint &, const PairDataPoint &);
-
- void clear();
+
void zero_out();
+ void clear();
void allocate_n_alpha(int);
void compute_stats();
void compute_ci();
- double get_stat(const char *);
+ double get_stat_vl1l2(const std::string &) const;
+ double get_stat_val1l2(const std::string &) const;
+ double get_stat_vcnt(const std::string &) const;
};
////////////////////////////////////////////////////////////////////////
@@ -502,8 +522,9 @@ class ISCInfo {
double baser;
double fbias;
- void clear();
void zero_out();
+ void clear();
+
void allocate_n_scale(int);
void compute_isc();
void compute_isc(int);
@@ -558,6 +579,12 @@ class PCTInfo {
void set_fthresh(const ThreshArray &);
void compute_stats();
void compute_ci();
+
+ double get_stat(STATLineType, const std::string &, ConcatString &, int i_alpha=0) const;
+ double get_stat_pct(const std::string &, ConcatString &) const;
+ double get_stat_pjc(const std::string &, ConcatString &) const;
+ double get_stat_prc(const std::string &, ConcatString &) const;
+ double get_stat_pstd(const std::string &, ConcatString &, int i_alpha=0) const;
};
////////////////////////////////////////////////////////////////////////
@@ -737,6 +764,8 @@ extern double compute_ufss(double);
extern int compute_rank(const DataPlane &, DataPlane &, double *, int &);
+extern bool is_ci_stat_name(const std::string &);
+
////////////////////////////////////////////////////////////////////////
#endif // __MET_STATS_H__
diff --git a/src/tools/core/series_analysis/series_analysis.cc b/src/tools/core/series_analysis/series_analysis.cc
index a2e8cdf5c1..79b46710fe 100644
--- a/src/tools/core/series_analysis/series_analysis.cc
+++ b/src/tools/core/series_analysis/series_analysis.cc
@@ -36,10 +36,10 @@
// 015 10/03/22 Presotpnik MET #2227 Remove namespace netCDF from header files.
// 016 01/29/24 Halley Gotway MET #2801 Configure time difference warnings.
// 017 07/05/24 Halley Gotway MET #2924 Support forecast climatology.
+// 018 07/26/24 Halley Gotway MET #1371 Aggregate previous output.
//
////////////////////////////////////////////////////////////////////////
-
#include
#include
#include
@@ -65,7 +65,6 @@
using namespace std;
using namespace netCDF;
-
////////////////////////////////////////////////////////////////////////
static void process_command_line(int, char **);
@@ -84,32 +83,69 @@ static void get_series_entry(int, VarInfo *, const StringArray &,
static bool read_single_entry(VarInfo *, const ConcatString &,
const GrdFileType, DataPlane &, Grid &);
+static void open_aggr_file();
+static DataPlane read_aggr_data_plane(const ConcatString &,
+ const char *suggestion=nullptr);
+
static void process_scores();
-static void do_cts (int, const PairDataPoint *);
-static void do_mcts (int, const PairDataPoint *);
-static void do_cnt (int, const PairDataPoint *);
-static void do_sl1l2 (int, const PairDataPoint *);
-static void do_pct (int, const PairDataPoint *);
-
-static void store_stat_fho (int, const ConcatString &, const CTSInfo &);
-static void store_stat_ctc (int, const ConcatString &, const CTSInfo &);
-static void store_stat_cts (int, const ConcatString &, const CTSInfo &);
-static void store_stat_mctc (int, const ConcatString &, const MCTSInfo &);
-static void store_stat_mcts (int, const ConcatString &, const MCTSInfo &);
-static void store_stat_cnt (int, const ConcatString &, const CNTInfo &);
-static void store_stat_sl1l2 (int, const ConcatString &, const SL1L2Info &);
-static void store_stat_sal1l2(int, const ConcatString &, const SL1L2Info &);
-static void store_stat_pct (int, const ConcatString &, const PCTInfo &);
-static void store_stat_pstd (int, const ConcatString &, const PCTInfo &);
-static void store_stat_pjc (int, const ConcatString &, const PCTInfo &);
-static void store_stat_prc (int, const ConcatString &, const PCTInfo &);
+static void do_categorical (int, const PairDataPoint *);
+static void do_multicategory (int, const PairDataPoint *);
+static void do_continuous (int, const PairDataPoint *);
+static void do_partialsums (int, const PairDataPoint *);
+static void do_probabilistic (int, const PairDataPoint *);
+static void do_climo_brier (int, double, int, PCTInfo &);
+
+static int read_aggr_total (int);
+static void read_aggr_ctc (int, const CTSInfo &, CTSInfo &);
+static void read_aggr_mctc (int, const MCTSInfo &, MCTSInfo &);
+static void read_aggr_sl1l2 (int, const SL1L2Info &, SL1L2Info &);
+static void read_aggr_sal1l2 (int, const SL1L2Info &, SL1L2Info &);
+static void read_aggr_pct (int, const PCTInfo &, PCTInfo &);
+
+static void store_stat_categorical(int,
+ STATLineType, const ConcatString &,
+ const CTSInfo &);
+static void store_stat_multicategory(int,
+ STATLineType, const ConcatString &,
+ const MCTSInfo &);
+static void store_stat_partialsums(int,
+ STATLineType, const ConcatString &,
+ const SL1L2Info &);
+static void store_stat_continuous(int,
+ STATLineType, const ConcatString &,
+ const CNTInfo &);
+static void store_stat_probabilistic(int,
+ STATLineType, const ConcatString &,
+ const PCTInfo &);
+
+static void store_stat_all_ctc (int, const CTSInfo &);
+static void store_stat_all_mctc (int, const MCTSInfo &);
+static void store_stat_all_sl1l2 (int, const SL1L2Info &);
+static void store_stat_all_sal1l2(int, const SL1L2Info &);
+static void store_stat_all_pct (int, const PCTInfo &);
+
+static ConcatString build_nc_var_name_categorical(
+ STATLineType, const ConcatString &,
+ const CTSInfo &, double);
+static ConcatString build_nc_var_name_multicategory(
+ STATLineType, const ConcatString &,
+ double);
+static ConcatString build_nc_var_name_partialsums(
+ STATLineType, const ConcatString &,
+ const SL1L2Info &);
+static ConcatString build_nc_var_name_continuous(
+ STATLineType, const ConcatString &,
+ const CNTInfo &, double);
+static ConcatString build_nc_var_name_probabilistic(
+ STATLineType, const ConcatString &,
+ const PCTInfo &, double);
static void setup_nc_file(const VarInfo *, const VarInfo *);
-static void add_nc_var(const ConcatString &, const ConcatString &,
- const ConcatString &, const ConcatString &,
- const ConcatString &, double);
-static void put_nc_val(int, const ConcatString &, float);
+static void add_stat_data(const ConcatString &, const ConcatString &,
+ const ConcatString &, const ConcatString &,
+ const ConcatString &, double);
+static void write_stat_data();
static void set_range(const unixtime &, unixtime &, unixtime &);
static void set_range(const int &, int &, int &);
@@ -120,6 +156,7 @@ static void usage();
static void set_fcst_files(const StringArray &);
static void set_obs_files(const StringArray &);
static void set_both_files(const StringArray &);
+static void set_aggr(const StringArray &);
static void set_paired(const StringArray &);
static void set_out_file(const StringArray &);
static void set_config_file(const StringArray &);
@@ -166,12 +203,13 @@ void process_command_line(int argc, char **argv) {
cline.set_usage(usage);
// Add the options function calls
- cline.add(set_fcst_files, "-fcst", -1);
- cline.add(set_obs_files, "-obs", -1);
- cline.add(set_both_files, "-both", -1);
- cline.add(set_paired, "-paired", 0);
- cline.add(set_config_file, "-config", 1);
- cline.add(set_out_file, "-out", 1);
+ cline.add(set_fcst_files, "-fcst", -1);
+ cline.add(set_obs_files, "-obs", -1);
+ cline.add(set_both_files, "-both", -1);
+ cline.add(set_aggr, "-aggr", 1);
+ cline.add(set_paired, "-paired", 0);
+ cline.add(set_config_file, "-config", 1);
+ cline.add(set_out_file, "-out", 1);
cline.add(set_compress, "-compress", 1);
// Parse the command line
@@ -180,36 +218,43 @@ void process_command_line(int argc, char **argv) {
// Check for error. There should be zero arguments left.
if(cline.n() != 0) usage();
- // Warn about log output
+ // Recommend a logging verbosity level below 3
if(mlog.verbosity_level() >= 3) {
- mlog << Warning << "\nRunning Series-Analysis at verbosity >= 3 "
+ mlog << Debug(3) << "Running Series-Analysis at verbosity >= 3 "
<< "produces excessive log output and can slow the runtime "
- << "considerably.\n\n";
+ << "considerably.\n";
}
// Check that the required arguments have been set.
if(fcst_files.n() == 0) {
mlog << Error << "\nprocess_command_line() -> "
<< "the forecast file list must be set using the "
- << "\"-fcst\" or \"-both\" option.\n\n";
+ << R"("-fcst" or "-both" option.)" << "\n\n";
usage();
}
if(obs_files.n() == 0) {
mlog << Error << "\nprocess_command_line() -> "
<< "the observation file list must be set using the "
- << "\"-obs\" or \"-both\" option.\n\n";
+ << R"("-obs" or "-both" option.)" << "\n\n";
usage();
}
if(config_file.length() == 0) {
mlog << Error << "\nprocess_command_line() -> "
<< "the configuration file must be set using the "
- << "\"-config\" option.\n\n";
+ << R"("-config" option.)" << "\n\n";
usage();
}
if(out_file.length() == 0) {
mlog << Error << "\nprocess_command_line() -> "
<< "the output NetCDF file must be set using the "
- << "\"-out\" option.\n\n";
+ << R"("-out" option.)" << "\n\n";
+ usage();
+ }
+ if(aggr_file == out_file) {
+ mlog << Error << "\nprocess_command_line() -> "
+ << R"(the "-out" and "-aggr" options cannot be )"
+ << R"(set to the same file (")" << aggr_file
+ << R"(")!)" << "\n\n";
usage();
}
@@ -249,9 +294,9 @@ void process_command_line(int argc, char **argv) {
// List the lengths of the series options
mlog << Debug(1)
- << "Length of configuration \"fcst.field\" = "
+ << R"(Length of configuration "fcst.field" = )"
<< conf_info.get_n_fcst() << "\n"
- << "Length of configuration \"obs.field\" = "
+ << R"(Length of configuration "obs.field" = )"
<< conf_info.get_n_obs() << "\n"
<< "Length of forecast file list = "
<< fcst_files.n() << "\n"
@@ -266,38 +311,38 @@ void process_command_line(int argc, char **argv) {
// - Observation file list
if(conf_info.get_n_fcst() > 1) {
series_type = SeriesType::Fcst_Conf;
- n_series = conf_info.get_n_fcst();
+ n_series_pair = conf_info.get_n_fcst();
mlog << Debug(1)
- << "Series defined by the \"fcst.field\" configuration entry "
- << "of length " << n_series << ".\n";
+ << R"(Series defined by the "fcst.field" configuration entry )"
+ << "of length " << n_series_pair << ".\n";
}
else if(conf_info.get_n_obs() > 1) {
series_type = SeriesType::Obs_Conf;
- n_series = conf_info.get_n_obs();
+ n_series_pair = conf_info.get_n_obs();
mlog << Debug(1)
- << "Series defined by the \"obs.field\" configuration entry "
- << "of length " << n_series << ".\n";
+ << R"(Series defined by the "obs.field" configuration entry )"
+ << "of length " << n_series_pair << ".\n";
}
else if(fcst_files.n() > 1) {
series_type = SeriesType::Fcst_Files;
- n_series = fcst_files.n();
+ n_series_pair = fcst_files.n();
mlog << Debug(1)
<< "Series defined by the forecast file list of length "
- << n_series << ".\n";
+ << n_series_pair << ".\n";
}
else if(obs_files.n() > 1) {
series_type = SeriesType::Obs_Files;
- n_series = obs_files.n();
+ n_series_pair = obs_files.n();
mlog << Debug(1)
<< "Series defined by the observation file list of length "
- << n_series << ".\n";
+ << n_series_pair << ".\n";
}
else {
series_type = SeriesType::Fcst_Conf;
- n_series = 1;
+ n_series_pair = 1;
mlog << Debug(1)
- << "The \"fcst.field\" and \"obs.field\" configuration entries "
- << "and the \"-fcst\" and \"-obs\" command line options "
+ << R"(The "fcst.field" and "obs.field" configuration entries )"
+ << R"(and the "-fcst" and "-obs" command line options )"
<< "all have length one.\n";
}
@@ -307,7 +352,7 @@ void process_command_line(int argc, char **argv) {
// The number of forecast and observation files must match.
if(fcst_files.n() != obs_files.n()) {
mlog << Error << "\nprocess_command_line() -> "
- << "when using the \"-paired\" command line option, the "
+ << R"(when using the "-paired" command line option, the )"
<< "number of forecast (" << fcst_files.n()
<< ") and observation (" << obs_files.n()
<< ") files must match.\n\n";
@@ -315,24 +360,24 @@ void process_command_line(int argc, char **argv) {
}
// The number of files must match the series length.
- if(fcst_files.n() != n_series) {
+ if(fcst_files.n() != n_series_pair) {
mlog << Error << "\nprocess_command_line() -> "
- << "when using the \"-paired\" command line option, the "
+ << R"(when using the "-paired" command line option, the )"
<< "the file list length (" << fcst_files.n()
- << ") and series length (" << n_series
+ << ") and series length (" << n_series_pair
<< ") must match.\n\n";
usage();
}
// Set the series file names to the input file lists
- for(i=0; iregrid(),
&fcst_grid, &obs_grid);
- nxy = grid.nx() * grid.ny();
// Process masking regions
conf_info.process_masks(grid);
// Set the block size, if needed
- if(is_bad_data(conf_info.block_size)) conf_info.block_size = nxy;
+ if(is_bad_data(conf_info.block_size)) {
+ conf_info.block_size = grid.nxy();
+ }
// Compute the number of reads required
- n_reads = nint(ceil((double) nxy / conf_info.block_size));
+ n_reads = nint(ceil((double) grid.nxy() / conf_info.block_size));
mlog << Debug(2)
<< "Computing statistics using a block size of "
@@ -371,7 +417,7 @@ void process_grid(const Grid &fcst_grid, const Grid &obs_grid) {
<< "\nA block size of " << conf_info.block_size << " for a "
<< grid.nx() << " x " << grid.ny() << " grid requires "
<< n_reads << " passes through the data which will be slow.\n"
- << "Consider increasing \"block_size\" in the configuration "
+ << R"(Consider increasing "block_size" in the configuration )"
<< "file based on available memory.\n\n";
}
@@ -398,8 +444,8 @@ Met2dDataFile *get_mtddf(const StringArray &file_list,
// Read first valid file
if(!(mtddf = mtddf_factory.new_met_2d_data_file(file_list[i].c_str(), type))) {
- mlog << Error << "\nTrouble reading data file \""
- << file_list[i] << "\"\n\n";
+ mlog << Error << "\nTrouble reading data file: "
+ << file_list[i] << "\n\n";
exit(1);
}
@@ -421,7 +467,7 @@ void get_series_data(int i_series,
mlog << Debug(2)
<< "Processing series entry " << i_series + 1 << " of "
- << n_series << ": " << fcst_info->magic_str()
+ << n_series_pair << ": " << fcst_info->magic_str()
<< " versus " << obs_info->magic_str() << "\n";
// Switch on the series type
@@ -499,7 +545,7 @@ void get_series_data(int i_series,
}
// Setup the verification grid
- if(nxy == 0) process_grid(fcst_grid, obs_grid);
+ if(!grid.is_set()) process_grid(fcst_grid, obs_grid);
// Regrid the forecast, if necessary
if(!(fcst_grid == grid)) {
@@ -510,12 +556,12 @@ void get_series_data(int i_series,
<< "disabled:\n" << fcst_grid.serialize()
<< " !=\n" << grid.serialize()
<< "\nSpecify regridding logic in the config file "
- << "\"regrid\" section.\n\n";
+ << R"("regrid" section.)" << "\n\n";
exit(1);
}
- mlog << Debug(1)
- << "Regridding field " << fcst_info->magic_str()
+ mlog << Debug(2)
+ << "Regridding forecast " << fcst_info->magic_str()
<< " to the verification grid.\n";
fcst_dp = met_regrid(fcst_dp, fcst_grid, grid,
fcst_info->regrid());
@@ -530,12 +576,12 @@ void get_series_data(int i_series,
<< "disabled:\n" << obs_grid.serialize()
<< " !=\n" << grid.serialize()
<< "\nSpecify regridding logic in the config file "
- << "\"regrid\" section.\n\n";
+ << R"("regrid" section.)" << "\n\n";
exit(1);
}
- mlog << Debug(1)
- << "Regridding field " << obs_info->magic_str()
+ mlog << Debug(2)
+ << "Regridding observation " << obs_info->magic_str()
<< " to the verification grid.\n";
obs_dp = met_regrid(obs_dp, obs_grid, grid,
obs_info->regrid());
@@ -564,7 +610,8 @@ void get_series_data(int i_series,
<< cs << "\n\n";
}
else {
- mlog << Debug(3) << cs << "\n";
+ mlog << Debug(3)
+ << cs << "\n";
}
}
@@ -578,7 +625,6 @@ void get_series_entry(int i_series, VarInfo *info,
const GrdFileType type,
StringArray &found_files, DataPlane &dp,
Grid &cur_grid) {
- int i, j;
bool found = false;
// Initialize
@@ -588,10 +634,10 @@ void get_series_entry(int i_series, VarInfo *info,
if(found_files[i_series].length() == 0) {
// Loop through the file list
- for(i=0; i "
<< "Could not find data for " << info->magic_str()
<< " in file list:\n";
- for(i=0; i "
+ << "unable to open the aggregate NetCDF file: "
+ << aggr_file << "\n\n";
+ exit(1);
+ }
+
+ // Update timing info based on aggregate file global attributes
+ ConcatString cs;
+
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_init_beg", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), fcst_init_beg, fcst_init_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_init_end", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), fcst_init_beg, fcst_init_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_valid_beg", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), fcst_valid_beg, fcst_valid_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_valid_end", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), fcst_valid_beg, fcst_valid_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_lead_beg", cs)) {
+ set_range(timestring_to_sec(cs.c_str()), fcst_lead_beg, fcst_lead_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "fcst_lead_end", cs)) {
+ set_range(timestring_to_sec(cs.c_str()), fcst_lead_beg, fcst_lead_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_init_beg", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), obs_init_beg, obs_init_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_init_end", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), obs_init_beg, obs_init_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_valid_beg", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), obs_valid_beg, obs_valid_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_valid_end", cs)) {
+ set_range(timestring_to_unix(cs.c_str()), obs_valid_beg, obs_valid_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_lead_beg", cs)) {
+ set_range(timestring_to_sec(cs.c_str()), obs_lead_beg, obs_lead_end);
+ }
+ if(get_att_value_string(aggr_nc.MetNc->Nc, "obs_lead_end", cs)) {
+ set_range(timestring_to_sec(cs.c_str()), obs_lead_beg, obs_lead_end);
+ }
+
+ // Store the aggregate series length
+ n_series_aggr = get_int_var(aggr_nc.MetNc->Nc, n_series_var_name, 0);
+
+ mlog << Debug(3)
+ << "Aggregation series has length " << n_series_aggr << ".\n";
+
+ return;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+DataPlane read_aggr_data_plane(const ConcatString &var_name,
+ const char *suggestion) {
+ DataPlane aggr_dp;
+
+ // Setup the data request
+ VarInfoNcMet aggr_info;
+ aggr_info.set_magic(var_name, "(*,*)");
+
+ mlog << Debug(2)
+ << R"(Reading aggregation ")"
+ << aggr_info.magic_str()
+ << R"(" field.)" << "\n";
+
+ // Attempt to read the gridded data from the current file
+ if(!aggr_nc.data_plane(aggr_info, aggr_dp)) {
+ mlog << Error << "\nread_aggr_data_plane() -> "
+ << R"(Required variable ")" << aggr_info.magic_str()
+ << R"(" not found in the aggregate file!)" << "\n\n";
+ if(suggestion) {
+ mlog << Error
+ << R"(Recommend recreating ")" << aggr_file
+ << R"(" to request that )" << suggestion
+ << " column(s) be written.\n\n";
+ }
+ exit(1);
+ }
+
+ // Check that the grid has not changed
+ if(aggr_nc.grid().nx() != grid.nx() ||
+ aggr_nc.grid().ny() != grid.ny()) {
+ mlog << Error << "\nread_aggr_data_plane() -> "
+ << "the input grid dimensions (" << grid.nx() << ", " << grid.ny()
+ << ") and aggregate grid dimensions (" << aggr_nc.grid().nx()
+ << ", " << aggr_nc.grid().ny() << ") do not match!\n\n";
+ exit(1);
+ }
+
+ return aggr_dp;
+}
+
+////////////////////////////////////////////////////////////////////////
+
void process_scores() {
- int i, x, y, i_read, i_series, i_point, i_fcst;
+ int x;
+ int y;
+ int i_point = 0;
VarInfo *fcst_info = (VarInfo *) nullptr;
VarInfo *obs_info = (VarInfo *) nullptr;
- PairDataPoint *pd_ptr = (PairDataPoint *) nullptr;
- DataPlane fcst_dp, obs_dp;
+ DataPlane fcst_dp;
+ DataPlane obs_dp;
+ vector<PairDataPoint> pd_block;
const char *method_name = "process_scores() ";
// Climatology mean and standard deviation
DataPlane fcmn_dp, fcsd_dp;
DataPlane ocmn_dp, ocsd_dp;
+ // Open the aggregate file, if needed
+ if(aggr_file.nonempty()) open_aggr_file();
+
// Number of points skipped due to valid data threshold
- int n_skip_zero = 0;
- int n_skip_pos = 0;
+ int n_skip_zero_vld = 0;
+ int n_skip_some_vld = 0;
// Loop over the data reads
- for(i_read=0; i_read 1 ? i_series : 0);
+ int i_fcst = (conf_info.get_n_fcst() > 1 ? i_series : 0);
// Store the current VarInfo objects
fcst_info = conf_info.fcst_info[i_fcst];
@@ -715,18 +871,20 @@ void process_scores() {
// Retrieve the data planes for the current series entry
get_series_data(i_series, fcst_info, obs_info, fcst_dp, obs_dp);
- // Allocate PairDataPoint objects, if needed
- if(!pd_ptr) {
- pd_ptr = new PairDataPoint [conf_info.block_size];
- for(i=0; imagic_str() << ", found "
- << (fcmn_flag ? 0 : 1) << " forecast climatology mean and "
- << (fcsd_flag ? 0 : 1) << " standard deviation field(s), and "
- << (ocmn_flag ? 0 : 1) << " observation climatology mean and "
- << (ocsd_flag ? 0 : 1) << " standard deviation field(s).\n";
+ << (fcmn_flag ? 1 : 0) << " forecast climatology mean and "
+ << (fcsd_flag ? 1 : 0) << " standard deviation field(s), and "
+ << (ocmn_flag ? 1 : 0) << " observation climatology mean and "
+ << (ocsd_flag ? 1 : 0) << " standard deviation field(s).\n";
// Setup the output NetCDF file on the first pass
- if(nc_out == (NcFile *) 0) setup_nc_file(fcst_info, obs_info);
+ if(!nc_out) setup_nc_file(fcst_info, obs_info);
// Update timing info
set_range(fcst_dp.init(), fcst_init_beg, fcst_init_end);
@@ -786,7 +940,7 @@ void process_scores() {
set_range(obs_dp.lead(), obs_lead_beg, obs_lead_end);
// Store matched pairs for each grid point
- for(i=0; i 0) {
- do_cts(i_point+i, &pd_ptr[i]);
+ do_categorical(i_point+i, &pd_block[i]);
}
// Compute multi-category contingency table counts and statistics
if(!conf_info.fcst_info[0]->is_prob() &&
(conf_info.output_stats[STATLineType::mctc].n() +
conf_info.output_stats[STATLineType::mcts].n()) > 0) {
- do_mcts(i_point+i, &pd_ptr[i]);
+ do_multicategory(i_point+i, &pd_block[i]);
}
// Compute continuous statistics
if(!conf_info.fcst_info[0]->is_prob() &&
conf_info.output_stats[STATLineType::cnt].n() > 0) {
- do_cnt(i_point+i, &pd_ptr[i]);
+ do_continuous(i_point+i, &pd_block[i]);
}
// Compute partial sums
if(!conf_info.fcst_info[0]->is_prob() &&
(conf_info.output_stats[STATLineType::sl1l2].n() +
conf_info.output_stats[STATLineType::sal1l2].n()) > 0) {
- do_sl1l2(i_point+i, &pd_ptr[i]);
+ do_partialsums(i_point+i, &pd_block[i]);
}
// Compute probabilistics counts and statistics
@@ -878,22 +1031,16 @@ void process_scores() {
conf_info.output_stats[STATLineType::pstd].n() +
conf_info.output_stats[STATLineType::pjc].n() +
conf_info.output_stats[STATLineType::prc].n()) > 0) {
- do_pct(i_point+i, &pd_ptr[i]);
+ do_probabilistic(i_point+i, &pd_block[i]);
}
- } // end for i
- // Erase the data
- for(i=0; i 0 && conf_info.vld_data_thresh == 1.0) {
+ if(n_skip_some_vld > 0 && conf_info.vld_data_thresh == 1.0) {
mlog << Debug(2)
<< "Some points skipped due to missing data:\n"
- << "Consider decreasing \"vld_thresh\" in the config file "
+ << R"(Consider decreasing "vld_thresh" in the config file )"
<< "to include more points.\n"
- << "Consider requesting \"TOTAL\" from \"output_stats\" "
+ << R"(Consider requesting "TOTAL" from "output_stats" )"
<< "in the config file to see the valid data counts.\n";
}
@@ -936,30 +1080,56 @@ void process_scores() {
////////////////////////////////////////////////////////////////////////
-void do_cts(int n, const PairDataPoint *pd_ptr) {
- int i, j;
+void do_categorical(int n, const PairDataPoint *pd_ptr) {
- mlog << Debug(4) << "Computing Categorical Statistics.\n";
+ mlog << Debug(4)
+ << "Computing Categorical Statistics.\n";
// Allocate objects to store categorical statistics
int n_cts = conf_info.fcat_ta.n();
CTSInfo *cts_info = new CTSInfo [n_cts];
// Setup CTSInfo objects
- for(i=0; in_obs-1);
+
+ // Loop over the thresholds
+ for(int i=0; in_obs-1);
+
+ // Compute the current MCTSInfo
+ compute_mctsinfo(*pd_ptr, i_na, false, false, mcts_info);
+
+ // Read the MCTC data to be aggregated
+ MCTSInfo aggr_mcts;
+ read_aggr_mctc(n, mcts_info, aggr_mcts);
+
+ // Aggregate MCTC counts
+ mcts_info.cts += aggr_mcts.cts;
+
+ // Compute statistics and confidence intervals
+ mcts_info.compute_stats();
+ mcts_info.compute_ci();
+
+ }
// Compute the counts, stats, normal confidence intervals, and
// bootstrap confidence intervals
- if(conf_info.boot_interval == BootIntervalType::BCA) {
+ else if(conf_info.boot_interval == BootIntervalType::BCA) {
compute_mcts_stats_ci_bca(rng_ptr, *pd_ptr,
conf_info.n_boot_rep,
mcts_info, true,
@@ -1037,15 +1232,17 @@ void do_mcts(int n, const PairDataPoint *pd_ptr) {
}
// Add statistic value for each possible MCTC column
- for(i=0; isubset_pairs_cnt_thresh(cnt_info.fthresh, cnt_info.othresh,
- cnt_info.logic);
-
- // Check for no matched pairs to process
- if(pd.n_obs == 0) continue;
-
- // Compute the stats, normal confidence intervals, and
- // bootstrap confidence intervals
- int precip_flag = (conf_info.fcst_info[0]->is_precipitation() &&
- conf_info.obs_info[0]->is_precipitation());
+ // Aggregate input pair data with existing partial sums
+ if(aggr_file.nonempty()) {
+
+ // Compute partial sums from the pair data
+ SL1L2Info s_info;
+ s_info.fthresh = cnt_info.fthresh;
+ s_info.othresh = cnt_info.othresh;
+ s_info.logic = cnt_info.logic;
+ s_info.set(*pd_ptr);
+
+ // Aggregate scalar partial sums
+ SL1L2Info aggr_psum;
+ read_aggr_sl1l2(n, s_info, aggr_psum);
+ s_info += aggr_psum;
+
+ // Aggregate scalar anomaly partial sums
+ if(conf_info.output_stats[STATLineType::cnt].has("ANOM_CORR")) {
+ read_aggr_sal1l2(n, s_info, aggr_psum);
+ s_info += aggr_psum;
+ }
- if(conf_info.boot_interval == BootIntervalType::BCA) {
- compute_cnt_stats_ci_bca(rng_ptr, pd,
- precip_flag, conf_info.rank_corr_flag,
- conf_info.n_boot_rep,
- cnt_info, conf_info.tmp_dir.c_str());
+ // Compute continuous statistics from partial sums
+ compute_cntinfo(s_info, cnt_info);
}
+ // Compute continuous statistics from the pair data
else {
- compute_cnt_stats_ci_perc(rng_ptr, pd,
- precip_flag, conf_info.rank_corr_flag,
- conf_info.n_boot_rep, conf_info.boot_rep_prop,
- cnt_info, conf_info.tmp_dir.c_str());
+
+ // Apply continuous filtering thresholds to subset pairs
+ pd = pd_ptr->subset_pairs_cnt_thresh(cnt_info.fthresh, cnt_info.othresh,
+ cnt_info.logic);
+
+ // Check for no matched pairs to process
+ if(pd.n_obs == 0) continue;
+
+ // Compute the stats, normal confidence intervals, and
+ // bootstrap confidence intervals
+ int precip_flag = (conf_info.fcst_info[0]->is_precipitation() &&
+ conf_info.obs_info[0]->is_precipitation());
+
+ if(conf_info.boot_interval == BootIntervalType::BCA) {
+ compute_cnt_stats_ci_bca(rng_ptr, pd,
+ precip_flag, conf_info.rank_corr_flag,
+ conf_info.n_boot_rep,
+ cnt_info, conf_info.tmp_dir.c_str());
+ }
+ else {
+ compute_cnt_stats_ci_perc(rng_ptr, pd,
+ precip_flag, conf_info.rank_corr_flag,
+ conf_info.n_boot_rep, conf_info.boot_rep_prop,
+ cnt_info, conf_info.tmp_dir.c_str());
+ }
}
// Add statistic value for each possible CNT column
- for(j=0; j 0) {
+ read_aggr_sl1l2(n, s_info, aggr_psum);
+ s_info += aggr_psum;
+ }
+
+ // Aggregate SAL1L2 partial sums
+ if(conf_info.output_stats[STATLineType::sal1l2].n() > 0) {
+ read_aggr_sal1l2(n, s_info, aggr_psum);
+ s_info += aggr_psum;
+ }
+ }
+
// Add statistic value for each possible SL1L2 column
- for(j=0; jNc, &aggr_var_names);
- // Setup the PCTInfo object
- pct_info.fthresh = conf_info.fcat_ta;
- pct_info.allocate_n_alpha(conf_info.ci_alpha.n());
+ // Search for one containing TOTAL
+ for(int i=0; i "
+ << R"(No variable containing ")" << total_name
+ << R"(" "found in the aggregate file!)" << "\n\n";
+ exit(1);
+ }
}
- // Compute PCTInfo for each observation threshold
- for(i=0; i "
+ << "the number of MCTC categories do not match ("
+ << nint(v) << " != " << aggr_mcts.cts.nrows() << ")!\n\n";
+ exit(1);
}
- } // end for i
+ // Check the expected correct value
+ else if(c == "EC_VALUE" && !is_bad_data(v) &&
+ !is_eq(v, aggr_mcts.cts.ec_value(), loose_tol)) {
+ mlog << Error << "\nread_aggr_mctc() -> "
+ << "the MCTC expected correct values do not match ("
+ << v << " != " << aggr_mcts.cts.ec_value() << ")!\n\n";
+ exit(1);
+ }
+ // Populate the MCTC table
+ else if(check_reg_exp("F[0-9]*_O[0-9]*", c.c_str())) {
+ StringArray sa(c.split("_"));
+ int i_row = atoi(sa[0].c_str()+1) - 1;
+ int i_col = atoi(sa[1].c_str()+1) - 1;
+ aggr_mcts.cts.set_entry(i_row, i_col, nint(v));
+ }
+ }
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_fho(int n, const ConcatString &col,
- const CTSInfo &cts_info) {
- double v;
- ConcatString lty_stat, var_name;
+void read_aggr_sl1l2(int n, const SL1L2Info &s_info,
+ SL1L2Info &aggr_psum) {
- // Set the column name to all upper case
- ConcatString c = to_upper(col);
+ // Initialize
+ aggr_psum.zero_out();
- // Get the column value
- if(c == "TOTAL") { v = (double) cts_info.cts.n(); }
- else if(c == "F_RATE") { v = cts_info.cts.f_rate(); }
- else if(c == "H_RATE") { v = cts_info.cts.h_rate(); }
- else if(c == "O_RATE") { v = cts_info.cts.o_rate(); }
- else {
- mlog << Error << "\nstore_stat_fho() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Loop over the SL1L2 columns
+ for(auto &col : sl1l2_columns) {
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_fho_" << c;
+ ConcatString c(to_upper(col));
+ ConcatString var_name(build_nc_var_name_partialsums(
+ STATLineType::sl1l2, c,
+ s_info));
- // Append threshold information
- if(cts_info.fthresh == cts_info.othresh) {
- var_name << "_" << cts_info.fthresh.get_abbr_str();
- }
- else {
- var_name << "_fcst" << cts_info.fthresh.get_abbr_str()
- << "_obs" << cts_info.othresh.get_abbr_str();
+ // Read aggregate data, if needed
+ if(aggr_data.count(var_name) == 0) {
+ aggr_data[var_name] = read_aggr_data_plane(
+ var_name, R"("ALL" SL1L2)");
+ }
+
+ // Populate the partial sums
+ aggr_psum.set_stat_sl1l2(col, aggr_data[var_name].buf()[n]);
}
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
+ return;
+}
- // Build key
- lty_stat << "FHO_" << c;
+////////////////////////////////////////////////////////////////////////
- // Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- cts_info.fthresh.get_str(),
- cts_info.othresh.get_str(),
- bad_data_double);
- }
+void read_aggr_sal1l2(int n, const SL1L2Info &s_info,
+ SL1L2Info &aggr_psum) {
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ // Initialize
+ aggr_psum.zero_out();
+
+ // Loop over the SAL1L2 columns
+ for(auto &col : sal1l2_columns) {
+
+ ConcatString c(to_upper(col));
+ ConcatString var_name(build_nc_var_name_partialsums(
+ STATLineType::sal1l2, c,
+ s_info));
+
+ // Read aggregate data, if needed
+ if(aggr_data.count(var_name) == 0) {
+ aggr_data[var_name] = read_aggr_data_plane(
+ var_name, R"("ALL" SAL1L2)");
+ }
+
+ // Populate the partial sums
+ aggr_psum.set_stat_sal1l2(col, aggr_data[var_name].buf()[n]);
+ }
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_ctc(int n, const ConcatString &col,
- const CTSInfo &cts_info) {
- int v;
- ConcatString lty_stat, var_name;
+void read_aggr_pct(int n, const PCTInfo &pct_info,
+ PCTInfo &aggr_pct) {
- // Set the column name to all upper case
- ConcatString c = to_upper(col);
+ // Initialize
+ aggr_pct.pct = pct_info.pct;
+ aggr_pct.pct.zero_out();
- // Get the column value
- if(c == "TOTAL") { v = cts_info.cts.n(); }
- else if(c == "FY_OY") { v = cts_info.cts.fy_oy(); }
- else if(c == "FY_ON") { v = cts_info.cts.fy_on(); }
- else if(c == "FN_OY") { v = cts_info.cts.fn_oy(); }
- else if(c == "FN_ON") { v = cts_info.cts.fn_on(); }
- else if(c == "EC_VALUE") { v = cts_info.cts.ec_value(); }
- else {
- mlog << Error << "\nstore_stat_ctc() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Get PCT column names
+ StringArray pct_cols(get_pct_columns(aggr_pct.pct.nrows()+1));
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_ctc_" << c;
+ // Loop over the PCT columns
+ for(int i=0; i "
+ << "the number of PCT thresholds do not match ("
+ << nint(v) << " != " << aggr_pct.pct.nrows()+1
+ << ")!\n\n";
+ exit(1);
+ }
+ // Set the event counts
+ else if(check_reg_exp("OY_[0-9]", c.c_str())) {
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ // Parse the index value from the column name
+ int i_row = atoi(strrchr(c.c_str(), '_') + 1) - 1;
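+ // e.g. "OY_3" stores the event count for 0-based row 2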
+ aggr_pct.pct.set_event(i_row, nint(v));
+ }
+ // Set the non-event counts
+ else if(check_reg_exp("ON_[0-9]", c.c_str())) {
+
+ // Parse the index value from the column name
+ int i_row = atoi(strrchr(c.c_str(), '_') + 1) - 1;
+ aggr_pct.pct.set_nonevent(i_row, nint(v));
+ }
+ }
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_cts(int n, const ConcatString &col,
- const CTSInfo &cts_info) {
- int i;
- double v;
- ConcatString lty_stat, var_name;
- int n_ci = 1;
+void do_probabilistic(int n, const PairDataPoint *pd_ptr) {
- // Set the column name to all upper case
- ConcatString c = to_upper(col);
+ mlog << Debug(4)
+ << "Computing Probabilistic Statistics.\n";
- // Check for columns with normal or bootstrap confidence limits
- if(strstr(c.c_str(), "_NC") || strstr(c.c_str(), "_BC")) n_ci = cts_info.n_alpha;
-
- // Loop over the alpha values, if necessary
- for(i=0; i "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Object to store probabilistic statistics
+ PCTInfo pct_info;
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_cts_" << c;
+ // Setup the PCTInfo object
+ pct_info.fthresh = conf_info.fcat_ta;
+ pct_info.allocate_n_alpha(conf_info.ci_alpha.n());
+
+ for(int i=0; i 1) var_name << "_a" << cts_info.alpha[i];
-
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
-
- // Build key
- lty_stat << "CTS_" << c;
+ // Add statistic value for each possible PCT column
+ for(int j=0; j 1 ? cts_info.alpha[i] : bad_data_double));
+ // Add statistic value for each possible PSTD column
+ for(int j=0; j= mcts_info.cts.nrows() ||
- j < 0 || j >= mcts_info.cts.ncols()) {
- mlog << Error << "\nstore_stat_mctc() -> "
- << "range check error for column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Aggregate the climatology brier score as a weighted
+ // average and recompute the brier skill score
- // Retrieve the value
- v = (double) mcts_info.cts.entry(i, j);
- }
- else {
- mlog << Error << "\nstore_stat_mctc() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ if(is_bad_data(briercl_pair) || total_pair == 0) return;
// Construct the NetCDF variable name
- var_name << cs_erase << "series_mctc_" << c;
+ ConcatString var_name(build_nc_var_name_probabilistic(
+ STATLineType::pstd, "BRIERCL",
+ pct_info, bad_data_double));
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
+ // Read aggregate data, if needed
+ if(aggr_data.count(var_name) == 0) {
+ aggr_data[var_name] = read_aggr_data_plane(
+ var_name, R"(the "BRIERCL" PSTD)");
+ }
- // Build key
- lty_stat << "MCTC_" << d;
+ // Get the n-th BRIERCL value
+ double briercl_aggr = aggr_data[var_name].buf()[n];
+ int total_aggr = read_aggr_total(n);
- // Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- mcts_info.fthresh.get_str(","),
- mcts_info.othresh.get_str(","),
- bad_data_double);
- }
+ // Aggregate BRIERCL as a weighted average
+ if(!is_bad_data(briercl_pair) &&
+ !is_bad_data(briercl_aggr) &&
+ (total_pair + total_aggr) > 0) {
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ pct_info.briercl.v = (total_pair * briercl_pair +
+ total_aggr * briercl_aggr) /
+ (total_pair + total_aggr);
+
+ // Compute the brier skill score
+ if(!is_bad_data(pct_info.brier.v) &&
+ !is_bad_data(pct_info.briercl.v)) {
+ pct_info.bss = 1.0 - (pct_info.brier.v / pct_info.briercl.v);
+ }
+ }
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_mcts(int n, const ConcatString &col,
- const MCTSInfo &mcts_info) {
- int i;
- double v;
- ConcatString lty_stat, var_name;
- int n_ci = 1;
+void store_stat_categorical(int n, STATLineType lt,
+ const ConcatString &col,
+ const CTSInfo &cts_info) {
// Set the column name to all upper case
ConcatString c = to_upper(col);
+ // Handle ALL CTC columns
+ if(lt == STATLineType::ctc && c == all_columns) {
+ return store_stat_all_ctc(n, cts_info);
+ }
+
// Check for columns with normal or bootstrap confidence limits
- if(strstr(c.c_str(), "_NC") || strstr(c.c_str(), "_BC")) n_ci = mcts_info.n_alpha;
-
- // Loop over the alpha values, if necessary
- for(i=0; i "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ int n_alpha = 1;
+ if(lt == STATLineType::cts && is_ci_stat_name(c)) {
+ n_alpha = cts_info.n_alpha;
+ }
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_mcts_" << c;
+ // Loop over the alpha values
+ for(int i_alpha=0; i_alpha 1) var_name << "_a" << mcts_info.alpha[i];
+ // Store alpha value
+ double alpha = (n_alpha > 1 ? cts_info.alpha[i_alpha] : bad_data_double);
+
+ // Construct the NetCDF variable name
+ ConcatString var_name(build_nc_var_name_categorical(
+ lt, c, cts_info, alpha));
// Add map for this variable name
if(stat_data.count(var_name) == 0) {
// Build key
- lty_stat << "MCTS_" << c;
+ ConcatString lty_stat(statlinetype_to_string(lt));
+ lty_stat << "_" << c;
// Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- mcts_info.fthresh.get_str(","),
- mcts_info.othresh.get_str(","),
- (n_ci > 1 ? mcts_info.alpha[i] : bad_data_double));
+ add_stat_data(var_name, c, stat_long_name[lty_stat],
+ cts_info.fthresh.get_str(),
+ cts_info.othresh.get_str(),
+ alpha);
}
// Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ stat_data[var_name].dp.buf()[n] = cts_info.get_stat(lt, c, i_alpha);
- } // end for i
+ } // end for i_alpha
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_cnt(int n, const ConcatString &col,
- const CNTInfo &cnt_info) {
- int i;
- double v;
- ConcatString lty_stat, var_name;
- int n_ci = 1;
+void store_stat_multicategory(int n, STATLineType lt,
+ const ConcatString &col,
+ const MCTSInfo &mcts_info) {
// Set the column name to all upper case
ConcatString c = to_upper(col);
+ // Handle ALL MCTC columns
+ if(lt == STATLineType::mctc && c == all_columns) {
+ return store_stat_all_mctc(n, mcts_info);
+ }
+
// Check for columns with normal or bootstrap confidence limits
- if(strstr(c.c_str(), "_NC") || strstr(c.c_str(), "_BC")) n_ci = cnt_info.n_alpha;
-
- // Loop over the alpha values, if necessary
- for(i=0; i<n_ci; i++) {
- mlog << Error << "\nstore_stat_cnt() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ int n_alpha = 1;
+ if(lt == STATLineType::mcts && is_ci_stat_name(c)) {
+ n_alpha = mcts_info.n_alpha;
+ }
+
+ // Loop over the alpha values
+ for(int i_alpha=0; i_alpha<n_alpha; i_alpha++) {
+
+ // Store alpha value
+ double alpha = (n_alpha > 1 ? mcts_info.alpha[i_alpha] : bad_data_double);
// Construct the NetCDF variable name
- var_name << cs_erase << "series_cnt_" << c;
-
- // Append threshold information, if supplied
- if(cnt_info.fthresh.get_type() != thresh_na ||
- cnt_info.othresh.get_type() != thresh_na) {
- var_name << "_fcst" << cnt_info.fthresh.get_abbr_str()
- << "_" << setlogic_to_abbr(conf_info.cnt_logic)
- << "_obs" << cnt_info.othresh.get_abbr_str();
- }
+ ConcatString var_name(build_nc_var_name_multicategory(
+ lt, c, alpha));
- // Append confidence interval alpha value
- if(n_ci > 1) var_name << "_a" << cnt_info.alpha[i];
+ // Store the data value
+ ConcatString col_name;
+ auto v = (float) mcts_info.get_stat(lt, c, col_name, i_alpha);
// Add map for this variable name
if(stat_data.count(var_name) == 0) {
// Build key
- lty_stat << "CNT_" << c;
+ ConcatString lty_stat;
+ lty_stat << statlinetype_to_string(lt) << "_" << col_name;
// Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- cnt_info.fthresh.get_str(),
- cnt_info.othresh.get_str(),
- (n_ci > 1 ? cnt_info.alpha[i] : bad_data_double));
+ add_stat_data(var_name, c, stat_long_name[lty_stat],
+ mcts_info.fthresh.get_str(","),
+ mcts_info.othresh.get_str(","),
+ alpha);
}
// Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ stat_data[var_name].dp.buf()[n] = v;
- } // end for i
+ } // end for i_alpha
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_sl1l2(int n, const ConcatString &col,
- const SL1L2Info &s_info) {
- double v;
- ConcatString lty_stat, var_name;
+void store_stat_continuous(int n, STATLineType lt,
+ const ConcatString &col,
+ const CNTInfo &cnt_info) {
// Set the column name to all upper case
ConcatString c = to_upper(col);
- // Get the column value
- if(c == "TOTAL") { v = (double) s_info.scount; }
- else if(c == "FBAR") { v = s_info.fbar; }
- else if(c == "OBAR") { v = s_info.obar; }
- else if(c == "FOBAR") { v = s_info.fobar; }
- else if(c == "FFBAR") { v = s_info.ffbar; }
- else if(c == "OOBAR") { v = s_info.oobar; }
- else if(c == "MAE") { v = s_info.smae; }
- else {
- mlog << Error << "\nstore_stat_sl1l2() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Check for columns with normal or bootstrap confidence limits
+ int n_alpha = 1;
+ if(is_ci_stat_name(c)) n_alpha = cnt_info.n_alpha;
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_sl1l2_" << c;
+ // Loop over the alpha values
+ for(int i_alpha=0; i_alpha<n_alpha; i_alpha++) {
+
+ // Store alpha value
+ double alpha = (n_alpha > 1 ? cnt_info.alpha[i_alpha] : bad_data_double);
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
+ // Construct the NetCDF variable name
+ ConcatString var_name(build_nc_var_name_continuous(
+ lt, c, cnt_info, alpha));
- // Build key
- lty_stat << "SL1L2_" << c;
+ // Add map for this variable name
+ if(stat_data.count(var_name) == 0) {
- // Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- s_info.fthresh.get_str(),
- s_info.othresh.get_str(),
- bad_data_double);
- }
+ // Build key
+ ConcatString lty_stat(statlinetype_to_string(lt));
+ lty_stat << "_" << c;
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ // Add new map entry
+ add_stat_data(var_name, c, stat_long_name[lty_stat],
+ cnt_info.fthresh.get_str(),
+ cnt_info.othresh.get_str(),
+ alpha);
+ }
+
+ // Store the statistic value
+ stat_data[var_name].dp.buf()[n] = cnt_info.get_stat(c, i_alpha);
+
+ } // end for i_alpha
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_sal1l2(int n, const ConcatString &col,
- const SL1L2Info &s_info) {
- double v;
+void store_stat_partialsums(int n, STATLineType lt,
+ const ConcatString &col,
+ const SL1L2Info &s_info) {
// Set the column name to all upper case
ConcatString c = to_upper(col);
- // Get the column value
- if(c == "TOTAL") { v = (double) s_info.sacount; }
- else if(c == "FABAR") { v = s_info.fabar; }
- else if(c == "OABAR") { v = s_info.oabar; }
- else if(c == "FOABAR") { v = s_info.foabar; }
- else if(c == "FFABAR") { v = s_info.ffabar; }
- else if(c == "OOABAR") { v = s_info.ooabar; }
- else if(c == "MAE") { v = s_info.samae; }
- else {
- mlog << Error << "\nstore_stat_sal1l2() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
+ // Handle ALL columns
+ if(c == all_columns) {
+ if(lt == STATLineType::sl1l2) return store_stat_all_sl1l2(n, s_info);
+ else if(lt == STATLineType::sal1l2) return store_stat_all_sal1l2(n, s_info);
}
// Construct the NetCDF variable name
- ConcatString var_name("series_sal1l2_");
- var_name << c;
-
- // Append threshold information, if supplied
- if(s_info.fthresh.get_type() != thresh_na ||
- s_info.othresh.get_type() != thresh_na) {
- var_name << "_fcst" << s_info.fthresh.get_abbr_str()
- << "_" << setlogic_to_abbr(conf_info.cnt_logic)
- << "_obs" << s_info.othresh.get_abbr_str();
- }
+ ConcatString var_name(build_nc_var_name_partialsums(
+ lt, c, s_info));
// Add map for this variable name
if(stat_data.count(var_name) == 0) {
// Build key
- ConcatString lty_stat("SAL1L2_");
- lty_stat << c;
+ ConcatString lty_stat(statlinetype_to_string(lt));
+ lty_stat << "_" << c;
// Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- s_info.fthresh.get_str(),
- s_info.othresh.get_str(),
- bad_data_double);
+ add_stat_data(var_name, c, stat_long_name[lty_stat],
+ s_info.fthresh.get_str(),
+ s_info.othresh.get_str(),
+ bad_data_double);
}
// Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ stat_data[var_name].dp.buf()[n] = s_info.get_stat(lt, c);
return;
}
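The value of writing partial sums here is that they aggregate cleanly: two SL1L2 sets can be combined into one from which continuous statistics are still derivable. The sketch below is a simplified illustration of that property using plain structs; the field names mirror the SL1L2 columns, but the types and the combine() helper are assumptions for the example rather than MET code.

#include <cstdio>

// Scalar partial sums, mirroring the SL1L2 columns (illustration only).
struct PartialSums {
   int    total;
   double fbar, obar, fobar, ffbar, oobar;
};

// Combine two partial-sum sets using their TOTAL counts as weights.
PartialSums combine(const PartialSums &a, const PartialSums &b) {
   PartialSums c {0, 0.0, 0.0, 0.0, 0.0, 0.0};
   c.total = a.total + b.total;
   if(c.total == 0) return c;
   auto wavg = [&](double x, double y) {
      return (a.total * x + b.total * y) / c.total;
   };
   c.fbar  = wavg(a.fbar,  b.fbar);
   c.obar  = wavg(a.obar,  b.obar);
   c.fobar = wavg(a.fobar, b.fobar);
   c.ffbar = wavg(a.ffbar, b.ffbar);
   c.oobar = wavg(a.oobar, b.oobar);
   return c;
}

int main() {
   PartialSums a {  50, 1.0, 1.1, 1.2, 1.3, 1.4 };
   PartialSums b { 150, 2.0, 2.1, 2.2, 2.3, 2.4 };
   PartialSums c = combine(a, b);
   // MSE is derivable from the combined sums: MSE = FFBAR - 2*FOBAR + OOBAR
   std::printf("TOTAL=%d FBAR=%.3f MSE=%.3f\n",
               c.total, c.fbar, c.ffbar - 2.0*c.fobar + c.oobar);
   return 0;
}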
////////////////////////////////////////////////////////////////////////
-void store_stat_pct(int n, const ConcatString &col,
- const PCTInfo &pct_info) {
- int i = 0;
- double v;
- ConcatString lty_stat, var_name;
+void store_stat_probabilistic(int n, STATLineType lt,
+ const ConcatString &col,
+ const PCTInfo &pct_info) {
// Set the column name to all upper case
ConcatString c = to_upper(col);
- ConcatString d = c;
-
- // Get index value for variable column numbers
- if(check_reg_exp("_[0-9]", c.c_str())) {
-
- // Parse the index value from the column name
- i = atoi(strrchr(c.c_str(), '_') + 1) - 1;
-
- // Range check
- if(i < 0 || i >= pct_info.pct.nrows()) {
- mlog << Error << "\nstore_stat_pct() -> "
- << "range check error for column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
- } // end if
-
- // Get the column value
- if(c == "TOTAL") { v = (double) pct_info.pct.n(); }
- else if(c == "N_THRESH") { v = (double) pct_info.pct.nrows() + 1; }
- else if(check_reg_exp("THRESH_[0-9]", c.c_str())) { v = pct_info.pct.threshold(i); }
- else if(check_reg_exp("OY_[0-9]", c.c_str())) { v = (double) pct_info.pct.event_count_by_row(i);
- d = "OY_I"; }
- else if(check_reg_exp("ON_[0-9]", c.c_str())) { v = (double) pct_info.pct.nonevent_count_by_row(i);
- d = "ON_I"; }
- else {
- mlog << Error << "\nstore_stat_pct() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
-
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_pct_" << c
- << "_obs" << pct_info.othresh.get_abbr_str();
-
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
-
- // Build key
- lty_stat << "PCT_" << d;
- // Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- pct_info.fthresh.get_str(","),
- pct_info.othresh.get_str(),
- bad_data_double);
+ // Handle ALL PCT columns
+ if(lt == STATLineType::pct && c == all_columns) {
+ return store_stat_all_pct(n, pct_info);
}
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
-
- return;
-}
-
-////////////////////////////////////////////////////////////////////////
-
-void store_stat_pstd(int n, const ConcatString &col,
- const PCTInfo &pct_info) {
- int i;
- double v;
- ConcatString lty_stat, var_name;
- int n_ci = 1;
+ // Check for columns with normal or bootstrap confidence limits
+ int n_alpha = 1;
+ if(is_ci_stat_name(c)) n_alpha = pct_info.n_alpha;
- // Set the column name to all upper case
- ConcatString c = to_upper(col);
+ // Loop over the alpha values
+ for(int i_alpha=0; i_alpha<n_alpha; i_alpha++) {
- mlog << Error << "\nstore_stat_pstd() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+ // Store alpha value
+ double alpha = (n_alpha > 1 ? pct_info.alpha[i_alpha] : bad_data_double);
// Construct the NetCDF variable name
- var_name << cs_erase << "series_pstd_" << c;
+ ConcatString var_name(build_nc_var_name_probabilistic(
+ lt, c, pct_info, alpha));
- // Append confidence interval alpha value
- if(n_ci > 1) var_name << "_a" << pct_info.alpha[i];
+ // Store the data value
+ ConcatString col_name;
+ auto v = (float) pct_info.get_stat(lt, c, col_name, i_alpha);
// Add map for this variable name
if(stat_data.count(var_name) == 0) {
// Build key
- lty_stat << "PSTD_" << c;
+ ConcatString lty_stat(statlinetype_to_string(lt));
+ lty_stat << "_" << col_name;
// Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- pct_info.fthresh.get_str(","),
- pct_info.othresh.get_str(),
- (n_ci > 1 ? pct_info.alpha[i] : bad_data_double));
+ add_stat_data(var_name, c, stat_long_name[lty_stat],
+ pct_info.fthresh.get_str(","),
+ pct_info.othresh.get_str(),
+ alpha);
}
// Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ stat_data[var_name].dp.buf()[n] = v;
- } // end for i
+ } // end for i_alpha
return;
}
////////////////////////////////////////////////////////////////////////
-void store_stat_pjc(int n, const ConcatString &col,
- const PCTInfo &pct_info) {
- int i = 0;
- int tot;
- double v;
- ConcatString lty_stat, var_name;
+void store_stat_all_ctc(int n, const CTSInfo &cts_info) {
+ for(auto &col : ctc_columns) {
+ store_stat_categorical(n, STATLineType::ctc, col, cts_info);
+ }
+}
- // Set the column name to all upper case
- ConcatString c = to_upper(col);
- ConcatString d = c;
+////////////////////////////////////////////////////////////////////////
- // Get index value for variable column numbers
- if(check_reg_exp("_[0-9]", c.c_str())) {
+void store_stat_all_mctc(int n, const MCTSInfo &mcts_info) {
+ StringArray mctc_cols(get_mctc_columns(mcts_info.cts.nrows()));
+ for(int i=0; i<mctc_cols.n(); i++) {
+ store_stat_multicategory(n, STATLineType::mctc, mctc_cols[i], mcts_info);
+ }
+}
- if(i < 0 || i >= pct_info.pct.nrows()) {
- mlog << Error << "\nstore_stat_pjc() -> "
- << "range check error for column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
- } // end if
-
- // Store the total count
- tot = pct_info.pct.n();
-
- // Get the column value
- if(c == "TOTAL") { v = (double) tot; }
- else if(c == "N_THRESH") { v = (double) pct_info.pct.nrows() + 1; }
- else if(check_reg_exp("THRESH_[0-9]", c.c_str())) { v = pct_info.pct.threshold(i);
- d = "THRESH_I"; }
- else if(check_reg_exp("OY_TP_[0-9]", c.c_str())) { v = pct_info.pct.event_count_by_row(i)/(double) tot;
- d = "OY_TP_I"; }
- else if(check_reg_exp("ON_TP_[0-9]", c.c_str())) { v = pct_info.pct.nonevent_count_by_row(i)/(double) tot;
- d = "ON_TP_I"; }
- else if(check_reg_exp("CALIBRATION_[0-9]", c.c_str())) { v = pct_info.pct.row_calibration(i);
- d = "CALIBRATION_I"; }
- else if(check_reg_exp("REFINEMENT_[0-9]", c.c_str())) { v = pct_info.pct.row_refinement(i);
- d = "REFINEMENT_I"; }
- else if(check_reg_exp("LIKELIHOOD_[0-9]", c.c_str())) { v = pct_info.pct.row_event_likelihood(i);
- d = "LIKELIHOOD_I"; }
- else if(check_reg_exp("BASER_[0-9]", c.c_str())) { v = pct_info.pct.row_obar(i);
- d = "BASER_I"; }
- else {
- mlog << Error << "\nstore_stat_pjc() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
+void store_stat_all_sl1l2(int n, const SL1L2Info &s_info) {
+ for(auto &col : sl1l2_columns) {
+ store_stat_partialsums(n, STATLineType::sl1l2, col, s_info);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////
+
+void store_stat_all_sal1l2(int n, const SL1L2Info &s_info) {
+ for(auto &col : sal1l2_columns) {
+ store_stat_partialsums(n, STATLineType::sal1l2, col, s_info);
}
+}
- // Construct the NetCDF variable name
- var_name << cs_erase << "series_pjc_" << c
- << "_obs" << pct_info.othresh.get_abbr_str();
+////////////////////////////////////////////////////////////////////////
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
+void store_stat_all_pct(int n, const PCTInfo &pct_info) {
+ StringArray pct_cols(get_pct_columns(pct_info.pct.nrows() + 1));
+ for(int i=0; i<pct_cols.n(); i++) {
+ store_stat_probabilistic(n, STATLineType::pct, pct_cols[i], pct_info);
+ }
+}
- if(i < 0 || i >= pct_info.pct.nrows()) {
- mlog << Error << "\nstore_stat_prc() -> "
- << "range check error for column name requested \"" << c
- << "\"\n\n";
- exit(1);
- }
+////////////////////////////////////////////////////////////////////////
- // Get the 2x2 contingency table for this row
- ct = pct_info.pct.ctc_by_row(i);
+ConcatString build_nc_var_name_partialsums(
+ STATLineType lt, const ConcatString &col,
+ const SL1L2Info &s_info) {
- } // end if
+ // Append the column name
+ ConcatString var_name("series_");
+ var_name << to_lower(statlinetype_to_string(lt)) << "_" << col;
- // Get the column value
- if(c == "TOTAL") { v = (double) pct_info.pct.n(); }
- else if(c == "N_THRESH") { v = (double) pct_info.pct.nrows() + 1; }
- else if(check_reg_exp("THRESH_[0-9]", c.c_str())) { v = pct_info.pct.threshold(i);
- d = "THRESH_I"; }
- else if(check_reg_exp("PODY_[0-9]", c.c_str())) { v = ct.pod_yes();
- d = "PODY_I"; }
- else if(check_reg_exp("POFD_[0-9]", c.c_str())) { v = ct.pofd();
- d = "POFD_I"; }
- else {
- mlog << Error << "\nstore_stat_prc() -> "
- << "unsupported column name requested \"" << c
- << "\"\n\n";
- exit(1);
+ // Append threshold information, if supplied
+ if(s_info.fthresh.get_type() != thresh_na ||
+ s_info.othresh.get_type() != thresh_na) {
+ var_name << "_fcst" << s_info.fthresh.get_abbr_str()
+ << "_" << setlogic_to_abbr(s_info.logic)
+ << "_obs" << s_info.othresh.get_abbr_str();
}
- // Add map for this variable name
- if(stat_data.count(var_name) == 0) {
+ return var_name;
+}
- // Build key
- lty_stat << "PRC_" << d;
+////////////////////////////////////////////////////////////////////////
- // Add new map entry
- add_nc_var(var_name, c, stat_long_name[lty_stat],
- pct_info.fthresh.get_str(","),
- pct_info.othresh.get_str(),
- bad_data_double);
+ConcatString build_nc_var_name_continuous(
+ STATLineType lt, const ConcatString &col,
+ const CNTInfo &cnt_info, double alpha) {
+
+ // Append the column name
+ ConcatString var_name("series_");
+ var_name << to_lower(statlinetype_to_string(lt)) << "_" << col;
+
+ // Append threshold information, if supplied
+ if(cnt_info.fthresh.get_type() != thresh_na ||
+ cnt_info.othresh.get_type() != thresh_na) {
+ var_name << "_fcst" << cnt_info.fthresh.get_abbr_str()
+ << "_" << setlogic_to_abbr(cnt_info.logic)
+ << "_obs" << cnt_info.othresh.get_abbr_str();
}
- // Store the statistic value
- put_nc_val(n, var_name, (float) v);
+ // Append confidence interval alpha value
+ if(!is_bad_data(alpha)) var_name << "_a" << alpha;
- return;
+ return var_name;
+}
+
+////////////////////////////////////////////////////////////////////////
+
+ConcatString build_nc_var_name_probabilistic(
+ STATLineType lt, const ConcatString &col,
+ const PCTInfo &pct_info, double alpha) {
+
+ // Append the column name
+ ConcatString var_name("series_");
+ var_name << to_lower(statlinetype_to_string(lt)) << "_" << col;
+
+ // Append the observation threshold
+ var_name << "_obs" << pct_info.othresh.get_abbr_str();
+
+ // Append confidence interval alpha value
+ if(!is_bad_data(alpha)) var_name << "_a" << alpha;
+
+ return var_name;
}
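For reference, these helpers compose the output variable names from a "series_" prefix, the lower-cased line type, the column, any thresholds, and an optional alpha suffix. The toy function below mimics that naming pattern with standard-library strings only; the exact threshold abbreviations MET produces may differ, so treat the output as an approximation.

#include <iostream>
#include <sstream>
#include <string>

// Approximate the "series_<linetype>_<column>[_obs<thresh>][_a<alpha>]" pattern.
std::string build_name(const std::string &lt, const std::string &col,
                       const std::string &obs_thresh, double alpha) {
   std::ostringstream os;
   os << "series_" << lt << "_" << col;
   if(!obs_thresh.empty()) os << "_obs" << obs_thresh;
   if(alpha > 0.0)         os << "_a" << alpha;   // omit when alpha is unset
   return os.str();
}

int main() {
   // e.g. "series_pstd_BRIER_obsgt0.0_a0.05"
   std::cout << build_name("pstd", "BRIER", "gt0.0", 0.05) << "\n";
   return 0;
}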
////////////////////////////////////////////////////////////////////////
@@ -2200,9 +2204,10 @@ void setup_nc_file(const VarInfo *fcst_info, const VarInfo *obs_info) {
if (deflate_level < 0) deflate_level = conf_info.get_compression_level();
// Add the series length variable
- NcVar var = add_var(nc_out, "n_series", ncInt, deflate_level);
+ NcVar var = add_var(nc_out, n_series_var_name, ncInt, deflate_level);
add_att(&var, "long_name", "length of series");
+ int n_series = n_series_pair + n_series_aggr;
if(!put_nc_data(&var, &n_series)) {
mlog << Error << "\nsetup_nc_file() -> "
<< "error writing the series length variable.\n\n";
@@ -2217,68 +2222,77 @@ void setup_nc_file(const VarInfo *fcst_info, const VarInfo *obs_info) {
////////////////////////////////////////////////////////////////////////
-void add_nc_var(const ConcatString &var_name,
- const ConcatString &name,
- const ConcatString &long_name,
- const ConcatString &fcst_thresh,
- const ConcatString &obs_thresh,
- double alpha) {
- NcVarData d;
-
- int deflate_level = compress_level;
- if (deflate_level < 0) deflate_level = conf_info.get_compression_level();
-
- // Add a new variable to the NetCDF file
- NcVar var = add_var(nc_out, (string)var_name, ncFloat, lat_dim, lon_dim, deflate_level);
- d.var = new NcVar(var);
-
- // Add variable attributes
- add_att(d.var, "_FillValue", bad_data_float);
- if(name.length() > 0) add_att(d.var, "name", (string)name);
- if(long_name.length() > 0) add_att(d.var, "long_name", (string)long_name);
- if(fcst_thresh.length() > 0) add_att(d.var, "fcst_thresh", (string)fcst_thresh);
- if(obs_thresh.length() > 0) add_att(d.var, "obs_thresh", (string)obs_thresh);
- if(!is_bad_data(alpha)) add_att(d.var, "alpha", alpha);
-
- // Store the new NcVarData object in the map
- stat_data[var_name] = d;
+void add_stat_data(const ConcatString &var_name,
+ const ConcatString &name,
+ const ConcatString &long_name,
+ const ConcatString &fcst_thresh,
+ const ConcatString &obs_thresh,
+ double alpha) {
+
+ NcVarData data;
+ data.dp.set_size(grid.nx(), grid.ny(), bad_data_double);
+ data.name = name;
+ data.long_name = long_name;
+ data.fcst_thresh = fcst_thresh;
+ data.obs_thresh = obs_thresh;
+ data.alpha = alpha;
+
+ // Store the new NcVarData object
+ stat_data[var_name] = data;
+ stat_data_keys.push_back(var_name);
return;
}
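A brief note on this design: stat_data_keys records the order in which variables are first added, so the writer can emit them in insertion order rather than the alphabetical order that iterating a std::map alone would give. A minimal stand-alone illustration of that map-plus-key-list pattern, using standard containers instead of the MET types:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

int main() {
   std::map<std::string, double> stats;     // fast lookup by variable name
   std::vector<std::string> keys;           // remembers insertion order

   auto add = [&](const std::string &k, double v) {
      if(stats.count(k) == 0) keys.push_back(k);
      stats[k] = v;
   };

   add("series_cnt_RMSE", 1.70);
   add("series_cts_CSI",  0.42);

   // Emitted in the order added, not alphabetically
   for(const auto &k : keys) std::printf("%s = %.2f\n", k.c_str(), stats[k]);
   return 0;
}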
////////////////////////////////////////////////////////////////////////
-void put_nc_val(int n, const ConcatString &var_name, float v) {
- int x, y;
+void write_stat_data() {
- // Determine x,y location
- DefaultTO.one_to_two(grid.nx(), grid.ny(), n, x, y);
+ mlog << Debug(2)
+ << "Writing " << stat_data_keys.size()
+ << " output variables.\n";
- // Check for key in the map
- if(stat_data.count(var_name) == 0) {
- mlog << Error << "\nput_nc_val() -> "
- << "variable name \"" << var_name
- << "\" does not exist in the map.\n\n";
- exit(1);
+ int deflate_level = compress_level;
+ if(deflate_level < 0) deflate_level = conf_info.get_compression_level();
+
+ // Allocate memory to store data values for each grid point
+ float *data = new float [grid.nx()*grid.ny()];
+
+ // Write output for each stat_data map entry
+ for(auto &key : stat_data_keys) {
+
+ NcVarData *ptr = &stat_data[key];
+
+ // Add a new variable to the NetCDF file
+ NcVar nc_var = add_var(nc_out, key, ncFloat, lat_dim, lon_dim, deflate_level);
+
+ // Add variable attributes
+ add_att(&nc_var, "_FillValue", bad_data_float);
+ add_att(&nc_var, "name", ptr->name);
+ add_att(&nc_var, "long_name", ptr->long_name);
+ if(ptr->fcst_thresh.length() > 0) add_att(&nc_var, "fcst_thresh", ptr->fcst_thresh);
+ if(ptr->obs_thresh.length() > 0) add_att(&nc_var, "obs_thresh", ptr->obs_thresh);
+ if(!is_bad_data(ptr->alpha)) add_att(&nc_var, "alpha", ptr->alpha);
+
+ // Store the data
+ for(int x=0; x<grid.nx(); x++) {
+ for(int y=0; y<grid.ny(); y++) {
+ data[y*grid.nx() + x] = ptr->dp(x, y);
+ } // end for y
+ } // end for x
+
+ // Write out the data
+ if(!put_nc_data_with_dims(&nc_var, &data[0], grid.ny(), grid.nx())) {
+ mlog << Error << "\nwrite_stat_data() -> "
+ << R"(error writing ")" << key
+ << R"(" data to the output file.)" << "\n\n";
+ exit(1);
+ }
}
- // Get the NetCDF variable to be written
- NcVar *var = stat_data[var_name].var;
-
- long offsets[2];
- long lengths[2];
- offsets[0] = y;
- offsets[1] = x;
- lengths[0] = 1;
- lengths[1] = 1;
-
- // Store the current value
- if(!put_nc_data(var, &v, lengths, offsets)) {
- mlog << Error << "\nput_nc_val() -> "
- << "error writing to variable " << var_name
- << " for point (" << x << ", " << y << ").\n\n";
- exit(1);
- }
+ // Clean up
+ if(data) { delete [] data; data = (float *) nullptr; }
return;
}
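The nested loops above flatten each (x, y) statistic field into a single buffer whose dimensions match the NetCDF (lat, lon) variable. A tiny stand-alone sketch of that row-major indexing, assuming the latitude-major layout implied by writing with dimensions (ny, nx):

#include <cstdio>
#include <vector>

int main() {
   const int nx = 4;   // longitudes
   const int ny = 3;   // latitudes
   std::vector<float> data(nx * ny, 0.0f);

   // The value at grid point (x, y) lands at offset y*nx + x
   for(int x = 0; x < nx; x++) {
      for(int y = 0; y < ny; y++) {
         data[y*nx + x] = static_cast<float>(100*y + x);
      }
   }

   std::printf("data[%d] = %.0f for (x=3, y=2)\n", 2*nx + 3, data[2*nx + 3]);
   return 0;
}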
@@ -2311,25 +2325,23 @@ void set_range(const int &t, int &beg, int &end) {
void clean_up() {
- // Deallocate NetCDF variable for each map entry
- map<ConcatString,NcVarData>::const_iterator it;
- for(it=stat_data.begin(); it!=stat_data.end(); it++) {
- if(it->second.var) { delete it->second.var; }
- }
-
// Close the output NetCDF file
if(nc_out) {
// List the NetCDF file after it is finished
- mlog << Debug(1) << "Output file: " << out_file << "\n";
+ mlog << Debug(1)
+ << "Output file: " << out_file << "\n";
delete nc_out;
nc_out = (NcFile *) nullptr;
}
+ // Close the aggregate NetCDF file
+ if(aggr_nc.MetNc) aggr_nc.close();
+
// Deallocate memory for data files
- if(fcst_mtddf) { delete fcst_mtddf; fcst_mtddf = (Met2dDataFile *) nullptr; }
- if(obs_mtddf) { delete obs_mtddf; obs_mtddf = (Met2dDataFile *) nullptr; }
+ if(fcst_mtddf) { delete fcst_mtddf; fcst_mtddf = nullptr; }
+ if(obs_mtddf) { delete obs_mtddf; obs_mtddf = nullptr; }
// Deallocate memory for the random number generator
rng_free(rng_ptr);
@@ -2348,6 +2360,7 @@ void usage() {
<< "\t-fcst file_1 ... file_n | fcst_file_list\n"
<< "\t-obs file_1 ... file_n | obs_file_list\n"
<< "\t[-both file_1 ... file_n | both_file_list]\n"
+ << "\t[-aggr file]\n"
<< "\t[-paired]\n"
<< "\t-out file\n"
<< "\t-config file\n"
@@ -2355,37 +2368,53 @@ void usage() {
<< "\t[-v level]\n"
<< "\t[-compress level]\n\n"
- << "\twhere\t\"-fcst file_1 ... file_n\" are the gridded "
+ << "\twhere\t"
+ << R"("-fcst file_1 ... file_n" are the gridded )"
<< "forecast files to be used (required).\n"
- << "\t\t\"-fcst fcst_file_list\" is an ASCII file containing "
+ << "\t\t"
+ << R"("-fcst fcst_file_list" is an ASCII file containing )"
<< "a list of gridded forecast files to be used (required).\n"
- << "\t\t\"-obs file_1 ... file_n\" are the gridded "
+ << "\t\t"
+ << R"("-obs file_1 ... file_n" are the gridded )"
<< "observation files to be used (required).\n"
- << "\t\t\"-obs obs_file_list\" is an ASCII file containing "
+ << "\t\t"
+ << R"("-obs obs_file_list" is an ASCII file containing )"
<< "a list of gridded observation files to be used (required).\n"
- << "\t\t\"-both\" sets the \"-fcst\" and \"-obs\" options to "
+ << "\t\t"
+ << R"("-both" sets the "-fcst" and "-obs" options to )"
<< "the same set of files (optional).\n"
- << "\t\t\"-paired\" to indicate that the input -fcst and -obs "
+ << "\t\t"
+ << R"("-aggr file" specifies a series_analysis output )"
+ << "file with partial sums and/or contingency table counts to be "
+ << "updated prior to deriving statistics (optional).\n"
+
+ << "\t\t"
+ << R"("-paired" to indicate that the input -fcst and -obs )"
<< "file lists are already paired (optional).\n"
- << "\t\t\"-out file\" is the NetCDF output file containing "
+ << "\t\t"
+ << R"("-out file" is the NetCDF output file containing )"
<< "computed statistics (required).\n"
- << "\t\t\"-config file\" is a SeriesAnalysisConfig file "
+ << "\t\t"
+ << R"("-config file" is a SeriesAnalysisConfig file )"
<< "containing the desired configuration settings (required).\n"
- << "\t\t\"-log file\" outputs log messages to the specified "
+ << "\t\t"
+ << R"("-log file" outputs log messages to the specified )"
<< "file (optional).\n"
- << "\t\t\"-v level\" overrides the default level of logging ("
+ << "\t\t"
+ << R"("-v level" overrides the default level of logging ()"
<< mlog.verbosity_level() << ") (optional).\n"
- << "\t\t\"-compress level\" overrides the compression level of NetCDF variable ("
+ << "\t\t"
+ << R"("-compress level" overrides the compression level of NetCDF variable ()"
<< conf_info.get_compression_level() << ") (optional).\n\n" << flush;
exit(1);
@@ -2412,6 +2441,12 @@ void set_both_files(const StringArray & a) {
////////////////////////////////////////////////////////////////////////
+void set_aggr(const StringArray & a) {
+ aggr_file = a[0];
+}
+
+////////////////////////////////////////////////////////////////////////
+
void set_paired(const StringArray & a) {
paired = true;
}
@@ -2449,8 +2484,8 @@ void parse_long_names() {
f_in.open(file_name.c_str());
if(!f_in) {
mlog << Error << "\nparse_long_names() -> "
- << "can't open the ASCII file \"" << file_name
- << "\" for reading\n\n";
+ << R"(can't open the ASCII file ") << file_name
+ << R"(" for reading!)" << "\n\n";
exit(1);
}
diff --git a/src/tools/core/series_analysis/series_analysis.h b/src/tools/core/series_analysis/series_analysis.h
index 2540540015..73b2f3d6f6 100644
--- a/src/tools/core/series_analysis/series_analysis.h
+++ b/src/tools/core/series_analysis/series_analysis.h
@@ -17,7 +17,6 @@
// 000 12/10/12 Halley Gotway New
// 001 09/28/22 Prestopnik MET #2227 Remove namespace std and netCDF from header files
//
-//
////////////////////////////////////////////////////////////////////////
#ifndef __SERIES_ANALYSIS_H__
@@ -43,6 +42,7 @@
#include "series_analysis_conf_info.h"
#include "vx_data2d_factory.h"
+#include "vx_data2d_nc_met.h"
#include "vx_grid.h"
#include "vx_util.h"
#include "vx_stat_out.h"
@@ -60,6 +60,11 @@ static const char * program_name = "series_analysis";
static const char * default_config_filename =
"MET_BASE/config/SeriesAnalysisConfig_default";
+static const char * all_columns = "ALL";
+static const char * n_series_var_name = "n_series";
+
+static const char * total_name = "TOTAL";
+
////////////////////////////////////////////////////////////////////////
//
// Variables for Command Line Arguments
@@ -68,10 +73,11 @@ static const char * default_config_filename =
// Input files
static StringArray fcst_files, found_fcst_files;
-static StringArray obs_files, found_obs_files;
-static GrdFileType ftype = FileType_None;
-static GrdFileType otype = FileType_None;
-static bool paired = false;
+static StringArray obs_files, found_obs_files;
+static GrdFileType ftype = FileType_None;
+static GrdFileType otype = FileType_None;
+static ConcatString aggr_file;
+static bool paired = false;
static int compress_level = -1;
// Output file
@@ -88,17 +94,26 @@ static SeriesAnalysisConfInfo conf_info;
////////////////////////////////////////////////////////////////////////
// Output NetCDF file
-static netCDF::NcFile *nc_out = (netCDF::NcFile *) nullptr;
-static netCDF::NcDim lat_dim;
-static netCDF::NcDim lon_dim ;
+static netCDF::NcFile *nc_out = nullptr;
+static netCDF::NcDim lat_dim;
+static netCDF::NcDim lon_dim ;
-// Structure to store computed statistics and corresponding metadata
+// Structure to store computed statistics
struct NcVarData {
- netCDF::NcVar * var; // Pointer to NetCDF variable
+ DataPlane dp;
+ std::string name;
+ std::string long_name;
+ std::string fcst_thresh;
+ std::string obs_thresh;
+ double alpha;
};
// Mapping of NetCDF variable name to computed statistic
-std::map<ConcatString,NcVarData> stat_data;
+std::map<ConcatString,NcVarData> stat_data;
+std::vector<ConcatString> stat_data_keys;
+
+// Mapping of aggregate NetCDF variable name to DataPlane
+std::map<ConcatString,DataPlane> aggr_data;
////////////////////////////////////////////////////////////////////////
//
@@ -108,16 +123,16 @@ std::map<ConcatString,NcVarData> stat_data;
// Grid variables
static Grid grid;
-static int nxy = 0;
static int n_reads = 1; // Initialize to at least one pass
// Data file factory and input files
static Met2dDataFileFactory mtddf_factory;
-static Met2dDataFile *fcst_mtddf = (Met2dDataFile *) nullptr;
-static Met2dDataFile *obs_mtddf = (Met2dDataFile *) nullptr;
+static Met2dDataFile *fcst_mtddf = nullptr;
+static Met2dDataFile *obs_mtddf = nullptr;
+static MetNcMetDataFile aggr_nc;
// Pointer to the random number generator to be used
-static gsl_rng *rng_ptr = (gsl_rng *) nullptr;
+static gsl_rng *rng_ptr = nullptr;
// Enumeration of ways that a series can be defined
enum class SeriesType {
@@ -130,7 +145,8 @@ enum class SeriesType {
static SeriesType series_type = SeriesType::None;
// Series length
-static int n_series = 0;
+static int n_series_pair = 0; // Input pair data series
+static int n_series_aggr = 0; // Input aggr series
// Range of timing values encountered in the data
static unixtime fcst_init_beg = (unixtime) 0;
diff --git a/src/tools/core/stat_analysis/aggr_stat_line.cc b/src/tools/core/stat_analysis/aggr_stat_line.cc
index 441cbe5e07..6c0a7add52 100644
--- a/src/tools/core/stat_analysis/aggr_stat_line.cc
+++ b/src/tools/core/stat_analysis/aggr_stat_line.cc
@@ -706,7 +706,7 @@ void aggr_summary_lines(LineDataFile &f, STATAnalysisJob &job,
else if(do_cnt && (line.type() == STATLineType::sl1l2 ||
line.type() == STATLineType::sal1l2)) {
parse_sl1l2_line(line, sl1l2_info);
- compute_cntinfo(sl1l2_info, 0, cnt_info);
+ compute_cntinfo(sl1l2_info, cnt_info);
}
//
@@ -731,7 +731,7 @@ void aggr_summary_lines(LineDataFile &f, STATAnalysisJob &job,
//
if((line.type() == STATLineType::fho ||
line.type() == STATLineType::ctc) && lty == STATLineType::cts) {
- v = cts_info.get_stat(req_col[i].c_str());
+ v = cts_info.get_stat_cts(req_col[i].c_str());
w = cts_info.cts.n();
}
else if(line.type() == STATLineType::sl1l2 && lty == STATLineType::cnt) {
@@ -743,7 +743,7 @@ void aggr_summary_lines(LineDataFile &f, STATAnalysisJob &job,
w = cnt_info.n;
}
else if(line.type() == STATLineType::vl1l2 && lty == STATLineType::vcnt) {
- v = vl1l2_info.get_stat(req_col[i].c_str());
+ v = vl1l2_info.get_stat_vcnt(req_col[i].c_str());
w = (is_vector_dir_stat(line.type(), req_col[i].c_str()) ?
vl1l2_info.dcount :
vl1l2_info.vcount);
@@ -1519,7 +1519,7 @@ void aggr_psum_lines(LineDataFile &f, STATAnalysisJob &job,
//
// Compute the stats for the current time
//
- compute_cntinfo(cur_sl1l2, 0, cur_cnt);
+ compute_cntinfo(cur_sl1l2, cur_cnt);
//
// Append the stats
diff --git a/src/tools/core/stat_analysis/skill_score_index_job.cc b/src/tools/core/stat_analysis/skill_score_index_job.cc
index 9651e50a0d..88bc450900 100644
--- a/src/tools/core/stat_analysis/skill_score_index_job.cc
+++ b/src/tools/core/stat_analysis/skill_score_index_job.cc
@@ -245,17 +245,17 @@ SSIDXData SSIndexJobInfo::compute_ss_index() {
// Continuous stats
if(job_lt[i] == STATLineType::sl1l2) {
- compute_cntinfo(fcst_sl1l2[i], 0, fcst_cnt);
+ compute_cntinfo(fcst_sl1l2[i], fcst_cnt);
fcst_stat = fcst_cnt.get_stat(fcst_job[i].column[0].c_str());
- compute_cntinfo(ref_sl1l2[i], 0, ref_cnt);
+ compute_cntinfo(ref_sl1l2[i], ref_cnt);
ref_stat = ref_cnt.get_stat(fcst_job[i].column[0].c_str());
}
// Categorical stats
else if(job_lt[i] == STATLineType::ctc) {
fcst_cts[i].compute_stats();
- fcst_stat = fcst_cts[i].get_stat(fcst_job[i].column[0].c_str());
+ fcst_stat = fcst_cts[i].get_stat_cts(fcst_job[i].column[0].c_str());
ref_cts[i].compute_stats();
- ref_stat = ref_cts[i].get_stat(fcst_job[i].column[0].c_str());
+ ref_stat = ref_cts[i].get_stat_cts(fcst_job[i].column[0].c_str());
}
else {
mlog << Error << "\nSSIndexJobInfo::compute_ss_index() -> "
diff --git a/src/tools/core/stat_analysis/stat_analysis_job.cc b/src/tools/core/stat_analysis/stat_analysis_job.cc
index 3492309da1..b3a9eb12cb 100644
--- a/src/tools/core/stat_analysis/stat_analysis_job.cc
+++ b/src/tools/core/stat_analysis/stat_analysis_job.cc
@@ -1876,10 +1876,7 @@ void write_job_aggr_psum(STATAnalysisJob &job, STATLineType lt,
//
// Compute CNTInfo statistics from the aggregated partial sums
//
- if(it->second.sl1l2_info.scount > 0)
- compute_cntinfo(it->second.sl1l2_info, 0, it->second.cnt_info);
- else
- compute_cntinfo(it->second.sl1l2_info, 1, it->second.cnt_info);
+ compute_cntinfo(it->second.sl1l2_info, it->second.cnt_info);
if(job.stat_out) {
write_cnt_cols(it->second.cnt_info, 0, job.stat_at,
@@ -2610,7 +2607,7 @@ void write_job_aggr_ssvar(STATAnalysisJob &job, STATLineType lt,
//
// Compute CNTInfo statistics from the aggregated partial sums
//
- compute_cntinfo(bin_it->second.sl1l2_info, 0, cnt_info);
+ compute_cntinfo(bin_it->second.sl1l2_info, cnt_info);
//
// Write the output STAT line
diff --git a/src/tools/other/pb2nc/pb2nc.cc b/src/tools/other/pb2nc/pb2nc.cc
index a9abdf1be9..94a7750bad 100644
--- a/src/tools/other/pb2nc/pb2nc.cc
+++ b/src/tools/other/pb2nc/pb2nc.cc
@@ -58,9 +58,10 @@
// from header files
// 019 07/21/23 Prestopnik, J. MET #2615 Add #include to compile
// successfully using gcc12
+// 020 08/26/24 Halley Gotway MET #2938 Silence center time warnings
+//
////////////////////////////////////////////////////////////////////////
-
#include
#include
#include
@@ -467,7 +468,7 @@ int met_main(int argc, char *argv[]) {
if (collect_metadata) {
// Process each PrepBufr file
- for(int i=0; i ";
@@ -933,7 +933,7 @@ void process_pbfile(int i_pb) {
if(dump_flag) {
// Check for multiple PrepBufr files
- if(pbfile.n_elements() > 1) {
+ if(pbfile.n() > 1) {
mlog << Error << "\n" << method_name
<< "the \"-dump\" and \"-pbfile\" options may not be "
<< "used together. Only one Bufr file may be dump "
@@ -1067,15 +1067,14 @@ void process_pbfile(int i_pb) {
cape_h = pbl_h = 0;
cape_p = pbl_p = bad_data_float;
- diff_file_time_count = 0;
cycle_minute = missing_cycle_minute; // initialize
- // Derive quantities which can be derived from
- // P, Q, T, Z, U, V
+ // Check the number of variables to be derived from:
+ // P, Q, T, Z, U, V
if (n_derive_gc > bufr_derive_cfgs.size()) {
- mlog << Debug(3) << "\n" << method_name
- << "Skip the derived variables because of not requested ("
- << bufr_derive_cfgs.size() << ").\n\n";
+ mlog << Debug(3) << method_name
+ << "No observation variables requested to be derived ("
+ << bufr_derive_cfgs.size() << ").\n";
}
for (int idx=0; idx= debug_level_for_performance) {
end_t = clock();
- cout << (end_t-start_t)/double(CLOCKS_PER_SEC)
- << " seconds\n";
+ cout << (end_t-start_t)/double(CLOCKS_PER_SEC) << " seconds\n";
start_t = clock();
}
}
@@ -1149,10 +1147,10 @@ void process_pbfile(int i_pb) {
<< " to " << end_time_str << "\n";
}
- else if(file_ut != msg_ut) {
- diff_file_time_count++;
- if (!diff_file_times.has(msg_ut)) diff_file_times.add(msg_ut);
- }
+
+ // Keep track of the unique message reference times,
+ // searching from newest to oldest
+ unique_msg_ut.add_uniq(msg_ut, false);
// Add minutes by calling IUPVS01(unit, "MINU")
if (cycle_minute != missing_cycle_minute) {
@@ -1802,7 +1800,7 @@ void process_pbfile(int i_pb) {
int n_other_file_obs = 0;
int n_other_total_obs = 0;
int n_other_hdr_obs = 0;
- int var_count = bufr_obs_name_arr.n_elements();
+ int var_count = bufr_obs_name_arr.n();
for (int vIdx=0; vIdx= debug_level_for_performance) {
end_t = clock();
log_message << (end_t-start_t)/double(CLOCKS_PER_SEC) << " seconds";
- //start_t = clock();
}
cout << log_message << "\n";
}
- if(0 < diff_file_time_count && 0 < diff_file_times.n_elements()) {
- mlog << Warning << "\n" << method_name
- << "The observation time should remain the same for "
- << "all " << (is_prepbufr ? "PrepBufr" : "Bufr") << " messages\n";
- mlog << Warning << method_name << " "
- << diff_file_time_count << " messages with different reference time ("
- << unix_to_yyyymmdd_hhmmss(file_ut) << "):\n";
- for (int idx=0; idx<diff_file_times.n_elements(); idx++) {
+ if(unique_msg_ut.n() > 1) {
+
+ ConcatString msg_cs;
+ msg_cs << "Found " << unique_msg_ut.n() << " unique "
+ << (is_prepbufr ? "PrepBufr" : "Bufr")
+ << " message reference time(s) from "
+ << unix_to_yyyymmdd_hhmmss(unique_msg_ut.min()) << " to "
+ << unix_to_yyyymmdd_hhmmss(unique_msg_ut.max()) << ".\n";
+
+ // Print warning if the time window was not defined on the command line
+ if(valid_beg_ut == (unixtime) 0 &&
+ valid_end_ut == (unixtime) 0) {
+ mlog << Warning << "\n" << method_name
+ << msg_cs << "\n"
+ << R"(Set the "-valid_beg" and/or "-valid_end" )"
+ << "command line options to define the retention "
+ << "time window.\n\n";
+ }
+ else {
+ mlog << Debug(3) << msg_cs;
}
- mlog << Warning << "\n";
}
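The unique_msg_ut bookkeeping above replaces the old per-message counter with a running collection of distinct reference times, reported once after the file is read. A rough stand-alone sketch of the same idea using a std::set; the container and variable names here are assumptions, not the MET TimeArray interface:

#include <cstdio>
#include <ctime>
#include <set>

int main() {
   std::set<std::time_t> unique_ut;   // keeps reference times unique and sorted

   const std::time_t msgs[] = { 1700000000, 1700003600, 1700000000 };
   for(std::time_t t : msgs) unique_ut.insert(t);

   // Report the range once, rather than warning on every differing message
   if(unique_ut.size() > 1) {
      std::printf("Found %zu unique reference times from %lld to %lld\n",
                  unique_ut.size(),
                  (long long) *unique_ut.begin(),
                  (long long) *unique_ut.rbegin());
   }
   return 0;
}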
nc_point_obs.write_observation();
- if(mlog.verbosity_level() > 0) cout << "\n" << flush;
-
mlog << Debug(2)
<< "Messages processed\t\t\t= " << npbmsg << "\n"
<< "Rejected based on message type\t\t= "
@@ -2065,7 +2070,7 @@ void process_pbfile(int i_pb) {
int debug_level = 5;
if(mlog.verbosity_level() >= debug_level) {
log_message = "Filtered time:";
- for (kk=0; kk variable \""
@@ -2388,9 +2393,9 @@ void process_pbfile_metadata(int i_pb) {
}
} // if (0 == i_read)
- if (0 == unchecked_var_list.n_elements()) break;
+ if (0 == unchecked_var_list.n()) break;
- int var_count = unchecked_var_list.n_elements();
+ int var_count = unchecked_var_list.n();
for (int vIdx=var_count-1; vIdx>=0; vIdx--) {
int nlev2, count;
bool has_valid_data;
@@ -2441,7 +2446,7 @@ void process_pbfile_metadata(int i_pb) {
bool has_prepbufr_vars = false;
const char * tmp_var_name;
bufr_obs_name_arr.clear();
- for (index=0; index obs_var_map = conf_info.getObsVarMap();
- for(int i=0; i= 0 && (do_all_vars || code < bufr_target_variables.n_elements()));
+ return(code >= 0 && (do_all_vars || code < bufr_target_variables.n()));
}
////////////////////////////////////////////////////////////////////////
bool keep_level_category(int category) {
- return(conf_info.level_category.n_elements() == 0 ||
+ return(conf_info.level_category.n() == 0 ||
conf_info.level_category.has(category, false));
}
@@ -2884,8 +2889,8 @@ void display_bufr_variables(const StringArray &all_vars, const StringArray &all_
ConcatString description;
ConcatString line_buf;
- mlog << Debug(1) << "\n Header variables (" << hdr_arr.n_elements() << ") :\n";
- for(i=0; i pqtzuv_map_tq,
log_array.add(buf.c_str());
}
offset = 0;
- for (int idx=log_array.n_elements()-1; idx>=0; idx--) {
+ for (int idx=log_array.n()-1; idx>=0; idx--) {
mlog << Debug(PBL_DEBUG_LEVEL) << method_name << "TQZ record: "
<< offset++ << "\t" << log_array[idx] << "\n";
}
@@ -3463,7 +3468,7 @@ void log_tqz_and_uv(map pqtzuv_map_tq,
log_array.add(buf.c_str());
}
offset = 0;
- for (int idx=log_array.n_elements()-1; idx>=0; idx--) {
+ for (int idx=log_array.n()-1; idx>=0; idx--) {
mlog << Debug(PBL_DEBUG_LEVEL) << method_name << " UV record: "
<< offset++ << "\t" << log_array[idx] << "\n";
}
@@ -3496,7 +3501,7 @@ void log_merged_tqz_uv(map pqtzuv_map_tq,
log_array.add(buf.c_str());
}
int offset = 0;
- for (int idx=log_array.n_elements()-1; idx>=0; idx--) {
+ for (int idx=log_array.n()-1; idx>=0; idx--) {
mlog << Debug(PBL_DEBUG_LEVEL) << method_name << " merged: "
<< offset++ << "\t" << log_array[idx] << "\n";
}
@@ -3523,7 +3528,7 @@ void log_pbl_input(int pbl_level, const char *method_name) {
int offset = 0;
mlog << Debug(PBL_DEBUG_LEVEL) << method_name
<< "input to calpbl_ (buffer): index, P, Q, T, Z, U, V\n";
- for (int idx=log_array.n_elements()-1; idx>=0; idx--) {
+ for (int idx=log_array.n()-1; idx>=0; idx--) {
mlog << Debug(PBL_DEBUG_LEVEL) << method_name << " "
<< offset++ << "\t" << log_array[idx] << "\n";
}