// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
// ** Copyright UCAR (c) 1992 - 2018
// ** University Corporation for Atmospheric Research (UCAR)
// ** National Center for Atmospheric Research (NCAR)
// ** Research Applications Lab (RAL)
// ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA
// *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*

////////////////////////////////////////////////////////////////////////
//
//   Filename:   point_stat.cc
//
//   Description:
//      Based on user specified parameters, this tool compares a
//      gridded forecast field to the point observation output of
//      the PB2NC or ASCII2NC tool.  It computes many verification
//      scores and statistics, including confidence intervals, to
//      summarize the comparison.
//
//   Mod#   Date      Name            Description
//   ----   ----      ----            -----------
//   000    04/18/07  Halley Gotway   New
//   001    12/20/07  Halley Gotway   Allow verification for level 0
//                                    for verifying PRMSL
//   002    01/24/08  Halley Gotway   In compute_cnt, print a warning
//                                    message when bad data values are
//                                    encountered.
//   003    01/24/08  Halley Gotway   Add support for writing the matched
//                                    pair data to the MPR file.
//   004    02/04/08  Halley Gotway   Modify to read new format of the
//                                    intermediate NetCDF
//                                    point-observation format.
//   005    02/11/08  Halley Gotway   Remove BIAS from the CNT line
//                                    since it's the same as the ME.
//   006    02/12/08  Halley Gotway   Fix bug in writing COP line to
//                                    write out OY and ON rather than
//                                    FY and FN.
//   007    02/12/08  Halley Gotway   Enable Point-Stat to read the
//                                    NetCDF output of PCP-Combine.
//   008    02/25/08  Halley Gotway   Write the output lines using the
//                                    routines in the vx_met_util
//                                    library.
//   009    07/01/08  Halley Gotway   Add the rank_corr_flag to the
//                                    config file to disable computing
//                                    rank correlations.
//   010    07/18/08  Halley Gotway   Fix bug in the write_mpr routine.
//                                    Replace i_fho with i_mpr.
//   011    09/23/08  Halley Gotway   Change argument sequence for the
//                                    GRIB record access routines.
//   012    11/03/08  Halley Gotway   Use the get_file_type routine to
//                                    determine the input file types.
//   013    03/13/09  Halley Gotway   Add support for verifying
//                                    probabilistic forecasts.
//   014    04/21/09  Halley Gotway   Fix bug for resetting obs_var.
//   015    05/07/10  Halley Gotway   Rename process_grid() to
//                                    setup_first_pass() and modify its
//                                    logic.
//   016    05/24/10  Halley Gotway   Rename command line options:
//                                    From -ncfile to -point_obs
//                                    From -valid_beg to -obs_valid_beg
//                                    From -valid_end to -obs_valid_end
//   017    05/27/10  Halley Gotway   Add -fcst_valid and -fcst_lead
//                                    command line options.
//   018    06/08/10  Halley Gotway   Add support for multi-category
//                                    contingency tables.
//   019    06/15/10  Halley Gotway   Dump reason codes for why
//                                    point observations were rejected.
//   020    06/30/10  Halley Gotway   Enhance grid equality checks.
//   021    10/20/11  Holmes          Added use of command line class to
//                                    parse the command line arguments.
//   022    11/14/11  Holmes          Added code to enable reading of
//                                    multiple config files.
//   023    02/02/12  Bullock         Set default output directory to "."
//   024    03/05/12  Halley Gotway   Fix bug in processing command line
//                                    for setting valid end times.
//   025    04/16/12  Halley Gotway   Switch to using vx_config library.
//   026    04/27/12  Halley Gotway   Move -fcst_valid and -fcst_lead
//                                    command line options to config
//                                    file.
//   027    02/25/13  Halley Gotway   Add duplicates rejection counts.
//   028    08/21/13  Halley Gotway   Fix sizing of output tables for 12
//                                    or more probabilistic thresholds.
//   029    07/09/14  Halley Gotway   Add station id exclusion option.
//   030    03/02/15  Halley Gotway   Add automated regridding.
//   031    07/30/15  Halley Gotway   Add conditional continuous
//                                    verification.
//   032    09/11/15  Halley Gotway   Add climatology to config file.
//   033    02/27/17  Halley Gotway   Add HiRA verification.
//   034    05/15/17  Prestopnik P    Add shape for HiRA, interp and
//                                    regrid.
//   035    06/16/17  Halley Gotway   Add ECLV line type.
//   036    11/01/17  Halley Gotway   Add binned climatologies.
// 037 02/06/18 Halley Gotway Restructure config logic to make // all options settable for each verification task. // 038 08/15/18 Halley Gotway Add mask.llpnt type. // 039 08/24/18 Halley Gotway Add ECNT output for HiRA. // //////////////////////////////////////////////////////////////////////// using namespace std; #include #include #include #include #include #include #include #include #include #include #include #include #include "point_stat.h" #include "vx_statistics.h" #include "vx_nc_util.h" #include "vx_regrid.h" #include "vx_log.h" #include "nc_obs_util.h" //////////////////////////////////////////////////////////////////////// #define BUFFER_SIZE (DEF_NC_BUFFER_SIZE/2) static void process_command_line (int, char **); static void setup_first_pass (const DataPlane &, const Grid &); static void setup_txt_files(); static void setup_table (AsciiTable &); static void build_outfile_name(unixtime, int, const char *, ConcatString &); static void process_fcst_climo_files(); static void process_obs_file(int); static void process_scores(); static void do_cts (CTSInfo *&, int, PairDataPoint *); static void do_mcts (MCTSInfo &, int, PairDataPoint *); static void do_cnt (CNTInfo *&, int, PairDataPoint *); static void do_sl1l2 (SL1L2Info *&, int, PairDataPoint *); static void do_vl1l2 (VL1L2Info *&, int, PairDataPoint *, PairDataPoint *); static void do_pct (PCTInfo *&, int, PairDataPoint *, int i_bin); static void do_hira_ens ( int, PairDataPoint *); static void do_hira_prob( int, PairDataPoint *); static void finish_txt_files(); static void clean_up(); static void usage(); static void set_point_obs(const StringArray &); static void set_ncfile(const StringArray &); static void set_obs_valid_beg_time(const StringArray &); static void set_obs_valid_end_time(const StringArray &); static void set_outdir(const StringArray &); static void set_logfile(const StringArray &); static void set_verbosity(const StringArray &); 
//////////////////////////////////////////////////////////////////////// int main(int argc, char *argv[]) { int i; // Set handler to be called for memory allocation error set_new_handler(oom); // Process the command line arguments process_command_line(argc, argv); // Process the forecast and climo files process_fcst_climo_files(); // Process each observation netCDF file for(i=0; i= beg_ut if(obs_valid_beg_ut != (unixtime) 0 && obs_valid_end_ut != (unixtime) 0 && obs_valid_beg_ut > obs_valid_end_ut) { mlog << Error << "\nprocess_command_line() -> " << "the ending time (" << unix_to_yyyymmdd_hhmmss(obs_valid_end_ut) << ") must be greater than the beginning time (" << unix_to_yyyymmdd_hhmmss(obs_valid_beg_ut) << ").\n\n"; exit(1); } // Store the input forecast and observation file names fcst_file = cline[0]; obs_file.insert(0, cline[1]); config_file = cline[2]; // Create the default config file names default_config_file = replace_path(default_config_filename); // List the config files mlog << Debug(1) << "Default Config File: " << default_config_file << "\n" << "User Config File: " << config_file << "\n"; // Read the config files conf_info.read_config(default_config_file, config_file); // Get the forecast file type from config, if present ftype = parse_conf_file_type(conf_info.conf.lookup_dictionary(conf_key_fcst)); // Read forecast file if(!(fcst_mtddf = mtddf_factory.new_met_2d_data_file(fcst_file, ftype))) { mlog << Error << "\nTrouble reading forecast file \"" << fcst_file << "\"\n\n"; exit(1); } // Use a variable index from var_name instead of GRIB code bool use_var_id = is_using_var_id(obs_file[0]); // Store the forecast file type ftype = fcst_mtddf->file_type(); // Process the configuration conf_info.process_config(ftype, use_var_id); // Set the model name shc.set_model(conf_info.model); // Use the first verification task to set the random number generator // and seed value for bootstrap confidence intervals rng_set(rng_ptr, conf_info.vx_opt[0].boot_info.rng, 
conf_info.vx_opt[0].boot_info.seed); // List the input files mlog << Debug(1) << "Forecast File: " << fcst_file << "\n"; for(i=0; iregrid(), &(data_grid), &(data_grid)); // Process the masks conf_info.process_masks(grid); // Setup the VxPairDataPoint objects conf_info.set_vx_pd(); // Store the lead and valid times if(fcst_valid_ut == (unixtime) 0) fcst_valid_ut = dp.valid(); if(is_bad_data(fcst_lead_sec)) fcst_lead_sec = dp.lead(); return; } //////////////////////////////////////////////////////////////////////// void setup_txt_files() { int i, max_col, max_prob_col, max_mctc_col, n_prob, n_cat, n_eclv; ConcatString base_name; // Create output file names for the stat file and optional text files build_outfile_name(fcst_valid_ut, fcst_lead_sec, "", base_name); ///////////////////////////////////////////////////////////////////// // // Setup the output STAT file // ///////////////////////////////////////////////////////////////////// // Get the maximum number of data columns n_prob = conf_info.get_max_n_fprob_thresh(); n_cat = conf_info.get_max_n_cat_thresh() + 1; n_eclv = conf_info.get_max_n_eclv_points(); // Check for HiRA output for(i=0; i max_stat_col ? max_prob_col : max_stat_col ); max_col = ( max_mctc_col > max_col ? 
max_mctc_col : max_col ); // Add the header columns max_col += n_header_columns + 1; // Initialize file stream stat_out = (ofstream *) 0; // Build the file name stat_file << base_name << stat_file_ext; // Create the output STAT file open_txt_file(stat_out, stat_file); // Setup the STAT AsciiTable stat_at.set_size(conf_info.n_stat_row() + 1, max_col); setup_table(stat_at); // Write the text header row write_header_row((const char **) 0, 0, 1, stat_at, 0, 0); // Initialize the row index to 1 to account for the header i_stat_row = 1; ///////////////////////////////////////////////////////////////////// // // Setup each of the optional output text files // ///////////////////////////////////////////////////////////////////// // Loop through output file type for(i=0; idata_plane_array( *conf_info.vx_opt[i].vx_pd.fcst_info, fcst_dpa); mlog << Debug(2) << "\n" << sep_str << "\n\n" << "Reading data for " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << ".\n"; // Check for zero fields if(n_fcst == 0) { mlog << Warning << "\nprocess_fcst_climo_files() -> " << "no fields matching " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << " found in file: " << fcst_file << "\n\n"; continue; } // Setup the first pass through the data if(is_first_pass) setup_first_pass(fcst_dpa[0], fcst_mtddf->grid()); // Regrid, if necessary if(!(fcst_mtddf->grid() == grid)) { mlog << Debug(1) << "Regridding " << fcst_dpa.n_planes() << " forecast field(s) for " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << " to the verification grid.\n"; // Loop through the forecast fields for(j=0; jgrid(), grid, conf_info.vx_opt[i].vx_pd.fcst_info->regrid()); } } // Rescale probabilities from [0, 100] to [0, 1] if(conf_info.vx_opt[i].vx_pd.fcst_info->p_flag()) { for(j=0; jmagic_str() << " found " << n_fcst << " forecast levels, " << cmn_dpa.n_planes() << " climatology mean levels, and " << csd_dpa.n_planes() << " climatology standard deviation levels.\n"; } // end for i // Check for no data 
if(is_first_pass) { mlog << Error << "\nprocess_fcst_climo_files() -> " << "no requested forecast data found! Exiting...\n\n"; exit(1); } mlog << Debug(2) << "\n" << sep_str << "\n\n"; return; } //////////////////////////////////////////////////////////////////////// void process_obs_file(int i_nc) { int j, i_obs; float obs_arr[OBS_ARRAY_LEN], hdr_arr[hdr_arr_len]; float prev_obs_arr[OBS_ARRAY_LEN]; char hdr_typ_str[max_str_len]; char hdr_sid_str[max_str_len]; char hdr_vld_str[max_str_len]; char obs_qty_str[max_str_len]; unixtime hdr_ut; NcFile *obs_in = (NcFile *) 0; // Set flags for vectors bool vflag = conf_info.get_vflag(); bool is_ugrd, is_vgrd; // Open the observation file as a NetCDF file. // The observation file must be in NetCDF format as the // output of the PB2NC or ASCII2NC tool. obs_in = open_ncfile(obs_file[i_nc]); if(IS_INVALID_NC_P(obs_in)) { delete obs_in; obs_in = (NcFile *) 0; mlog << Warning << "\nprocess_obs_file() -> " << "can't open observation netCDF file: " << obs_file[i_nc] << "\n\n"; return; } // Read the dimensions and variables NetcdfObsVars obs_vars; read_nc_dims_vars(obs_vars, obs_in); bool use_var_id = obs_vars.use_var_id; if (use_var_id) { NcDim var_dim = get_nc_dim(obs_in,nc_dim_nvar); get_dim_size(&var_dim); } int exit_code = check_nc_dims_vars(obs_vars); if(exit_code == exit_code_no_dim) { mlog << Error << "\nprocess_obs_file() -> " << "can't read \"mxstr\", \"nobs\" or \"nmsg\" " << "dimensions from netCDF file: " << obs_file[i_nc] << "\n\n"; exit(1); } if(exit_code == exit_code_no_hdr_vars) { mlog << Error << "\nprocess_obs_file() -> " << "can't read \"hdr_typ\", \"hdr_sid\", " << "or \"hdr_vld\" variables from netCDF file: " << obs_file[i_nc] << "\n\n"; exit(1); } if(exit_code == exit_code_no_loc_vars) { mlog << Error << "\nprocess_obs_file() -> " << "can't read \"hdr_arr\" or \"hdr_lat\" " << "variables from netCDF file: " << obs_file[i_nc] << "\n\n"; exit(1); } if(exit_code == exit_code_no_obs_vars) { mlog << Error << 
"\nprocess_obs_file() -> " << "can't read \"obs_arr\" or \"obs_val\" " << "variables from netCDF file: " << obs_file[i_nc] << "\n\n"; exit(1); } if(IS_INVALID_NC(obs_vars.obs_qty_var)) mlog << Debug(3) << "Quality marker information not found input file\n"; int obs_count = get_dim_size(&obs_vars.obs_dim); int hdr_count = get_dim_size(&obs_vars.hdr_dim); int var_name_len = get_nc_string_length(obs_in, obs_vars.obs_var, nc_var_obs_var); mlog << Debug(2) << "Searching " << obs_count << " observations from " << hdr_count << " messages.\n"; StringArray var_names; char var_name[var_name_len+1]; strcpy(var_name, ""); if (use_var_id) { if (!get_nc_data_to_array(obs_in, nc_var_obs_var, &var_names)) { mlog << Error << "\nprocess_obs_file() -> " << "trouble getting variable names from " << nc_var_obs_var << "\n\n"; exit(1); } } bool use_arr_vars = !IS_INVALID_NC(obs_vars.obs_arr_var); int buf_size = ((obs_count > BUFFER_SIZE) ? BUFFER_SIZE : (obs_count)); NcHeaderData header_data = get_nc_hdr_data(obs_vars); int typ_len = header_data.typ_len; int sid_len = header_data.sid_len; int vld_len = header_data.vld_len; int qty_len = get_nc_string_length(obs_in, obs_vars.obs_qty_tbl_var, (use_arr_vars ? nc_var_obs_qty : nc_var_obs_qty_tbl)); int obs_qty_idx_block[buf_size]; float obs_arr_block[buf_size][OBS_ARRAY_LEN]; char obs_qty_block[buf_size][qty_len]; StringArray obs_qty_array; if (!IS_INVALID_NC(obs_vars.obs_qty_tbl_var)) { if (!get_nc_data_to_array(&obs_vars.obs_qty_tbl_var, &obs_qty_array)) { mlog << Error << "\nprocess_obs_file() -> " << "trouble getting obs_qty\n\n"; exit(1); } //obs_qty_array.dump(cout); } int str_length; int block_size = (obs_count > BUFFER_SIZE) ? 
BUFFER_SIZE : obs_count; // Process each observation in the file for(int i_block_start_idx=0; i_block_start_idx BUFFER_SIZE) block_size = BUFFER_SIZE; if (!read_nc_obs_data(obs_vars, block_size, i_block_start_idx, qty_len, (float *)obs_arr_block, obs_qty_idx_block, (char *)obs_qty_block)) { exit(1); } int hdr_idx; strcpy(obs_qty_str, ""); for(int i_block_idx=0; i_block_idx= hdr_count) { mlog << Warning << "\nprocess_obs_file() -> " << "range check error for header index " << headerOffset << " from observation number " << i_obs << " of point observation file: " << obs_file[i_nc] << "\n\n"; continue; } // Read the corresponding header array for this observation hdr_arr[0] = header_data.lat_array[headerOffset]; hdr_arr[1] = header_data.lon_array[headerOffset]; hdr_arr[2] = header_data.elv_array[headerOffset]; // Read the corresponding header type for this observation hdr_idx = use_arr_vars ? headerOffset : header_data.typ_idx_array[headerOffset]; str_length = strlen(header_data.typ_array[hdr_idx]); if (str_length > typ_len) str_length = typ_len; strncpy(hdr_typ_str, header_data.typ_array[hdr_idx], str_length); hdr_typ_str[str_length] = bad_data_char; // Read the corresponding header Station ID for this observation hdr_idx = use_arr_vars ? headerOffset : header_data.sid_idx_array[headerOffset]; str_length = strlen(header_data.sid_array[hdr_idx]); if (str_length > sid_len) str_length = sid_len; strncpy(hdr_sid_str, header_data.sid_array[hdr_idx], str_length); hdr_sid_str[str_length] = bad_data_char; // Read the corresponding valid time for this observation hdr_idx = use_arr_vars ? 
headerOffset : header_data.vld_idx_array[headerOffset]; str_length = strlen(header_data.vld_array[hdr_idx]); if (str_length > vld_len) str_length = vld_len; strncpy(hdr_vld_str, header_data.vld_array[hdr_idx], str_length); hdr_vld_str[str_length] = bad_data_char; // Check for wind components is_ugrd = ( use_var_id && var_name == ugrd_abbr_str ) || (!use_var_id && nint(obs_arr[1]) == ugrd_grib_code); is_vgrd = ( use_var_id && var_name == vgrd_abbr_str ) || (!use_var_id && nint(obs_arr[1]) == vgrd_grib_code); // If the current observation is UGRD, save it as the // previous. If vector winds are to be computed, UGRD // must be followed by VGRD if(vflag && is_ugrd) { for(j=0; j<4; j++) prev_obs_arr[j] = obs_arr[j]; } // If the current observation is VGRD and vector // winds are to be computed. Make sure that the // previous observation was UGRD with the same header // and at the same vertical level. if(vflag && is_vgrd) { if(!is_eq(obs_arr[0], prev_obs_arr[0]) || !is_eq(obs_arr[2], prev_obs_arr[2]) || !is_eq(obs_arr[3], prev_obs_arr[3])) { mlog << Error << "\nprocess_obs_file() -> " << "for observation index " << i_obs << ", when computing VL1L2 and/or VAL1L2 vector winds " << "each UGRD observation must be followed by a VGRD " << "observation with the same header and at the same " << "level.\n\n"; exit(1); } } // Convert string to a unixtime hdr_ut = timestring_to_unix(hdr_vld_str); int grib_code = obs_arr[1]; if (use_var_id && grib_code < var_names.n_elements()) { strcpy(var_name, var_names[grib_code]); obs_arr[1] = bad_data_int; } else { strcpy(var_name, ""); } // Check each conf_info.vx_pd object to see if this observation // should be added for(j=0; jname()); // Set the forecast level name shc.set_fcst_lev(conf_info.vx_opt[i].vx_pd.fcst_info->level_name()); // Store the observation variable name shc.set_obs_var(conf_info.vx_opt[i].vx_pd.obs_info->name()); // Set the observation level name shc.set_obs_lev(conf_info.vx_opt[i].vx_pd.obs_info->level_name()); // Set 
the forecast lead time shc.set_fcst_lead_sec(conf_info.vx_opt[i].vx_pd.fcst_dpa[0].lead()); // Set the forecast valid time shc.set_fcst_valid_beg(conf_info.vx_opt[i].vx_pd.fcst_dpa[0].valid()); shc.set_fcst_valid_end(conf_info.vx_opt[i].vx_pd.fcst_dpa[0].valid()); // Set the observation lead time shc.set_obs_lead_sec(0); // Set the observation valid time shc.set_obs_valid_beg(conf_info.vx_opt[i].vx_pd.beg_ut); shc.set_obs_valid_end(conf_info.vx_opt[i].vx_pd.end_ut); // Loop through the message types for(j=0; jmagic_str() << " versus " << conf_info.vx_opt[i].vx_pd.obs_info->magic_str() << ", for observation type " << pd_ptr->msg_typ << ", over region " << pd_ptr->mask_name << ", for interpolation method " << shc.get_interp_mthd() << "(" << shc.get_interp_pnts_str() << "), using " << pd_ptr->n_obs << " pairs.\n"; // Dump out detailed information about why observations were rejected mlog << Debug(3) << "Number of matched pairs = " << pd_ptr->n_obs << "\n" << "Observations processed = " << conf_info.vx_opt[i].vx_pd.n_try << "\n" << "Rejected: SID exclusion = " << conf_info.vx_opt[i].vx_pd.rej_sid_exc << "\n" << "Rejected: obs type = " << conf_info.vx_opt[i].vx_pd.rej_gc << "\n" << "Rejected: valid time = " << conf_info.vx_opt[i].vx_pd.rej_vld << "\n" << "Rejected: bad obs value = " << conf_info.vx_opt[i].vx_pd.rej_obs << "\n" << "Rejected: off the grid = " << conf_info.vx_opt[i].vx_pd.rej_grd << "\n" << "Rejected: level mismatch = " << conf_info.vx_opt[i].vx_pd.rej_lvl << "\n" << "Rejected: quality marker = " << conf_info.vx_opt[i].vx_pd.rej_qty << "\n" << "Rejected: message type = " << conf_info.vx_opt[i].vx_pd.rej_typ[j][k][l] << "\n" << "Rejected: masking region = " << conf_info.vx_opt[i].vx_pd.rej_mask[j][k][l] << "\n" << "Rejected: bad fcst value = " << conf_info.vx_opt[i].vx_pd.rej_fcst[j][k][l] << "\n" << "Rejected: duplicates = " << conf_info.vx_opt[i].vx_pd.rej_dup[j][k][l] << "\n"; // Continue for no matched pairs if(pd_ptr->n_obs == 0) continue; // Write out 
the MPR lines if(conf_info.vx_opt[i].output_flag[i_mpr] != STATOutputType_None) { write_mpr_row(shc, pd_ptr, conf_info.vx_opt[i].output_flag[i_mpr] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_mpr], i_txt_row[i_mpr]); // Reset the observation valid time shc.set_obs_valid_beg(conf_info.vx_opt[i].vx_pd.beg_ut); shc.set_obs_valid_end(conf_info.vx_opt[i].vx_pd.end_ut); } // Compute CTS scores if(!conf_info.vx_opt[i].vx_pd.fcst_info->is_prob() && conf_info.vx_opt[i].fcat_ta.n_elements() > 0 && (conf_info.vx_opt[i].output_flag[i_fho] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_ctc] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_cts] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_eclv] != STATOutputType_None)) { // Initialize for(m=0; mis_prob() && conf_info.vx_opt[i].fcat_ta.n_elements() > 1 && (conf_info.vx_opt[i].output_flag[i_mctc] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_mcts] != STATOutputType_None)) { // Initialize mcts_info.clear(); // Compute MCTS Info do_mcts(mcts_info, i, pd_ptr); // Write out MCTC if(conf_info.vx_opt[i].output_flag[i_mctc] != STATOutputType_None && mcts_info.cts.total() > 0) { write_mctc_row(shc, mcts_info, conf_info.vx_opt[i].output_flag[i_mctc] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_mctc], i_txt_row[i_mctc]); } // Write out MCTS if(conf_info.vx_opt[i].output_flag[i_mcts] != STATOutputType_None && mcts_info.cts.total() > 0) { write_mcts_row(shc, mcts_info, conf_info.vx_opt[i].output_flag[i_mcts] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_mcts], i_txt_row[i_mcts]); } } // end Compute MCTS scores // Compute CNT scores if(!conf_info.vx_opt[i].vx_pd.fcst_info->is_prob() && conf_info.vx_opt[i].output_flag[i_cnt] != STATOutputType_None) { // Initialize for(m=0; mis_prob() && (conf_info.vx_opt[i].output_flag[i_sl1l2] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_sal1l2] != STATOutputType_None)) { // Initialize for(m=0; m 0) { 
write_sl1l2_row(shc, sl1l2_info[m], conf_info.vx_opt[i].output_flag[i_sl1l2] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_sl1l2], i_txt_row[i_sl1l2]); } // Write out SAL1L2 if(conf_info.vx_opt[i].output_flag[i_sal1l2] != STATOutputType_None && sl1l2_info[m].sacount > 0) { write_sal1l2_row(shc, sl1l2_info[m], conf_info.vx_opt[i].output_flag[i_sal1l2] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_sal1l2], i_txt_row[i_sal1l2]); } } // end for m } // end Compute SL1L2 and SAL1L2 // Compute VL1L2 and VAL1L2 partial sums for UGRD,VGRD if(!conf_info.vx_opt[i].vx_pd.fcst_info->is_prob() && conf_info.vx_opt[i].vx_pd.fcst_info->is_v_wind() && conf_info.vx_opt[i].vx_pd.fcst_info->uv_index() >= 0 && (conf_info.vx_opt[i].output_flag[i_vl1l2] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_val1l2] != STATOutputType_None) ) { // Store the forecast variable name shc.set_fcst_var(ugrd_vgrd_abbr_str); // Store the observation variable name shc.set_obs_var(ugrd_vgrd_abbr_str); // Initialize for(m=0; muv_index(); // Check to make sure message types, masking regions, // and interpolation methods match if(conf_info.vx_opt[i].get_n_msg_typ() != conf_info.vx_opt[ui].get_n_msg_typ() || conf_info.vx_opt[i].get_n_mask() != conf_info.vx_opt[ui].get_n_mask() || conf_info.vx_opt[i].get_n_interp() != conf_info.vx_opt[ui].get_n_interp()) { mlog << Warning << "\nprocess_scores() -> " << "when computing VL1L2 and/or VAL1L2 vector " << "partial sums, the U and V components must " << "be processed using the same set of message " << "types, masking regions, and interpolation " << "methods. 
Failing to do so will cause " << "unexpected results!\n\n"; } // Compute VL1L2 and VAL1L2 do_vl1l2(vl1l2_info, i, &conf_info.vx_opt[ui].vx_pd.pd[j][k][l], &conf_info.vx_opt[i].vx_pd.pd[j][k][l]); // Loop through all of the wind speed thresholds for(m=0; m 0) { write_vl1l2_row(shc, vl1l2_info[m], conf_info.vx_opt[i].output_flag[i_vl1l2] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_vl1l2], i_txt_row[i_vl1l2]); } // Write out VAL1L2 if(conf_info.vx_opt[i].output_flag[i_val1l2] != STATOutputType_None && vl1l2_info[m].vacount > 0) { write_val1l2_row(shc, vl1l2_info[m], conf_info.vx_opt[i].output_flag[i_val1l2] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_val1l2], i_txt_row[i_val1l2]); } // Write out VCNT if(conf_info.vx_opt[i].output_flag[i_vcnt] != STATOutputType_None && vl1l2_info[m].vcount > 0) { write_vcnt_row(shc, vl1l2_info[m], conf_info.vx_opt[i].output_flag[i_vcnt] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_vcnt], i_txt_row[i_vcnt]); } } // end for m // Reset the forecast variable name shc.set_fcst_var(conf_info.vx_opt[i].vx_pd.fcst_info->name()); // Reset the observation variable name shc.set_obs_var(conf_info.vx_opt[i].vx_pd.obs_info->name()); } // end Compute VL1L2 and VAL1L2 // Compute PCT counts and scores if(conf_info.vx_opt[i].vx_pd.fcst_info->is_prob() && (conf_info.vx_opt[i].output_flag[i_pct] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_pstd] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_pjc] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_prc] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_eclv] != STATOutputType_None)) { // Initialize for(m=0; mcdf_na.n_valid() > 0 ? conf_info.vx_opt[i].get_n_cdf_bin() : 1); // Loop over the climo_cdf_bins for(m=0; m 1) cs << "_BIN" << m+1; shc.set_mask(cs); // Compute PCT do_pct(pct_info, i, pd_ptr, (n_cdf_bin > 1 ? 
m : bad_data_int)); // Loop through all of the thresholds for(n=0; nis_prob() && conf_info.vx_opt[i].hira_info.flag && conf_info.vx_opt[i].output_flag[i_ecnt] != STATOutputType_None) { pd_ptr = &conf_info.vx_opt[i].vx_pd.pd[j][k][0]; // Appy HiRA verification and write ensemble output do_hira_ens(i, pd_ptr); } // end HiRA for probabilities // Apply HiRA probabilistic verification logic if(!conf_info.vx_opt[i].vx_pd.fcst_info->is_prob() && conf_info.vx_opt[i].hira_info.flag && (conf_info.vx_opt[i].output_flag[i_mpr] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_pct] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_pstd] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_pjc] != STATOutputType_None || conf_info.vx_opt[i].output_flag[i_prc] != STATOutputType_None)) { pd_ptr = &conf_info.vx_opt[i].vx_pd.pd[j][k][0]; // Appy HiRA verification and write probabilistic output do_hira_prob(i, pd_ptr); } // end HiRA for probabilities } // end for k } // end for j mlog << Debug(2) << "\n" << sep_str << "\n\n"; } // end for i // Deallocate memory if(cts_info) { delete [] cts_info; cts_info = (CTSInfo *) 0; } if(cnt_info) { delete [] cnt_info; cnt_info = (CNTInfo *) 0; } if(sl1l2_info) { delete [] sl1l2_info; sl1l2_info = (SL1L2Info *) 0; } if(vl1l2_info) { delete [] vl1l2_info; vl1l2_info = (VL1L2Info *) 0; } if(pct_info) { delete [] pct_info; pct_info = (PCTInfo *) 0; } return; } //////////////////////////////////////////////////////////////////////// void do_cts(CTSInfo *&cts_info, int i_vx, PairDataPoint *pd_ptr) { int i, j, n_cat; mlog << Debug(2) << "Computing Categorical Statistics.\n"; // // Set up the CTSInfo thresholds and alpha values // n_cat = conf_info.vx_opt[i_vx].fcat_ta.n_elements(); for(i=0; if_na.n_elements() == 0 || pd_ptr->o_na.n_elements() == 0) return; // // Compute the stats, normal confidence intervals, and bootstrap // bootstrap confidence intervals // if(conf_info.vx_opt[i_vx].boot_info.interval == boot_bca_flag) { 
compute_cts_stats_ci_bca(rng_ptr, pd_ptr->f_na, pd_ptr->o_na, conf_info.vx_opt[i_vx].boot_info.n_rep, cts_info, n_cat, conf_info.vx_opt[i_vx].output_flag[i_cts] != STATOutputType_None, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.tmp_dir); } else { compute_cts_stats_ci_perc(rng_ptr, pd_ptr->f_na, pd_ptr->o_na, conf_info.vx_opt[i_vx].boot_info.n_rep, conf_info.vx_opt[i_vx].boot_info.rep_prop, cts_info, n_cat, conf_info.vx_opt[i_vx].output_flag[i_cts] != STATOutputType_None, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.tmp_dir); } return; } //////////////////////////////////////////////////////////////////////// void do_mcts(MCTSInfo &mcts_info, int i_vx, PairDataPoint *pd_ptr) { int i; mlog << Debug(2) << "Computing Multi-Category Statistics.\n"; // // Set up the MCTSInfo size, thresholds, and alpha values // mcts_info.cts.set_size(conf_info.vx_opt[i_vx].fcat_ta.n_elements() + 1); mcts_info.set_fthresh(conf_info.vx_opt[i_vx].fcat_ta); mcts_info.set_othresh(conf_info.vx_opt[i_vx].ocat_ta); mcts_info.allocate_n_alpha(conf_info.vx_opt[i_vx].get_n_ci_alpha()); for(i=0; if_na.n_elements() == 0 || pd_ptr->o_na.n_elements() == 0) return; // // Compute the stats, normal confidence intervals, and bootstrap // bootstrap confidence intervals // if(conf_info.vx_opt[i_vx].boot_info.interval == boot_bca_flag) { compute_mcts_stats_ci_bca(rng_ptr, pd_ptr->f_na, pd_ptr->o_na, conf_info.vx_opt[i_vx].boot_info.n_rep, mcts_info, conf_info.vx_opt[i_vx].output_flag[i_mcts] != STATOutputType_None, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.tmp_dir); } else { compute_mcts_stats_ci_perc(rng_ptr, pd_ptr->f_na, pd_ptr->o_na, conf_info.vx_opt[i_vx].boot_info.n_rep, conf_info.vx_opt[i_vx].boot_info.rep_prop, mcts_info, conf_info.vx_opt[i_vx].output_flag[i_mcts] != STATOutputType_None, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.tmp_dir); } return; } //////////////////////////////////////////////////////////////////////// void do_cnt(CNTInfo *&cnt_info, int i_vx, PairDataPoint 
////////////////////////////////////////////////////////////////////////
//
// NOTE(review): The text below appears to have been corrupted in
// transit (HTML-style mangling).  Every '<' comparison operator and
// some text following it has been stripped -- loop headers such as
// "for(i=0; if_na" and "for(j=0; jn_obs; j++)" are missing their
// "i < N; i++" portions -- and the statements have been collapsed
// onto a handful of very long physical lines.  This code will NOT
// compile as written; recover the authoritative text from version
// control before making any functional edit.  The comments here only
// annotate what each surviving fragment is, based on the visible
// signatures, mlog messages, and call sites.
//
// Visible fragments, in order:
//  * tail of do_cnt():   continuous statistics ("Computing Continuous
//                        Statistics.") with bootstrap confidence
//                        intervals -- BCa vs. percentile branch on
//                        boot_info.interval == boot_bca_flag, calling
//                        compute_cnt_stats_ci_bca / _perc.
//  * do_sl1l2():         scalar partial sums ("Computing Scalar
//                        Partial Sums."), filling SL1L2Info objects
//                        from the pair arrays (f_na, o_na, cmn_na,
//                        wgt_na).
//  * do_vl1l2():         vector partial sums; exits with an Error if
//                        the UGRD and VGRD pair counts (n_obs) differ,
//                        then fills VL1L2Info from both components.
//  * do_pct():           probabilistic counts/statistics ("Computing
//                        Probabilistic Statistics"), optionally
//                        subsetting pairs to one climo CDF bin via
//                        subset_climo_cdf_bin() when i_bin is not bad
//                        data; builds climo probabilities and calls
//                        compute_pctinfo().
//  * do_hira_ens():      HiRA ensemble verification -- gathers nearby
//                        forecast values per observation with
//                        get_interp_points(NBRHD), builds a
//                        PairDataEnsemble, computes ensemble stats,
//                        and writes ECNT rows when requested.
//  * head of do_hira_prob(): HiRA probabilistic verification --
//                        fractional coverage via compute_interp(NBRHD)
//                        for forecast and climo mean; continues past
//                        the end of this region (text is truncated
//                        mid-loop at "for(k=0; k").
//
////////////////////////////////////////////////////////////////////////
*pd_ptr) { int i, j; PairDataPoint pd; mlog << Debug(2) << "Computing Continuous Statistics.\n"; // // Process each filtering threshold // for(i=0; iis_precipitation() && conf_info.vx_opt[i_vx].vx_pd.obs_info->is_precipitation()); if(conf_info.vx_opt[i_vx].boot_info.interval == boot_bca_flag) { compute_cnt_stats_ci_bca(rng_ptr, pd.f_na, pd.o_na, pd.cmn_na, pd.wgt_na, precip_flag, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.vx_opt[i_vx].boot_info.n_rep, cnt_info[i], conf_info.tmp_dir); } else { compute_cnt_stats_ci_perc(rng_ptr, pd.f_na, pd.o_na, pd.cmn_na, pd.wgt_na, precip_flag, conf_info.vx_opt[i_vx].rank_corr_flag, conf_info.vx_opt[i_vx].boot_info.n_rep, conf_info.vx_opt[i_vx].boot_info.rep_prop, cnt_info[i], conf_info.tmp_dir); } } // end for i return; } //////////////////////////////////////////////////////////////////////// void do_sl1l2(SL1L2Info *&s_info, int i_vx, PairDataPoint *pd_ptr) { int i; mlog << Debug(2) << "Computing Scalar Partial Sums.\n"; // // Process each filtering threshold // for(i=0; if_na, pd_ptr->o_na, pd_ptr->cmn_na, pd_ptr->wgt_na); } // end for i return; } //////////////////////////////////////////////////////////////////////// void do_vl1l2(VL1L2Info *&v_info, int i_vx, PairDataPoint *ugrd_pd_ptr, PairDataPoint *vgrd_pd_ptr) { int i; // // Check that the number of pairs are the same // if(ugrd_pd_ptr->n_obs != vgrd_pd_ptr->n_obs) { mlog << Error << "\ndo_vl1l2() -> " << "unequal number of UGRD and VGRD pairs (" << ugrd_pd_ptr->n_obs << " != " << vgrd_pd_ptr->n_obs << ")\n\n"; exit(1); } // // Set all of the VL1L2Info objects // for(i=0; if_na, vgrd_pd_ptr->f_na, ugrd_pd_ptr->o_na, vgrd_pd_ptr->o_na, ugrd_pd_ptr->cmn_na, vgrd_pd_ptr->cmn_na, ugrd_pd_ptr->wgt_na); } // end for i return; } //////////////////////////////////////////////////////////////////////// void do_pct(PCTInfo *&pct_info, int i_vx, PairDataPoint *pd_ptr, int i_bin) { int i, j, n_pct; PairDataPoint pd_bin; NumArray climo_prob; // No binned climatology
// NOTE(review): do_pct() body continues below -- branches on whether
// i_bin selects a single climo CDF bin or all pairs at once.
if(is_bad_data(i_bin)) { mlog << Debug(2) << "Computing Probabilistic Statistics.\n"; } // Binned climatology else { mlog << Debug(2) << "Computing Probabilistic Statistics " << "for climo CDF bin number " << i_bin+1 << " of " << conf_info.vx_opt[i_vx].get_n_cdf_bin() << " (" << conf_info.vx_opt[i_vx].climo_cdf_ta[i_bin].get_str() << ").\n"; // Subset the matched pairs for the current bin pd_bin = subset_climo_cdf_bin(*pd_ptr, conf_info.vx_opt[i_vx].climo_cdf_ta, i_bin); pd_ptr = &pd_bin; } // // If there are no matched pairs to process, return // if(pd_ptr->f_na.n_elements() == 0 || pd_ptr->o_na.n_elements() == 0) return; // // Set up the PCTInfo thresholds and alpha values // n_pct = conf_info.vx_opt[i_vx].ocat_ta.n_elements(); for(i=0; icmn_na, pd_ptr->csd_na, conf_info.vx_opt[i_vx].ocat_ta[i]); // // Compute the probabilistic counts and statistics // compute_pctinfo(pd_ptr->f_na, pd_ptr->o_na, climo_prob, conf_info.vx_opt[i_vx].output_flag[i_pstd], pct_info[i]); } // end for i return; } //////////////////////////////////////////////////////////////////////// void do_hira_ens(int i_vx, PairDataPoint *pd_ptr) { PairDataEnsemble hira_pd; int i, j, k, lvl_blw, lvl_abv; NumArray f_ens; // Set flag for specific humidity bool spfh_flag = conf_info.vx_opt[i_vx].vx_pd.fcst_info->is_specific_humidity() && conf_info.vx_opt[i_vx].vx_pd.obs_info->is_specific_humidity(); shc.set_interp_mthd(InterpMthd_Nbrhd, conf_info.vx_opt[i_vx].hira_info.shape); // Loop over the HiRA widths for(i=0; in_obs); hira_pd.set_ens_size(gt->size()); f_ens.extend(gt->size()); // Process each observation point for(j=0; jn_obs; j++) { // Determine the forecast level values find_vert_lvl(conf_info.vx_opt[i_vx].vx_pd.fcst_dpa, pd_ptr->lvl_na[j], lvl_blw, lvl_abv); // Get the nearby forecast values get_interp_points(conf_info.vx_opt[i_vx].vx_pd.fcst_dpa, pd_ptr->x_na[j], pd_ptr->y_na[j], InterpMthd_Nbrhd, conf_info.vx_opt[i_vx].hira_info.width[i], conf_info.vx_opt[i_vx].hira_info.shape,
// NOTE(review): continuation of the get_interp_points() argument list.
conf_info.vx_opt[i_vx].hira_info.vld_thresh, spfh_flag, conf_info.vx_opt[i_vx].vx_pd.fcst_info->level().type(), pd_ptr->lvl_na[j], lvl_blw, lvl_abv, f_ens); // Check for values if(f_ens.n() == 0) continue; // Store the observation value hira_pd.add_obs(pd_ptr->sid_sa[j], pd_ptr->lat_na[j], pd_ptr->lon_na[j], pd_ptr->x_na[j], pd_ptr->y_na[j], pd_ptr->vld_ta[j], pd_ptr->lvl_na[j], pd_ptr->elv_na[j], pd_ptr->o_na[j], pd_ptr->o_qc_sa[j], pd_ptr->cmn_na[j], pd_ptr->csd_na[j], pd_ptr->wgt_na[j]); // Store the ensemble mean and member values hira_pd.mn_na.add(f_ens.mean()); for(k=0; kmagic_str() << " versus " << conf_info.vx_opt[i_vx].vx_pd.obs_info->magic_str() << ", for observation type " << pd_ptr->msg_typ << ", over region " << pd_ptr->mask_name << ", for interpolation method HiRA Ensemble NBRHD(" << shc.get_interp_pnts_str() << "), using " << hira_pd.n_obs << " pairs.\n"; // Check for zero matched pairs if(hira_pd.o_na.n_elements() == 0) continue; // Compute ensemble statistics hira_pd.compute_pair_vals(rng_ptr); hira_pd.compute_stats(); // Write out the ECNT line if(conf_info.vx_opt[i_vx].output_flag[i_ecnt] != STATOutputType_None) { write_ecnt_row(shc, &hira_pd, conf_info.vx_opt[i_vx].output_flag[i_ecnt] == STATOutputType_Both, stat_at, i_stat_row, txt_at[i_ecnt], i_txt_row[i_ecnt]); } } // end for i return; } //////////////////////////////////////////////////////////////////////// void do_hira_prob(int i_vx, PairDataPoint *pd_ptr) { PairDataPoint hira_pd; int i, j, k, lvl_blw, lvl_abv; double f_cov, cmn_cov; SingleThresh cat_thresh; PCTInfo pct_info; // Set flag for specific humidity bool spfh_flag = conf_info.vx_opt[i_vx].vx_pd.fcst_info->is_specific_humidity() && conf_info.vx_opt[i_vx].vx_pd.obs_info->is_specific_humidity(); shc.set_interp_mthd(InterpMthd_Nbrhd, conf_info.vx_opt[i_vx].hira_info.shape); // Loop over categorical thresholds and HiRA widths for(i=0; in_obs; k++) { // Compute the fractional coverage forecast value using the // observation level value
find_vert_lvl(conf_info.vx_opt[i_vx].vx_pd.fcst_dpa, pd_ptr->lvl_na[k], lvl_blw, lvl_abv); f_cov = compute_interp(conf_info.vx_opt[i_vx].vx_pd.fcst_dpa, pd_ptr->x_na[k], pd_ptr->y_na[k], pd_ptr->o_na[k], InterpMthd_Nbrhd, conf_info.vx_opt[i_vx].hira_info.width[j], conf_info.vx_opt[i_vx].hira_info.shape, conf_info.vx_opt[i_vx].hira_info.vld_thresh, spfh_flag, conf_info.vx_opt[i_vx].vx_pd.fcst_info->level().type(), pd_ptr->lvl_na[k], lvl_blw, lvl_abv, &cat_thresh); // Check for bad data if(is_bad_data(f_cov)) continue; // Compute the fractional coverage climatological mean value // using the observation level value find_vert_lvl(conf_info.vx_opt[i_vx].vx_pd.climo_mn_dpa, pd_ptr->lvl_na[k], lvl_blw, lvl_abv); cmn_cov = compute_interp(conf_info.vx_opt[i_vx].vx_pd.climo_mn_dpa, pd_ptr->x_na[k], pd_ptr->y_na[k], pd_ptr->o_na[k], InterpMthd_Nbrhd, conf_info.vx_opt[i_vx].hira_info.width[j], conf_info.vx_opt[i_vx].hira_info.shape, conf_info.vx_opt[i_vx].hira_info.vld_thresh, spfh_flag, conf_info.vx_opt[i_vx].vx_pd.fcst_info->level().type(), pd_ptr->lvl_na[k], lvl_blw, lvl_abv, &cat_thresh); // Store the fractional coverage pair hira_pd.add_pair(pd_ptr->sid_sa[k], pd_ptr->lat_na[k], pd_ptr->lon_na[k], pd_ptr->x_na[k], pd_ptr->y_na[k], pd_ptr->vld_ta[k], pd_ptr->lvl_na[k], pd_ptr->elv_na[k], f_cov, pd_ptr->o_na[k], pd_ptr->o_qc_sa[k], cmn_cov, pd_ptr->csd_na[k], pd_ptr->wgt_na[k]); } // end for k mlog << Debug(2) << "Processing " << conf_info.vx_opt[i_vx].vx_pd.fcst_info->magic_str() << conf_info.vx_opt[i_vx].fcat_ta[i].get_str() << " versus " << conf_info.vx_opt[i_vx].vx_pd.obs_info->magic_str() << conf_info.vx_opt[i_vx].ocat_ta[i].get_str() << ", for observation type " << pd_ptr->msg_typ << ", over region " << pd_ptr->mask_name << ", for interpolation method HiRA Probability NBRHD(" << shc.get_interp_pnts_str() << "), using " << hira_pd.n_obs << " pairs.\n"; // Check for zero matched pairs if(hira_pd.f_na.n_elements() == 0 || hira_pd.o_na.n_elements() == 0) continue; // Set
up the PCTInfo thresholds and alpha values pct_info.fthresh = conf_info.vx_opt[i_vx].hira_info.cov_ta; pct_info.othresh = conf_info.vx_opt[i_vx].ocat_ta[i]; pct_info.allocate_n_alpha(conf_info.vx_opt[i_vx].get_n_ci_alpha()); for(k=0; k