
Commit

Merge pull request #1308 from veg/develop
v2.5.30rc
spond authored Mar 24, 2021
2 parents 4f87ea2 + d604af0 commit 6db6c58
Showing 12 changed files with 99 additions and 40 deletions.
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -3,6 +3,7 @@ project(HyPhy)
cmake_policy(VERSION 3.0.0)

enable_testing()
#enable_language(Fortran)

set(CMAKE_CONFIGURATION_TYPES Release)

4 changes: 3 additions & 1 deletion res/TemplateBatchFiles/GARD.bf
@@ -212,6 +212,8 @@ Will resume search from the end of the previous run.
}
}



gard.defaultFitFilePath = (gard.alignment[terms.data.file] + '.best-gard');
KeywordArgument ("output-lf", "Write the best fitting HyPhy analysis snapshot to (default is to save to the same path as the alignment file + 'best-gard')", gard.defaultFitFilePath);
gard.lfFileLocation = io.PromptUserForFilePath ("Save the HyPhy analysis snapshot to");
@@ -428,7 +430,7 @@ namespace gard {

bestOverallModelSoFar = {1, startWithBP};
for (i, v; in; json["breakpointData"]) {
if (i < startWithBP) {
if (+i < startWithBP) {
bestOverallModelSoFar[+i] = +(v['bps'])[1];
}
}
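A note on the `+i` change in the loop above: `i` iterates the keys of `json["breakpointData"]`, and associative-array keys in HBL arrive as strings (the existing `bestOverallModelSoFar[+i]` indexing already coerces them), so the unary `+` makes the `< startWithBP` comparison numeric rather than string-based. The minimal C++ sketch below is hypothetical illustration, not part of this commit; it shows why comparing string-typed keys directly can give the wrong answer.

```cpp
#include <iostream>
#include <map>
#include <string>

int main() {
    // Hypothetical stand-in for json["breakpointData"]: keys arrive as strings.
    std::map<std::string, double> breakpointData = {{"9", 0.1}, {"10", 0.2}};
    const long startWithBP = 10;

    for (const auto& [key, value] : breakpointData) {
        (void)value;                                          // unused in this illustration
        bool as_string = key < std::to_string(startWithBP);  // lexicographic: "9" < "10" is false
        bool as_number = std::stol(key) < startWithBP;       // numeric: 9 < 10 is true
        std::cout << key << " -> string compare: " << as_string
                  << ", numeric compare: " << as_number << '\n';
    }
    return 0;
}
```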
16 changes: 13 additions & 3 deletions res/TemplateBatchFiles/SelectionAnalyses/FitMultiModel.bf
@@ -451,9 +451,17 @@ if (fitter.do_islands) {

if (utility.Array1D (fitter.callout)) {
// reconstruct ancestors here


fitter.site2partition = {};
fitter.ancestral_caches = {};
for (index, partition; in; fitter.filter_specification) {
for (site_index, site; in; partition[terms.data.coverage]) {
fitter.site2partition [site] = {{index__,site_index__}};
}
fitter.ancestral_caches [index] = ancestral.build (fitter.three_hit_results[terms.likelihood_function], +index, None);
}
fitter.site_reports = {};
fitter.ancestral_cache = ancestral.build (fitter.three_hit_results[terms.likelihood_function], 0, None);


io.ReportProgressMessageMD ("fitter", "sites", "" + utility.Array1D (fitter.callout) + " individual " + io.SingularOrPlural (utility.Array1D (fitter.callout) , "site", "sites") + " which showed sufficiently strong preference for multiple-hit models");
fitter.table_output_options = {
@@ -489,8 +497,10 @@ if (utility.Array1D (fitter.callout)) {

fitter.table_output_options[utility.getGlobalValue("terms.table_options.header")] = FALSE;
utility.ForEachPair (fitter.callout, "_site_", "_value_", "

_site_map_ = fitter.site2partition [_site_];

fitter.site_reports [_site_] = (ancestral.ComputeSubstitutionBySite (fitter.ancestral_cache, +_site_, None))[terms.substitutions];
fitter.site_reports [_site_] = (ancestral.ComputeSubstitutionBySite ((fitter.ancestral_caches[_site_map_[0]]), +(_site_map_[1]), None))[terms.substitutions];

if (fitter.do_islands) {
fprintf(stdout, io.FormatTableRow(
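For orientation, the FitMultiModel.bf edit above replaces a single ancestral cache built for partition 0 with one cache per partition, plus a map from each site to its (partition index, within-partition site index) so the per-site substitution report can query the right cache. Below is a hypothetical C++ sketch of that bookkeeping; the names and types are invented for illustration and are not HyPhy API.

```cpp
#include <cstdio>
#include <unordered_map>
#include <utility>
#include <vector>

struct AncestralCache { int partition_id; };   // stand-in for ancestral.build(...)

int main() {
    // Hypothetical partitions, each listing the global sites it covers
    // (the role of partition[terms.data.coverage] in the batch file).
    std::vector<std::vector<long>> coverage = {{0, 1, 2}, {3, 4}};

    std::unordered_map<long, std::pair<long, long>> site2partition;   // global site -> (partition, local site)
    std::unordered_map<long, AncestralCache>        ancestral_caches; // one cache per partition

    for (long p = 0; p < (long)coverage.size(); ++p) {
        for (long s = 0; s < (long)coverage[p].size(); ++s) {
            site2partition[coverage[p][s]] = {p, s};
        }
        ancestral_caches[p] = AncestralCache{(int)p};
    }

    // A per-site report now resolves the right cache and local index first.
    auto [part, local] = site2partition[3];
    std::printf("site 3 -> partition %ld, local site %ld, cache id %d\n",
                part, local, ancestral_caches[part].partition_id);
    return 0;
}
```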
26 changes: 14 additions & 12 deletions res/TemplateBatchFiles/SelectionAnalyses/MEME.bf
@@ -511,6 +511,8 @@ function meme.apply_proportional_site_constraint.fel (tree_name, node_name, alph
node_name = tree_name + "." + node_name;

ExecuteCommands ("
`node_name`.`alpha_parameter` :< 1e10;
`node_name`.`beta_parameter` :< 1e10;
`node_name`.`alpha_parameter` := (`alpha_factor`) * meme.branch_length__;
`node_name`.`beta_parameter` := (`beta_factor`) * meme.branch_length__;
");
@@ -524,6 +526,9 @@ function meme.apply_proportional_site_constraint.bsrel (tree_name, node_name, al
node_name = tree_name + "." + node_name;

ExecuteCommands ("
`node_name`.`alpha_parameter` :< 1e10;
`node_name`.`beta_parameter2` :< 1e10;
`node_name`.`beta_parameter` :< 1e10;
`node_name`.`alpha_parameter` := (`alpha_factor`) * meme.branch_length__;
`node_name`.`beta_parameter` := (`omega_factor`) * `node_name`.`alpha_parameter`;
`node_name`.`beta_parameter2` := (`beta_factor`) * meme.branch_length__;
@@ -734,10 +739,16 @@ lfunction meme.handle_a_site (lf_fel, lf_bsrel, filter_data, partition_index, pa

if (^"meme.site_beta_plus" > ^"meme.site_alpha" && ^"meme.site_mixture_weight" < 0.999999) {

/*
if (^"meme.site_tree_bsrel.aipysurusLaevis.alpha" > 10000) {
Export (lfe, ^lf_bsrel);
console.log (lfe);
}
*/

LFCompute (^lf_bsrel,LF_START_COMPUTE);
LFCompute (^lf_bsrel,baseline);




for (_node_name_; in; ^bsrel_tree_id) {
if (((^"meme.selected_branches") [partition_index])[_node_name_] == utility.getGlobalValue("terms.tree_attributes.test")) {
@@ -747,16 +758,7 @@ lfunction meme.handle_a_site (lf_fel, lf_bsrel, filter_data, partition_index, pa
}
}

/*utility.ForEach (^bsrel_tree_id, "_node_name_",
'
if ((meme.selected_branches [^"`&partition_index`"])[_node_name_] == utility.getGlobalValue("terms.tree_attributes.test")) {
_node_name_res_ = meme.compute_branch_EBF (^"`&lf_bsrel`", ^"`&bsrel_tree_id`", _node_name_, ^"`&baseline`");
(^"`&branch_ebf`")[_node_name_] = _node_name_res_[utility.getGlobalValue("terms.empirical_bayes_factor")];
(^"`&branch_posterior`")[_node_name_] = _node_name_res_[utility.getGlobalValue("terms.posterior")];
}
'
);*/


LFCompute (^lf_bsrel,LF_DONE_COMPUTE);

^"meme.site_beta_plus" := ^"meme.site_alpha";
5 changes: 4 additions & 1 deletion res/TemplateBatchFiles/libv3/models/codon/MG_REV.bf
@@ -142,10 +142,12 @@ function models.codon.MG_REV.set_branch_length(model, value, parameter) {
if ( null != models.codon.MG_REV.set_branch_length.beta && null != models.codon.MG_REV.set_branch_length.alpha) {



models.codon.MG_REV.set_branch_length.alpha.p = parameter + "." + models.codon.MG_REV.set_branch_length.alpha;
models.codon.MG_REV.set_branch_length.beta.p = parameter + "." + models.codon.MG_REV.set_branch_length.beta;

if (Type(value) == "AssociativeList") {

if (value[terms.model.branch_length_scaler] == terms.model.branch_length_constrain) {
if (parameters.IsIndependent(models.codon.MG_REV.set_branch_length.alpha.p)) {
if (Abs(model[terms.parameters.synonymous_rate]) == 0) {
@@ -173,11 +175,12 @@ function models.codon.MG_REV.set_branch_length(model, value, parameter) {
}
} else {
models.codon.MG_REV.set_branch_length.lp = 0;



if (parameters.IsIndependent(models.codon.MG_REV.set_branch_length.alpha.p)) {
Eval(models.codon.MG_REV.set_branch_length.alpha.p + ":=(" + value[terms.model.branch_length_scaler] + ")*" + value[terms.branch_length]);
models.codon.MG_REV.set_branch_length.lp += 1;

}

if (parameters.IsIndependent(models.codon.MG_REV.set_branch_length.beta.p)) {
1 change: 1 addition & 0 deletions res/TemplateBatchFiles/libv3/tasks/alignments.bf
@@ -103,6 +103,7 @@ lfunction alignments.ReadNucleotideDataSet_aux(dataset_name) {
partitions = Transpose(dfpm);
}
}


return {
utility.getGlobalValue("terms.data.sequences"): Eval("`dataset_name`.species"),
13 changes: 1 addition & 12 deletions res/TemplateBatchFiles/libv3/tasks/estimators.bf
@@ -772,17 +772,6 @@ lfunction estimators.FitLF(data_filter, tree, model_map, initial_values, model_o
can_do_restarts = null;




//Export (lfe, likelihoodFunction);
//console.log (lfe);
//GetString (lfe, likelihoodFunction, -1);
//console.log (lfe);
//fprintf ("/Users/sergei/Desktop/busted.txt", CLEAR_FILE, lfe);
//utility.ToggleEnvVariable("VERBOSITY_LEVEL", 10);




if (utility.Has (run_options, utility.getGlobalValue("terms.search_grid"),"AssociativeList")) {
grid_results = mpi.ComputeOnGrid (&likelihoodFunction, run_options [utility.getGlobalValue("terms.search_grid")], "mpi.ComputeOnGrid.SimpleEvaluator", "mpi.ComputeOnGrid.ResultHandler");
@@ -1176,7 +1165,7 @@ lfunction estimators.FitCodonModel(codon_data, tree, generator, genetic_code, op
component_tree = lf_components[2 * i + 1];
ClearConstraints( * component_tree);
branch_map = (option[utility.getGlobalValue("terms.run_options.partitioned_omega")])[i];

component_branches = BranchName( * component_tree, -1);
for (j = 0; j < Columns(component_branches) - 1; j += 1) {
/**
14 changes: 14 additions & 0 deletions src/core/dataset_filter.cpp
@@ -870,6 +870,20 @@ _List * _DataSetFilter::ComputePatternToSiteMap (void) const {
return result;
}

//_______________________________________________________________________

_SimpleList const _DataSetFilter::SitesForPattern (unsigned long pattern_id) const {
_SimpleList result;

for (unsigned long s = 0UL; s < duplicateMap.lLength; s++) {
if (duplicateMap.get(s) == pattern_id) {
result << s;
}
}

return result;
}



//_______________________________________________________________________
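The new `_DataSetFilter::SitesForPattern` helper inverts `duplicateMap`, recovering every alignment site that collapsed into a given pattern; it is what lets the `SetupLFCaches` error message further down name the offending sites. A rough usage sketch follows; the wrapper function is hypothetical and is not code from this commit, and it assumes the HyPhy headers and a filter pointer are available.

```cpp
#include <cstdio>
#include "dataset_filter.h"   // HyPhy header for _DataSetFilter (path as in this diff)

// Hypothetical helper: print every 0-based alignment site that collapsed into
// `pattern_id`, mirroring the lookup _LikelihoodFunction::SetupLFCaches does below.
void PrintSitesForPattern (_DataSetFilter const * filter, unsigned long pattern_id) {
    _SimpleList const pattern_sites = filter->SitesForPattern (pattern_id);
    for (unsigned long k = 0UL; k < pattern_sites.lLength; k++) {
        std::printf ("pattern %lu occurs at site %ld\n", pattern_id, (long)pattern_sites.get (k));
    }
}
```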
14 changes: 12 additions & 2 deletions src/core/global_things.cpp
@@ -121,7 +121,7 @@ namespace hy_global {
kErrorStringDatasetRefIndexError ("Dataset index reference out of range"),
kErrorStringMatrixExportError ("Export matrix called with a non-polynomial matrix argument"),
kErrorStringNullOperand ("Attempting to operate on an undefined value; this is probably the result of an earlier 'soft' error condition"),
kHyPhyVersion = _String ("2.5.29"),
kHyPhyVersion = _String ("2.5.30"),

kNoneToken = "None",
kNullToken = "null",
@@ -672,10 +672,20 @@

char str[] = "\n";
fwrite (str, 1, 1, hy_message_log_file);
fwrite (message.get_str(), 1, message.length(), hy_message_log_file);
fwrite (message.get_str(), 1, message.length(), hy_message_log_file);
fflush (hy_message_log_file);
#endif
}

//____________________________________________________________________________________
void ReportWarningConsole (_String const message) {
if (has_terminal_stdout) {
NLToConsole();
BufferToConsole("***WARNING***\n");
StringToConsole(message);
NLToConsole();
}
}

//____________________________________________________________________________________
void HandleOrStoreApplicationError (_String* error_string, _String const & message) {
6 changes: 6 additions & 0 deletions src/core/include/dataset_filter.h
@@ -302,6 +302,12 @@ class _DataSetFilter : public BaseObj {
* have the same pattern in the original alignment
*/

_SimpleList const SitesForPattern(unsigned long) const;

/**
* return a list of sites matching a given pattern
*/

template<typename SOURCE_TYPE, typename TARGET_TYPE> void PatternToSiteMapper(SOURCE_TYPE const* source, TARGET_TYPE * target, long padup, TARGET_TYPE filler) const {


8 changes: 8 additions & 0 deletions src/core/include/global_things.h
@@ -173,6 +173,14 @@ namespace hy_global {
*/
void ReportWarning (_String const & message);

//_______________________________________________________________________

/** Push warning message to console (high impact warnings)
@param message the diagnostic message to report
*/
void ReportWarningConsole (_String const message);

//_______________________________________________________________________

/**
31 changes: 22 additions & 9 deletions src/core/likefunc.cpp
@@ -1920,6 +1920,7 @@ bool _LikelihoodFunction::PreCompute (void)
hyFloat tp = cornholio->Compute()->Value();
if (!cornholio->IsValueInBounds(tp)){
ReportWarning (_String ("Failing bound checks on ") & *cornholio->GetName() & " = " & _String (tp, "%25.16g"));
//break;
}
}

@@ -3675,8 +3676,14 @@ void _LikelihoodFunction::SetupLFCaches (void) {
if (translation < 0L) {
translation = theFilter->Translate2Frequencies (aState, translationCache, true);
if (translation < 0L) {
hyFloat total_weight = 0.;
for (unsigned long j = 0UL; j < stateSpaceDim; j++) {
ambigs->Store(translationCache[j]);
total_weight += translationCache[j];
}
if (CheckEqual(0., total_weight)) {
_SimpleList pattern_sites = theFilter->SitesForPattern(siteID);
HandleApplicationError(aState.Enquote() & " found at sites " & _String ((_String*)pattern_sites.toStr()) & " of sequence " & theFilter->GetSequenceName(leafID)->Enquote() & " maps to a null probability vector; this will result in an uncomputable likelihood function");
}
translation = -ambig_resolution_count++;
}
@@ -5611,11 +5618,16 @@ long _LikelihoodFunction::Bracket (long index, hyFloat& left, hyFloat& middle
(or involves a parameter that has very little effect on the LF), recomputation could be within numerical error
**/
if (rightValue - middleValue > 1e-12*errorTolerance || leftValue - middleValue > 1e-12*errorTolerance) {
if (rightValue - middleValue > 1e-10*errorTolerance || leftValue - middleValue > 1e-10*errorTolerance) {
char buf[256], buf2[512];
snprintf (buf, 256, " \n\tERROR: [_LikelihoodFunction::Bracket (index %ld) recomputed the value to midpoint: L(%g) = %g [@%g -> %g:@%g -> %g]]", index, middle, middleValue, left, leftValue,right, rightValue);
snprintf (buf2, 512, "\n\t[_LikelihoodFunction::Bracket (index %ld) BRACKET %s: %20.16g <= %20.16g >= %20.16g. steps, L=%g, R=%g, values %15.12g : %15.12g - %15.12g]", index, successful ? "SUCCESSFUL" : "FAILED", left,middle,right, leftStep, rightStep, leftValue - middleValue, middleValue, rightValue - middleValue);
_TerminateAndDump (_String (buf) & "\n" & buf2 & "\nParameter name " & (index >= 0 ? *GetIthIndependentName(index) : "line optimization"));
if (hy_env::EnvVariableTrue(hy_env::tolerate_numerical_errors)) {
ReportWarningConsole (_String (buf) & "\n" & buf2 & "\nParameter name " & (index >= 0 ? *GetIthIndependentName(index) : "line optimization"));
} else {
_TerminateAndDump (_String (buf) & "\n" & buf2 & "\nParameter name " & (index >= 0 ? *GetIthIndependentName(index) : "line optimization"));
}

}
successful = rightValue<=middleValue && leftValue<=middleValue;
}
@@ -7062,10 +7074,14 @@ void _LikelihoodFunction::LocateTheBump (long index,hyFloat gPrecision, hyFlo
snprintf (buf, 256, "\n\t[_LikelihoodFunction::LocateTheBump (index %ld) RESETTING THE VALUE (worse log likelihood obtained; current value %20.16g, best value %20.16g) ]\n\n", index, GetIthIndependent(index), bestVal);
BufferToConsole (buf);
}
if (CheckEqual(GetIthIndependent(index), bestVal) && fabs (middleValue-maxSoFar) > 1e-12*errorTolerance) {
if (CheckEqual(GetIthIndependent(index), bestVal) && fabs (middleValue-maxSoFar) > 1e-10*errorTolerance) {
char buf[256];
snprintf (buf, 256, " \n\tERROR: [_LikelihoodFunction::LocateTheBump (index %ld) current value %20.16g (parameter = %20.16g), best value %20.16g (parameter = %20.16g)); delta = %20.16g, tolerance = %20.16g]\n\n", index, middleValue, GetIthIndependent(index), maxSoFar, bestVal, maxSoFar - middleValue, 1e-12*errorTolerance);
_TerminateAndDump (_String (buf) & "\n" & "\nParameter name " & *GetIthIndependentName(index));
snprintf (buf, 256, " \n\tERROR: [_LikelihoodFunction::LocateTheBump (index %ld) current value %20.16g (parameter = %20.16g), best value %20.16g (parameter = %20.16g)); delta = %20.16g, tolerance = %20.16g]\n\n", index, middleValue, GetIthIndependent(index), maxSoFar, bestVal, maxSoFar - middleValue, 1e-10*errorTolerance);
if (hy_env::EnvVariableTrue(hy_env::tolerate_numerical_errors)) {
ReportWarningConsole(buf);
} else {
_TerminateAndDump (_String (buf) & "\n" & "\nParameter name " & *GetIthIndependentName(index));
}
}
SetIthIndependent(index,bestVal);
} else {
@@ -8762,10 +8778,7 @@ hyFloat _LikelihoodFunction::ComputeBlock (long index, hyFloat* siteRes, long c


if (hy_env::EnvVariableTrue(hy_env::tolerate_numerical_errors)) {
NLToConsole();
BufferToConsole("***WARNING***\n");
StringToConsole(err_msg);
NLToConsole();
ReportWarningConsole (err_msg);
} else {
_TerminateAndDump (err_msg);
return -INFINITY;
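The likefunc.cpp changes above follow a common pattern: the consistency checks in `Bracket` and `LocateTheBump` relax their failure threshold from 1e-12 to 1e-10 of `errorTolerance`, and, like `ComputeBlock`, they now downgrade a hard `_TerminateAndDump` to the new console warning when `hy_env::tolerate_numerical_errors` is set. The sketch below is schematic only, with a hypothetical helper name; the real code inlines this logic at each site.

```cpp
// Schematic only: how the error sites above now decide between warning and aborting.
// WarnOrTerminate is a hypothetical name, and the relevant HyPhy headers
// (global_things.h, the hy_env declarations) are assumed to be included.
void WarnOrTerminate (_String const & diagnostic) {
    if (hy_env::EnvVariableTrue (hy_env::tolerate_numerical_errors)) {
        ReportWarningConsole (diagnostic);   // high-impact warning, computation continues
    } else {
        _TerminateAndDump (diagnostic);      // previous behavior: dump state and abort
    }
}
```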

1 comment on commit 6db6c58

@kjlevitz
Contributor


⚠️ Performance Alert ⚠️

A possible performance regression was detected for benchmark 'Benchmark.js Benchmark'.
The benchmark result of this commit is worse than the previous result, exceeding the alert threshold of 2.

Benchmark suite | Current: 6db6c58              | Previous: d604af0         | Ratio
BGM.wbf         | Infinity secs/op (±0.000000%) | null secs/op (±0.000000%) | Infinity
ABSREL-MH.wbf   | Infinity secs/op (±0.000000%) | null secs/op (±0.000000%) | Infinity

This comment was automatically generated by workflow using github-action-benchmark.

CC: @klevitz
