scoring_metrics.py Functions
- scoring_metrics.document_level_annot_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, fuzzy_flag, scorable_attributes)
- scoring_metrics.end_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)
- scoring_metrics.evaluate_doc_properties(reference_filename, confusion_matrix, score_card, reference_ss, test_ss, patterns, fuzzy_flag='doc-property', scorable_attributes=[], scorable_engines=[], norm_synonyms={})
- scoring_metrics.evaluate_positions(reference_filename, confusion_matrix, score_card, reference_ss, test_ss, fuzzy_flag='exact', use_mapped_chars=False, scorable_attributes=[], scorable_engines=[], norm_synonyms={})
- scoring_metrics.exact_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)
- scoring_metrics.fully_contained_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)
- scoring_metrics.output_metrics(class_data, fuzzy_flag, metrics, delimiter_prefix, delimiter, stdout_flag, csv_out_filename, pretty_print_flag)
- scoring_metrics.partial_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)
- scoring_metrics.print_confusion_matrix(confusion_matrix, file_mapping, reference_config, test_config, fuzzy_flag, args)
- scoring_metrics.print_confusion_matrix_shell(confusion_matrix, file_mapping, reference_patterns, test_patterns, args)
- scoring_metrics.print_counts_summary(score_card, file_list, config_patterns, args, set_type)
- scoring_metrics.print_score_summary(score_card, file_mapping, reference_config, test_config, fuzzy_flag, args, norm_engine='')
- scoring_metrics.print_score_summary_shell(score_card, file_mapping, reference_config, test_config, args)
- scoring_metrics.reference_annot_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)
- scoring_metrics.start_comparison_runner(reference_filename, confusion_matrix, score_card, reference_annot, test_entries, start_key, end_key, fuzzy_flag, scorable_attributes, scorable_engines, norm_synonyms)