Skip to content
Snippets Groups Projects
Select Git revision
  • c3490bfa8a90afe86b4695be73796f993100eb74
  • master default protected
  • calo-pid-45-135-deg
  • beamline_training
  • beamlie_training_CI
  • pr/nhcal
  • npsim
  • code.jlab.org
  • pr/rerun_fix
  • pr/minor_fixes
  • temp/df
  • Simple-Shyam-patch-1
  • interruptible-resource-groups
  • femc-collect-always
  • add_lowq2_benchmarks
  • pr/insert_neutron_flaky
  • Low-Q2_onnx_training
  • Low-Q2_training
  • matplotlibrc
  • pr/normalized_output_branch_sizes
  • snakemake-batch
  • v0.0.1
22 results

benchmark.h

Blame
  • benchmark.h 4.28 KiB
    #ifndef BENCHMARK_H
    #define BENCHMARK_H
    
    #include "exception.h"
    #include <fmt/core.h>
    #include <fstream>
    #include <iomanip>
    #include <iostream>
    #include <nlohmann/json.hpp>
    #include <string>
    #include <string_view>
    
    // Bookkeeping of test data to store data of one or more tests in a json file to
    // facilitate future accounting.
    //
    // Usage Example 1 (single test):
    // ==============================
    // 1. define our test
    //      eic::util::Test test1{
    //        {{"name", "example_test"},
    //        {"title", "Example Test"},
    //        {"description", "This is an example of a test definition"},
    //        {"quantity", "efficiency"},
    //        {"target", "1"}}};
    // 2. set pass/fail/error status and return value (in this case .99)
    //      test1.pass(0.99)
    // 3. write our test data to a json file
    //      eic::util::write_test(test1, "test1.json");
    //
    // Usage Example 2 (multiple tests):
    // =================================
    // 1. define our tests
    //      eic::util::Test test1{
    //        {{"name", "example_test"},
    //        {"title", "Example Test"},
    //        {"description", "This is an example of a test definition"},
    //        {"quantity", "efficiency"},
    //        {"target", "1"}}};
    //      eic::util::Test test2{
    //        {{"name", "another_test"},
    //        {"title", "Another example Test"},
    //        {"description", "This is a second example of a test definition"},
    //        {"quantity", "resolution"},
    //        {"target", "3."}}};
    // 2. set pass/fail/error status and return value (in this case .99)
    //      test1.fail(10)
    // 3. write our test data to a json file
    //      eic::util::write_test({test1, test2}, "test.json");
    
    // Namespace for utility scripts, FIXME this should be part of an independent
    // library
    namespace eic::util {
    
      // Error thrown when a test definition is malformed (e.g. missing one of
      // the required fields described below). Tagged with the type string
      // "test_definition_error" so it can be identified in reports/output.
      struct TestDefinitionError : Exception {
        // msg: human-readable description of what is wrong with the definition.
        // explicit: prevent accidental implicit conversion from a string.
        explicit TestDefinitionError(std::string_view msg)
            : Exception(msg, "test_definition_error") {}
      };
    
      // Wrapper for our test data json, with three methods to set the status
      // after test completion (to pass, fail or error). The default value
      // is error.
      // The following fields should be defined in the definitions json
      // for the test to make sense:
      //  - name: unique identifier for this test
      //  - title: Slightly more verbose identifier for this test
      //  - description: Concise description of what is tested
      //  - quantity: What quantity is tested? Units of value/target
      //  - target: Target value of <quantity> that we want to reach
      //  - value: Actual value of <quantity>
      //  - weight: Weight for this test (this is defaulted to 1.0 if not specified)
      //  - result: pass/fail/error
      struct Test {