Return File Library to /lib as it is needed by Local Discretization (factorize)

2024-04-30 20:31:14 +02:00
parent 7aeffba740
commit 618a1e539c
148 changed files with 1804 additions and 1769 deletions

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,77 +65,77 @@
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L63">bayesnet::Metrics::getScoresKBest() const</a></td>
<td class="coverFnHi">48</td>
<td class="coverFnHi">16</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L17">bayesnet::Metrics::Metrics(std::vector&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt;, std::allocator&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt; &gt; &gt; const&amp;, std::vector&lt;int, std::allocator&lt;int&gt; &gt; const&amp;, std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; const&amp;, int)</a></td>
<td class="coverFnHi">96</td>
<td class="coverFnHi">32</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L154">bayesnet::Metrics::maximumSpanningTree(std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, at::Tensor const&amp;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L68">bayesnet::Metrics::conditionalEdge(at::Tensor const&amp;)</a></td>
<td class="coverFnHi">204</td>
<td class="coverFnHi">68</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L28">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)</a></td>
<td class="coverFnHi">690</td>
<td class="coverFnHi">230</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L9">bayesnet::Metrics::Metrics(at::Tensor const&amp;, std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; const&amp;, int)</a></td>
<td class="coverFnHi">2248</td>
<td class="coverFnHi">744</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L45">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)::{lambda(int, int)#1}::operator()(int, int) const</a></td>
<td class="coverFnHi">2718</td>
<td class="coverFnHi">906</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L116">bayesnet::Metrics::conditionalEntropy(at::Tensor const&amp;, at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">44793</td>
<td class="coverFnHi">14836</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L145">bayesnet::Metrics::mutualInformation(at::Tensor const&amp;, at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">44793</td>
<td class="coverFnHi">14836</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L106">bayesnet::Metrics::entropy(at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">50295</td>
<td class="coverFnHi">16480</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L55">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)::{lambda(int, int)#2}::operator()(int, int) const</a></td>
<td class="coverFnHi">97212</td>
<td class="coverFnHi">32404</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,77 +65,77 @@
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L9">bayesnet::Metrics::Metrics(at::Tensor const&amp;, std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; const&amp;, int)</a></td>
<td class="coverFnHi">2248</td>
<td class="coverFnHi">744</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L17">bayesnet::Metrics::Metrics(std::vector&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt;, std::allocator&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt; &gt; &gt; const&amp;, std::vector&lt;int, std::allocator&lt;int&gt; &gt; const&amp;, std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; const&amp;, int)</a></td>
<td class="coverFnHi">96</td>
<td class="coverFnHi">32</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L28">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)</a></td>
<td class="coverFnHi">690</td>
<td class="coverFnHi">230</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L45">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)::{lambda(int, int)#1}::operator()(int, int) const</a></td>
<td class="coverFnHi">2718</td>
<td class="coverFnHi">906</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L55">bayesnet::Metrics::SelectKBestWeighted(at::Tensor const&amp;, bool, unsigned int)::{lambda(int, int)#2}::operator()(int, int) const</a></td>
<td class="coverFnHi">97212</td>
<td class="coverFnHi">32404</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L68">bayesnet::Metrics::conditionalEdge(at::Tensor const&amp;)</a></td>
<td class="coverFnHi">204</td>
<td class="coverFnHi">68</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L116">bayesnet::Metrics::conditionalEntropy(at::Tensor const&amp;, at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">44793</td>
<td class="coverFnHi">14836</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L106">bayesnet::Metrics::entropy(at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">50295</td>
<td class="coverFnHi">16480</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L63">bayesnet::Metrics::getScoresKBest() const</a></td>
<td class="coverFnHi">48</td>
<td class="coverFnHi">16</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L154">bayesnet::Metrics::maximumSpanningTree(std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, at::Tensor const&amp;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.cc.gcov.html#L145">bayesnet::Metrics::mutualInformation(at::Tensor const&amp;, at::Tensor const&amp;, at::Tensor const&amp;)</a></td>
<td class="coverFnHi">44793</td>
<td class="coverFnHi">14836</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -70,156 +70,156 @@
<span id="L8"><span class="lineNum"> 8</span> : #include &quot;BayesMetrics.h&quot;</span>
<span id="L9"><span class="lineNum"> 9</span> : namespace bayesnet {</span>
<span id="L10"><span class="lineNum"> 10</span> : //samples is n+1xm tensor used to fit the model</span>
<span id="L11"><span class="lineNum"> 11</span> <span class="tlaGNC tlaBgGNC"> 2248 : Metrics::Metrics(const torch::Tensor&amp; samples, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates)</span></span>
<span id="L12"><span class="lineNum"> 12</span> <span class="tlaGNC"> 2248 : : samples(samples)</span></span>
<span id="L13"><span class="lineNum"> 13</span> <span class="tlaGNC"> 2248 : , className(className)</span></span>
<span id="L14"><span class="lineNum"> 14</span> <span class="tlaGNC"> 2248 : , features(features)</span></span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC"> 2248 : , classNumStates(classNumStates)</span></span>
<span id="L11"><span class="lineNum"> 11</span> <span class="tlaGNC tlaBgGNC"> 744 : Metrics::Metrics(const torch::Tensor&amp; samples, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates)</span></span>
<span id="L12"><span class="lineNum"> 12</span> <span class="tlaGNC"> 744 : : samples(samples)</span></span>
<span id="L13"><span class="lineNum"> 13</span> <span class="tlaGNC"> 744 : , className(className)</span></span>
<span id="L14"><span class="lineNum"> 14</span> <span class="tlaGNC"> 744 : , features(features)</span></span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC"> 744 : , classNumStates(classNumStates)</span></span>
<span id="L16"><span class="lineNum"> 16</span> : {</span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC"> 2248 : }</span></span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC"> 744 : }</span></span>
<span id="L18"><span class="lineNum"> 18</span> : //samples is n+1xm std::vector used to fit the model</span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC"> 96 : Metrics::Metrics(const std::vector&lt;std::vector&lt;int&gt;&gt;&amp; vsamples, const std::vector&lt;int&gt;&amp; labels, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates)</span></span>
<span id="L20"><span class="lineNum"> 20</span> <span class="tlaGNC"> 96 : : samples(torch::zeros({ static_cast&lt;int&gt;(vsamples.size() + 1), static_cast&lt;int&gt;(vsamples[0].size()) }, torch::kInt32))</span></span>
<span id="L21"><span class="lineNum"> 21</span> <span class="tlaGNC"> 96 : , className(className)</span></span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 96 : , features(features)</span></span>
<span id="L23"><span class="lineNum"> 23</span> <span class="tlaGNC"> 96 : , classNumStates(classNumStates)</span></span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC"> 32 : Metrics::Metrics(const std::vector&lt;std::vector&lt;int&gt;&gt;&amp; vsamples, const std::vector&lt;int&gt;&amp; labels, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates)</span></span>
<span id="L20"><span class="lineNum"> 20</span> <span class="tlaGNC"> 32 : : samples(torch::zeros({ static_cast&lt;int&gt;(vsamples.size() + 1), static_cast&lt;int&gt;(vsamples[0].size()) }, torch::kInt32))</span></span>
<span id="L21"><span class="lineNum"> 21</span> <span class="tlaGNC"> 32 : , className(className)</span></span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 32 : , features(features)</span></span>
<span id="L23"><span class="lineNum"> 23</span> <span class="tlaGNC"> 32 : , classNumStates(classNumStates)</span></span>
<span id="L24"><span class="lineNum"> 24</span> : {</span>
<span id="L25"><span class="lineNum"> 25</span> <span class="tlaGNC"> 768 : for (int i = 0; i &lt; vsamples.size(); ++i) {</span></span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 2688 : samples.index_put_({ i, &quot;...&quot; }, torch::tensor(vsamples[i], torch::kInt32));</span></span>
<span id="L25"><span class="lineNum"> 25</span> <span class="tlaGNC"> 256 : for (int i = 0; i &lt; vsamples.size(); ++i) {</span></span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 896 : samples.index_put_({ i, &quot;...&quot; }, torch::tensor(vsamples[i], torch::kInt32));</span></span>
<span id="L27"><span class="lineNum"> 27</span> : }</span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 384 : samples.index_put_({ -1, &quot;...&quot; }, torch::tensor(labels, torch::kInt32));</span></span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 864 : }</span></span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC"> 690 : std::vector&lt;int&gt; Metrics::SelectKBestWeighted(const torch::Tensor&amp; weights, bool ascending, unsigned k)</span></span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 128 : samples.index_put_({ -1, &quot;...&quot; }, torch::tensor(labels, torch::kInt32));</span></span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 288 : }</span></span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC"> 230 : std::vector&lt;int&gt; Metrics::SelectKBestWeighted(const torch::Tensor&amp; weights, bool ascending, unsigned k)</span></span>
<span id="L31"><span class="lineNum"> 31</span> : {</span>
<span id="L32"><span class="lineNum"> 32</span> : // Return the K Best features </span>
<span id="L33"><span class="lineNum"> 33</span> <span class="tlaGNC"> 690 : auto n = features.size();</span></span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 690 : if (k == 0) {</span></span>
<span id="L33"><span class="lineNum"> 33</span> <span class="tlaGNC"> 230 : auto n = features.size();</span></span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 230 : if (k == 0) {</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaUNC tlaBgUNC"> 0 : k = n;</span></span>
<span id="L36"><span class="lineNum"> 36</span> : }</span>
<span id="L37"><span class="lineNum"> 37</span> : // compute scores</span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC tlaBgGNC"> 690 : scoresKBest.clear();</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 690 : featuresKBest.clear();</span></span>
<span id="L40"><span class="lineNum"> 40</span> <span class="tlaGNC"> 2070 : auto label = samples.index({ -1, &quot;...&quot; });</span></span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC"> 15576 : for (int i = 0; i &lt; n; ++i) {</span></span>
<span id="L42"><span class="lineNum"> 42</span> <span class="tlaGNC"> 44658 : scoresKBest.push_back(mutualInformation(label, samples.index({ i, &quot;...&quot; }), weights));</span></span>
<span id="L43"><span class="lineNum"> 43</span> <span class="tlaGNC"> 14886 : featuresKBest.push_back(i);</span></span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC tlaBgGNC"> 230 : scoresKBest.clear();</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 230 : featuresKBest.clear();</span></span>
<span id="L40"><span class="lineNum"> 40</span> <span class="tlaGNC"> 690 : auto label = samples.index({ -1, &quot;...&quot; });</span></span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC"> 5192 : for (int i = 0; i &lt; n; ++i) {</span></span>
<span id="L42"><span class="lineNum"> 42</span> <span class="tlaGNC"> 14886 : scoresKBest.push_back(mutualInformation(label, samples.index({ i, &quot;...&quot; }), weights));</span></span>
<span id="L43"><span class="lineNum"> 43</span> <span class="tlaGNC"> 4962 : featuresKBest.push_back(i);</span></span>
<span id="L44"><span class="lineNum"> 44</span> : }</span>
<span id="L45"><span class="lineNum"> 45</span> : // sort &amp; reduce scores and features</span>
<span id="L46"><span class="lineNum"> 46</span> <span class="tlaGNC"> 690 : if (ascending) {</span></span>
<span id="L47"><span class="lineNum"> 47</span> <span class="tlaGNC"> 114 : sort(featuresKBest.begin(), featuresKBest.end(), [&amp;](int i, int j)</span></span>
<span id="L48"><span class="lineNum"> 48</span> <span class="tlaGNC"> 2718 : { return scoresKBest[i] &lt; scoresKBest[j]; });</span></span>
<span id="L49"><span class="lineNum"> 49</span> <span class="tlaGNC"> 114 : sort(scoresKBest.begin(), scoresKBest.end(), std::less&lt;double&gt;());</span></span>
<span id="L50"><span class="lineNum"> 50</span> <span class="tlaGNC"> 114 : if (k &lt; n) {</span></span>
<span id="L51"><span class="lineNum"> 51</span> <span class="tlaGNC"> 168 : for (int i = 0; i &lt; n - k; ++i) {</span></span>
<span id="L52"><span class="lineNum"> 52</span> <span class="tlaGNC"> 120 : featuresKBest.erase(featuresKBest.begin());</span></span>
<span id="L53"><span class="lineNum"> 53</span> <span class="tlaGNC"> 120 : scoresKBest.erase(scoresKBest.begin());</span></span>
<span id="L46"><span class="lineNum"> 46</span> <span class="tlaGNC"> 230 : if (ascending) {</span></span>
<span id="L47"><span class="lineNum"> 47</span> <span class="tlaGNC"> 38 : sort(featuresKBest.begin(), featuresKBest.end(), [&amp;](int i, int j)</span></span>
<span id="L48"><span class="lineNum"> 48</span> <span class="tlaGNC"> 906 : { return scoresKBest[i] &lt; scoresKBest[j]; });</span></span>
<span id="L49"><span class="lineNum"> 49</span> <span class="tlaGNC"> 38 : sort(scoresKBest.begin(), scoresKBest.end(), std::less&lt;double&gt;());</span></span>
<span id="L50"><span class="lineNum"> 50</span> <span class="tlaGNC"> 38 : if (k &lt; n) {</span></span>
<span id="L51"><span class="lineNum"> 51</span> <span class="tlaGNC"> 56 : for (int i = 0; i &lt; n - k; ++i) {</span></span>
<span id="L52"><span class="lineNum"> 52</span> <span class="tlaGNC"> 40 : featuresKBest.erase(featuresKBest.begin());</span></span>
<span id="L53"><span class="lineNum"> 53</span> <span class="tlaGNC"> 40 : scoresKBest.erase(scoresKBest.begin());</span></span>
<span id="L54"><span class="lineNum"> 54</span> : }</span>
<span id="L55"><span class="lineNum"> 55</span> : }</span>
<span id="L56"><span class="lineNum"> 56</span> : } else {</span>
<span id="L57"><span class="lineNum"> 57</span> <span class="tlaGNC"> 576 : sort(featuresKBest.begin(), featuresKBest.end(), [&amp;](int i, int j)</span></span>
<span id="L58"><span class="lineNum"> 58</span> <span class="tlaGNC"> 97212 : { return scoresKBest[i] &gt; scoresKBest[j]; });</span></span>
<span id="L59"><span class="lineNum"> 59</span> <span class="tlaGNC"> 576 : sort(scoresKBest.begin(), scoresKBest.end(), std::greater&lt;double&gt;());</span></span>
<span id="L60"><span class="lineNum"> 60</span> <span class="tlaGNC"> 576 : featuresKBest.resize(k);</span></span>
<span id="L61"><span class="lineNum"> 61</span> <span class="tlaGNC"> 576 : scoresKBest.resize(k);</span></span>
<span id="L57"><span class="lineNum"> 57</span> <span class="tlaGNC"> 192 : sort(featuresKBest.begin(), featuresKBest.end(), [&amp;](int i, int j)</span></span>
<span id="L58"><span class="lineNum"> 58</span> <span class="tlaGNC"> 32404 : { return scoresKBest[i] &gt; scoresKBest[j]; });</span></span>
<span id="L59"><span class="lineNum"> 59</span> <span class="tlaGNC"> 192 : sort(scoresKBest.begin(), scoresKBest.end(), std::greater&lt;double&gt;());</span></span>
<span id="L60"><span class="lineNum"> 60</span> <span class="tlaGNC"> 192 : featuresKBest.resize(k);</span></span>
<span id="L61"><span class="lineNum"> 61</span> <span class="tlaGNC"> 192 : scoresKBest.resize(k);</span></span>
<span id="L62"><span class="lineNum"> 62</span> : }</span>
<span id="L63"><span class="lineNum"> 63</span> <span class="tlaGNC"> 1380 : return featuresKBest;</span></span>
<span id="L64"><span class="lineNum"> 64</span> <span class="tlaGNC"> 16266 : }</span></span>
<span id="L65"><span class="lineNum"> 65</span> <span class="tlaGNC"> 48 : std::vector&lt;double&gt; Metrics::getScoresKBest() const</span></span>
<span id="L63"><span class="lineNum"> 63</span> <span class="tlaGNC"> 460 : return featuresKBest;</span></span>
<span id="L64"><span class="lineNum"> 64</span> <span class="tlaGNC"> 5422 : }</span></span>
<span id="L65"><span class="lineNum"> 65</span> <span class="tlaGNC"> 16 : std::vector&lt;double&gt; Metrics::getScoresKBest() const</span></span>
<span id="L66"><span class="lineNum"> 66</span> : {</span>
<span id="L67"><span class="lineNum"> 67</span> <span class="tlaGNC"> 48 : return scoresKBest;</span></span>
<span id="L67"><span class="lineNum"> 67</span> <span class="tlaGNC"> 16 : return scoresKBest;</span></span>
<span id="L68"><span class="lineNum"> 68</span> : }</span>
<span id="L69"><span class="lineNum"> 69</span> : </span>
<span id="L70"><span class="lineNum"> 70</span> <span class="tlaGNC"> 204 : torch::Tensor Metrics::conditionalEdge(const torch::Tensor&amp; weights)</span></span>
<span id="L70"><span class="lineNum"> 70</span> <span class="tlaGNC"> 68 : torch::Tensor Metrics::conditionalEdge(const torch::Tensor&amp; weights)</span></span>
<span id="L71"><span class="lineNum"> 71</span> : {</span>
<span id="L72"><span class="lineNum"> 72</span> <span class="tlaGNC"> 204 : auto result = std::vector&lt;double&gt;();</span></span>
<span id="L73"><span class="lineNum"> 73</span> <span class="tlaGNC"> 204 : auto source = std::vector&lt;std::string&gt;(features);</span></span>
<span id="L74"><span class="lineNum"> 74</span> <span class="tlaGNC"> 204 : source.push_back(className);</span></span>
<span id="L75"><span class="lineNum"> 75</span> <span class="tlaGNC"> 204 : auto combinations = doCombinations(source);</span></span>
<span id="L72"><span class="lineNum"> 72</span> <span class="tlaGNC"> 68 : auto result = std::vector&lt;double&gt;();</span></span>
<span id="L73"><span class="lineNum"> 73</span> <span class="tlaGNC"> 68 : auto source = std::vector&lt;std::string&gt;(features);</span></span>
<span id="L74"><span class="lineNum"> 74</span> <span class="tlaGNC"> 68 : source.push_back(className);</span></span>
<span id="L75"><span class="lineNum"> 75</span> <span class="tlaGNC"> 68 : auto combinations = doCombinations(source);</span></span>
<span id="L76"><span class="lineNum"> 76</span> : // Compute class prior</span>
<span id="L77"><span class="lineNum"> 77</span> <span class="tlaGNC"> 204 : auto margin = torch::zeros({ classNumStates }, torch::kFloat);</span></span>
<span id="L78"><span class="lineNum"> 78</span> <span class="tlaGNC"> 1104 : for (int value = 0; value &lt; classNumStates; ++value) {</span></span>
<span id="L79"><span class="lineNum"> 79</span> <span class="tlaGNC"> 3600 : auto mask = samples.index({ -1, &quot;...&quot; }) == value;</span></span>
<span id="L80"><span class="lineNum"> 80</span> <span class="tlaGNC"> 900 : margin[value] = mask.sum().item&lt;double&gt;() / samples.size(1);</span></span>
<span id="L81"><span class="lineNum"> 81</span> <span class="tlaGNC"> 900 : }</span></span>
<span id="L82"><span class="lineNum"> 82</span> <span class="tlaGNC"> 5508 : for (auto [first, second] : combinations) {</span></span>
<span id="L83"><span class="lineNum"> 83</span> <span class="tlaGNC"> 5304 : int index_first = find(features.begin(), features.end(), first) - features.begin();</span></span>
<span id="L84"><span class="lineNum"> 84</span> <span class="tlaGNC"> 5304 : int index_second = find(features.begin(), features.end(), second) - features.begin();</span></span>
<span id="L85"><span class="lineNum"> 85</span> <span class="tlaGNC"> 5304 : double accumulated = 0;</span></span>
<span id="L86"><span class="lineNum"> 86</span> <span class="tlaGNC"> 31440 : for (int value = 0; value &lt; classNumStates; ++value) {</span></span>
<span id="L87"><span class="lineNum"> 87</span> <span class="tlaGNC"> 104544 : auto mask = samples.index({ -1, &quot;...&quot; }) == value;</span></span>
<span id="L88"><span class="lineNum"> 88</span> <span class="tlaGNC"> 78408 : auto first_dataset = samples.index({ index_first, mask });</span></span>
<span id="L89"><span class="lineNum"> 89</span> <span class="tlaGNC"> 78408 : auto second_dataset = samples.index({ index_second, mask });</span></span>
<span id="L90"><span class="lineNum"> 90</span> <span class="tlaGNC"> 52272 : auto weights_dataset = weights.index({ mask });</span></span>
<span id="L91"><span class="lineNum"> 91</span> <span class="tlaGNC"> 52272 : auto mi = mutualInformation(first_dataset, second_dataset, weights_dataset);</span></span>
<span id="L92"><span class="lineNum"> 92</span> <span class="tlaGNC"> 26136 : auto pb = margin[value].item&lt;double&gt;();</span></span>
<span id="L93"><span class="lineNum"> 93</span> <span class="tlaGNC"> 26136 : accumulated += pb * mi;</span></span>
<span id="L94"><span class="lineNum"> 94</span> <span class="tlaGNC"> 26136 : }</span></span>
<span id="L95"><span class="lineNum"> 95</span> <span class="tlaGNC"> 5304 : result.push_back(accumulated);</span></span>
<span id="L96"><span class="lineNum"> 96</span> <span class="tlaGNC"> 5304 : }</span></span>
<span id="L97"><span class="lineNum"> 97</span> <span class="tlaGNC"> 204 : long n_vars = source.size();</span></span>
<span id="L98"><span class="lineNum"> 98</span> <span class="tlaGNC"> 204 : auto matrix = torch::zeros({ n_vars, n_vars });</span></span>
<span id="L99"><span class="lineNum"> 99</span> <span class="tlaGNC"> 204 : auto indices = torch::triu_indices(n_vars, n_vars, 1);</span></span>
<span id="L100"><span class="lineNum"> 100</span> <span class="tlaGNC"> 5508 : for (auto i = 0; i &lt; result.size(); ++i) {</span></span>
<span id="L101"><span class="lineNum"> 101</span> <span class="tlaGNC"> 5304 : auto x = indices[0][i];</span></span>
<span id="L102"><span class="lineNum"> 102</span> <span class="tlaGNC"> 5304 : auto y = indices[1][i];</span></span>
<span id="L103"><span class="lineNum"> 103</span> <span class="tlaGNC"> 5304 : matrix[x][y] = result[i];</span></span>
<span id="L104"><span class="lineNum"> 104</span> <span class="tlaGNC"> 5304 : matrix[y][x] = result[i];</span></span>
<span id="L105"><span class="lineNum"> 105</span> <span class="tlaGNC"> 5304 : }</span></span>
<span id="L106"><span class="lineNum"> 106</span> <span class="tlaGNC"> 408 : return matrix;</span></span>
<span id="L107"><span class="lineNum"> 107</span> <span class="tlaGNC"> 131784 : }</span></span>
<span id="L108"><span class="lineNum"> 108</span> <span class="tlaGNC"> 50295 : double Metrics::entropy(const torch::Tensor&amp; feature, const torch::Tensor&amp; weights)</span></span>
<span id="L77"><span class="lineNum"> 77</span> <span class="tlaGNC"> 68 : auto margin = torch::zeros({ classNumStates }, torch::kFloat);</span></span>
<span id="L78"><span class="lineNum"> 78</span> <span class="tlaGNC"> 368 : for (int value = 0; value &lt; classNumStates; ++value) {</span></span>
<span id="L79"><span class="lineNum"> 79</span> <span class="tlaGNC"> 1200 : auto mask = samples.index({ -1, &quot;...&quot; }) == value;</span></span>
<span id="L80"><span class="lineNum"> 80</span> <span class="tlaGNC"> 300 : margin[value] = mask.sum().item&lt;double&gt;() / samples.size(1);</span></span>
<span id="L81"><span class="lineNum"> 81</span> <span class="tlaGNC"> 300 : }</span></span>
<span id="L82"><span class="lineNum"> 82</span> <span class="tlaGNC"> 1836 : for (auto [first, second] : combinations) {</span></span>
<span id="L83"><span class="lineNum"> 83</span> <span class="tlaGNC"> 1768 : int index_first = find(features.begin(), features.end(), first) - features.begin();</span></span>
<span id="L84"><span class="lineNum"> 84</span> <span class="tlaGNC"> 1768 : int index_second = find(features.begin(), features.end(), second) - features.begin();</span></span>
<span id="L85"><span class="lineNum"> 85</span> <span class="tlaGNC"> 1768 : double accumulated = 0;</span></span>
<span id="L86"><span class="lineNum"> 86</span> <span class="tlaGNC"> 10480 : for (int value = 0; value &lt; classNumStates; ++value) {</span></span>
<span id="L87"><span class="lineNum"> 87</span> <span class="tlaGNC"> 34848 : auto mask = samples.index({ -1, &quot;...&quot; }) == value;</span></span>
<span id="L88"><span class="lineNum"> 88</span> <span class="tlaGNC"> 26136 : auto first_dataset = samples.index({ index_first, mask });</span></span>
<span id="L89"><span class="lineNum"> 89</span> <span class="tlaGNC"> 26136 : auto second_dataset = samples.index({ index_second, mask });</span></span>
<span id="L90"><span class="lineNum"> 90</span> <span class="tlaGNC"> 17424 : auto weights_dataset = weights.index({ mask });</span></span>
<span id="L91"><span class="lineNum"> 91</span> <span class="tlaGNC"> 17424 : auto mi = mutualInformation(first_dataset, second_dataset, weights_dataset);</span></span>
<span id="L92"><span class="lineNum"> 92</span> <span class="tlaGNC"> 8712 : auto pb = margin[value].item&lt;double&gt;();</span></span>
<span id="L93"><span class="lineNum"> 93</span> <span class="tlaGNC"> 8712 : accumulated += pb * mi;</span></span>
<span id="L94"><span class="lineNum"> 94</span> <span class="tlaGNC"> 8712 : }</span></span>
<span id="L95"><span class="lineNum"> 95</span> <span class="tlaGNC"> 1768 : result.push_back(accumulated);</span></span>
<span id="L96"><span class="lineNum"> 96</span> <span class="tlaGNC"> 1768 : }</span></span>
<span id="L97"><span class="lineNum"> 97</span> <span class="tlaGNC"> 68 : long n_vars = source.size();</span></span>
<span id="L98"><span class="lineNum"> 98</span> <span class="tlaGNC"> 68 : auto matrix = torch::zeros({ n_vars, n_vars });</span></span>
<span id="L99"><span class="lineNum"> 99</span> <span class="tlaGNC"> 68 : auto indices = torch::triu_indices(n_vars, n_vars, 1);</span></span>
<span id="L100"><span class="lineNum"> 100</span> <span class="tlaGNC"> 1836 : for (auto i = 0; i &lt; result.size(); ++i) {</span></span>
<span id="L101"><span class="lineNum"> 101</span> <span class="tlaGNC"> 1768 : auto x = indices[0][i];</span></span>
<span id="L102"><span class="lineNum"> 102</span> <span class="tlaGNC"> 1768 : auto y = indices[1][i];</span></span>
<span id="L103"><span class="lineNum"> 103</span> <span class="tlaGNC"> 1768 : matrix[x][y] = result[i];</span></span>
<span id="L104"><span class="lineNum"> 104</span> <span class="tlaGNC"> 1768 : matrix[y][x] = result[i];</span></span>
<span id="L105"><span class="lineNum"> 105</span> <span class="tlaGNC"> 1768 : }</span></span>
<span id="L106"><span class="lineNum"> 106</span> <span class="tlaGNC"> 136 : return matrix;</span></span>
<span id="L107"><span class="lineNum"> 107</span> <span class="tlaGNC"> 43928 : }</span></span>
<span id="L108"><span class="lineNum"> 108</span> <span class="tlaGNC"> 16480 : double Metrics::entropy(const torch::Tensor&amp; feature, const torch::Tensor&amp; weights)</span></span>
<span id="L109"><span class="lineNum"> 109</span> : {</span>
<span id="L110"><span class="lineNum"> 110</span> <span class="tlaGNC"> 50295 : torch::Tensor counts = feature.bincount(weights);</span></span>
<span id="L111"><span class="lineNum"> 111</span> <span class="tlaGNC"> 50295 : double totalWeight = counts.sum().item&lt;double&gt;();</span></span>
<span id="L112"><span class="lineNum"> 112</span> <span class="tlaGNC"> 50295 : torch::Tensor probs = counts.to(torch::kFloat) / totalWeight;</span></span>
<span id="L113"><span class="lineNum"> 113</span> <span class="tlaGNC"> 50295 : torch::Tensor logProbs = torch::log(probs);</span></span>
<span id="L114"><span class="lineNum"> 114</span> <span class="tlaGNC"> 50295 : torch::Tensor entropy = -probs * logProbs;</span></span>
<span id="L115"><span class="lineNum"> 115</span> <span class="tlaGNC"> 100590 : return entropy.nansum().item&lt;double&gt;();</span></span>
<span id="L116"><span class="lineNum"> 116</span> <span class="tlaGNC"> 50295 : }</span></span>
<span id="L110"><span class="lineNum"> 110</span> <span class="tlaGNC"> 16480 : torch::Tensor counts = feature.bincount(weights);</span></span>
<span id="L111"><span class="lineNum"> 111</span> <span class="tlaGNC"> 16480 : double totalWeight = counts.sum().item&lt;double&gt;();</span></span>
<span id="L112"><span class="lineNum"> 112</span> <span class="tlaGNC"> 16480 : torch::Tensor probs = counts.to(torch::kFloat) / totalWeight;</span></span>
<span id="L113"><span class="lineNum"> 113</span> <span class="tlaGNC"> 16480 : torch::Tensor logProbs = torch::log(probs);</span></span>
<span id="L114"><span class="lineNum"> 114</span> <span class="tlaGNC"> 16480 : torch::Tensor entropy = -probs * logProbs;</span></span>
<span id="L115"><span class="lineNum"> 115</span> <span class="tlaGNC"> 32960 : return entropy.nansum().item&lt;double&gt;();</span></span>
<span id="L116"><span class="lineNum"> 116</span> <span class="tlaGNC"> 16480 : }</span></span>
<span id="L117"><span class="lineNum"> 117</span> : // H(Y|X) = sum_{x in X} p(x) H(Y|X=x)</span>
<span id="L118"><span class="lineNum"> 118</span> <span class="tlaGNC"> 44793 : double Metrics::conditionalEntropy(const torch::Tensor&amp; firstFeature, const torch::Tensor&amp; secondFeature, const torch::Tensor&amp; weights)</span></span>
<span id="L118"><span class="lineNum"> 118</span> <span class="tlaGNC"> 14836 : double Metrics::conditionalEntropy(const torch::Tensor&amp; firstFeature, const torch::Tensor&amp; secondFeature, const torch::Tensor&amp; weights)</span></span>
<span id="L119"><span class="lineNum"> 119</span> : {</span>
<span id="L120"><span class="lineNum"> 120</span> <span class="tlaGNC"> 44793 : int numSamples = firstFeature.sizes()[0];</span></span>
<span id="L121"><span class="lineNum"> 121</span> <span class="tlaGNC"> 44793 : torch::Tensor featureCounts = secondFeature.bincount(weights);</span></span>
<span id="L122"><span class="lineNum"> 122</span> <span class="tlaGNC"> 44793 : std::unordered_map&lt;int, std::unordered_map&lt;int, double&gt;&gt; jointCounts;</span></span>
<span id="L123"><span class="lineNum"> 123</span> <span class="tlaGNC"> 44793 : double totalWeight = 0;</span></span>
<span id="L124"><span class="lineNum"> 124</span> <span class="tlaGNC"> 8954403 : for (auto i = 0; i &lt; numSamples; i++) {</span></span>
<span id="L125"><span class="lineNum"> 125</span> <span class="tlaGNC"> 8909610 : jointCounts[secondFeature[i].item&lt;int&gt;()][firstFeature[i].item&lt;int&gt;()] += weights[i].item&lt;double&gt;();</span></span>
<span id="L126"><span class="lineNum"> 126</span> <span class="tlaGNC"> 8909610 : totalWeight += weights[i].item&lt;float&gt;();</span></span>
<span id="L120"><span class="lineNum"> 120</span> <span class="tlaGNC"> 14836 : int numSamples = firstFeature.sizes()[0];</span></span>
<span id="L121"><span class="lineNum"> 121</span> <span class="tlaGNC"> 14836 : torch::Tensor featureCounts = secondFeature.bincount(weights);</span></span>
<span id="L122"><span class="lineNum"> 122</span> <span class="tlaGNC"> 14836 : std::unordered_map&lt;int, std::unordered_map&lt;int, double&gt;&gt; jointCounts;</span></span>
<span id="L123"><span class="lineNum"> 123</span> <span class="tlaGNC"> 14836 : double totalWeight = 0;</span></span>
<span id="L124"><span class="lineNum"> 124</span> <span class="tlaGNC"> 2946924 : for (auto i = 0; i &lt; numSamples; i++) {</span></span>
<span id="L125"><span class="lineNum"> 125</span> <span class="tlaGNC"> 2932088 : jointCounts[secondFeature[i].item&lt;int&gt;()][firstFeature[i].item&lt;int&gt;()] += weights[i].item&lt;double&gt;();</span></span>
<span id="L126"><span class="lineNum"> 126</span> <span class="tlaGNC"> 2932088 : totalWeight += weights[i].item&lt;float&gt;();</span></span>
<span id="L127"><span class="lineNum"> 127</span> : }</span>
<span id="L128"><span class="lineNum"> 128</span> <span class="tlaGNC"> 44793 : if (totalWeight == 0)</span></span>
<span id="L128"><span class="lineNum"> 128</span> <span class="tlaGNC"> 14836 : if (totalWeight == 0)</span></span>
<span id="L129"><span class="lineNum"> 129</span> <span class="tlaUNC tlaBgUNC"> 0 : return 0;</span></span>
<span id="L130"><span class="lineNum"> 130</span> <span class="tlaGNC tlaBgGNC"> 44793 : double entropyValue = 0;</span></span>
<span id="L131"><span class="lineNum"> 131</span> <span class="tlaGNC"> 222747 : for (int value = 0; value &lt; featureCounts.sizes()[0]; ++value) {</span></span>
<span id="L132"><span class="lineNum"> 132</span> <span class="tlaGNC"> 177954 : double p_f = featureCounts[value].item&lt;double&gt;() / totalWeight;</span></span>
<span id="L133"><span class="lineNum"> 133</span> <span class="tlaGNC"> 177954 : double entropy_f = 0;</span></span>
<span id="L134"><span class="lineNum"> 134</span> <span class="tlaGNC"> 601680 : for (auto&amp; [label, jointCount] : jointCounts[value]) {</span></span>
<span id="L135"><span class="lineNum"> 135</span> <span class="tlaGNC"> 423726 : double p_l_f = jointCount / featureCounts[value].item&lt;double&gt;();</span></span>
<span id="L136"><span class="lineNum"> 136</span> <span class="tlaGNC"> 423726 : if (p_l_f &gt; 0) {</span></span>
<span id="L137"><span class="lineNum"> 137</span> <span class="tlaGNC"> 423726 : entropy_f -= p_l_f * log(p_l_f);</span></span>
<span id="L130"><span class="lineNum"> 130</span> <span class="tlaGNC tlaBgGNC"> 14836 : double entropyValue = 0;</span></span>
<span id="L131"><span class="lineNum"> 131</span> <span class="tlaGNC"> 73754 : for (int value = 0; value &lt; featureCounts.sizes()[0]; ++value) {</span></span>
<span id="L132"><span class="lineNum"> 132</span> <span class="tlaGNC"> 58918 : double p_f = featureCounts[value].item&lt;double&gt;() / totalWeight;</span></span>
<span id="L133"><span class="lineNum"> 133</span> <span class="tlaGNC"> 58918 : double entropy_f = 0;</span></span>
<span id="L134"><span class="lineNum"> 134</span> <span class="tlaGNC"> 198966 : for (auto&amp; [label, jointCount] : jointCounts[value]) {</span></span>
<span id="L135"><span class="lineNum"> 135</span> <span class="tlaGNC"> 140048 : double p_l_f = jointCount / featureCounts[value].item&lt;double&gt;();</span></span>
<span id="L136"><span class="lineNum"> 136</span> <span class="tlaGNC"> 140048 : if (p_l_f &gt; 0) {</span></span>
<span id="L137"><span class="lineNum"> 137</span> <span class="tlaGNC"> 140048 : entropy_f -= p_l_f * log(p_l_f);</span></span>
<span id="L138"><span class="lineNum"> 138</span> : } else {</span>
<span id="L139"><span class="lineNum"> 139</span> <span class="tlaUNC tlaBgUNC"> 0 : entropy_f = 0;</span></span>
<span id="L140"><span class="lineNum"> 140</span> : }</span>
<span id="L141"><span class="lineNum"> 141</span> : }</span>
<span id="L142"><span class="lineNum"> 142</span> <span class="tlaGNC tlaBgGNC"> 177954 : entropyValue += p_f * entropy_f;</span></span>
<span id="L142"><span class="lineNum"> 142</span> <span class="tlaGNC tlaBgGNC"> 58918 : entropyValue += p_f * entropy_f;</span></span>
<span id="L143"><span class="lineNum"> 143</span> : }</span>
<span id="L144"><span class="lineNum"> 144</span> <span class="tlaGNC"> 44793 : return entropyValue;</span></span>
<span id="L145"><span class="lineNum"> 145</span> <span class="tlaGNC"> 44793 : }</span></span>
<span id="L144"><span class="lineNum"> 144</span> <span class="tlaGNC"> 14836 : return entropyValue;</span></span>
<span id="L145"><span class="lineNum"> 145</span> <span class="tlaGNC"> 14836 : }</span></span>
<span id="L146"><span class="lineNum"> 146</span> : // I(X;Y) = H(Y) - H(Y|X)</span>
<span id="L147"><span class="lineNum"> 147</span> <span class="tlaGNC"> 44793 : double Metrics::mutualInformation(const torch::Tensor&amp; firstFeature, const torch::Tensor&amp; secondFeature, const torch::Tensor&amp; weights)</span></span>
<span id="L147"><span class="lineNum"> 147</span> <span class="tlaGNC"> 14836 : double Metrics::mutualInformation(const torch::Tensor&amp; firstFeature, const torch::Tensor&amp; secondFeature, const torch::Tensor&amp; weights)</span></span>
<span id="L148"><span class="lineNum"> 148</span> : {</span>
<span id="L149"><span class="lineNum"> 149</span> <span class="tlaGNC"> 44793 : return entropy(firstFeature, weights) - conditionalEntropy(firstFeature, secondFeature, weights);</span></span>
<span id="L149"><span class="lineNum"> 149</span> <span class="tlaGNC"> 14836 : return entropy(firstFeature, weights) - conditionalEntropy(firstFeature, secondFeature, weights);</span></span>
<span id="L150"><span class="lineNum"> 150</span> : }</span>
<span id="L151"><span class="lineNum"> 151</span> : /*</span>
<span id="L152"><span class="lineNum"> 152</span> : Compute the maximum spanning tree considering the weights as distances</span>
<span id="L153"><span class="lineNum"> 153</span> : and the indices of the weights as nodes of this square matrix using</span>
<span id="L154"><span class="lineNum"> 154</span> : Kruskal algorithm</span>
<span id="L155"><span class="lineNum"> 155</span> : */</span>
<span id="L156"><span class="lineNum"> 156</span> <span class="tlaGNC"> 174 : std::vector&lt;std::pair&lt;int, int&gt;&gt; Metrics::maximumSpanningTree(const std::vector&lt;std::string&gt;&amp; features, const torch::Tensor&amp; weights, const int root)</span></span>
<span id="L156"><span class="lineNum"> 156</span> <span class="tlaGNC"> 58 : std::vector&lt;std::pair&lt;int, int&gt;&gt; Metrics::maximumSpanningTree(const std::vector&lt;std::string&gt;&amp; features, const torch::Tensor&amp; weights, const int root)</span></span>
<span id="L157"><span class="lineNum"> 157</span> : {</span>
<span id="L158"><span class="lineNum"> 158</span> <span class="tlaGNC"> 174 : auto mst = MST(features, weights, root);</span></span>
<span id="L159"><span class="lineNum"> 159</span> <span class="tlaGNC"> 348 : return mst.maximumSpanningTree();</span></span>
<span id="L160"><span class="lineNum"> 160</span> <span class="tlaGNC"> 174 : }</span></span>
<span id="L158"><span class="lineNum"> 158</span> <span class="tlaGNC"> 58 : auto mst = MST(features, weights, root);</span></span>
<span id="L159"><span class="lineNum"> 159</span> <span class="tlaGNC"> 116 : return mst.maximumSpanningTree();</span></span>
<span id="L160"><span class="lineNum"> 160</span> <span class="tlaGNC"> 58 : }</span></span>
<span id="L161"><span class="lineNum"> 161</span> : }</span>
</pre>
</td>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,35 +65,35 @@
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L39">int bayesnet::Metrics::pop_first&lt;int&gt;(std::vector&lt;int, std::allocator&lt;int&gt; &gt;&amp;)</a></td>
<td class="coverFnHi">68</td>
<td class="coverFnHi">20</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L27">std::vector&lt;std::pair&lt;int, int&gt;, std::allocator&lt;std::pair&lt;int, int&gt; &gt; &gt; bayesnet::Metrics::doCombinations&lt;int&gt;(std::vector&lt;int, std::allocator&lt;int&gt; &gt; const&amp;)</a></td>
<td class="coverFnHi">1251</td>
<td class="coverFnHi">384</td>
</tr>
<tr>
<td class="coverFnAlias"><a href="BayesMetrics.h.gcov.html#L27">std::vector<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > > bayesnet::Metrics::doCombinations<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >(std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&)</a></td>
<td class="coverFnAliasHi">204</td>
<td class="coverFnAliasHi">68</td>
</tr>
<tr>
<td class="coverFnAlias"><a href="BayesMetrics.h.gcov.html#L27">std::vector<std::pair<int, int>, std::allocator<std::pair<int, int> > > bayesnet::Metrics::doCombinations<int>(std::vector<int, std::allocator<int> > const&)</a></td>
<td class="coverFnAliasHi">1047</td>
<td class="coverFnAliasHi">316</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L13">bayesnet::Metrics::Metrics()</a></td>
<td class="coverFnHi">2658</td>
<td class="coverFnHi">886</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,35 +65,35 @@
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L13">bayesnet::Metrics::Metrics()</a></td>
<td class="coverFnHi">2658</td>
<td class="coverFnHi">886</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L39">int bayesnet::Metrics::pop_first&lt;int&gt;(std::vector&lt;int, std::allocator&lt;int&gt; &gt;&amp;)</a></td>
<td class="coverFnHi">68</td>
<td class="coverFnHi">20</td>
</tr>
<tr>
<td class="coverFn"><a href="BayesMetrics.h.gcov.html#L27">std::vector&lt;std::pair&lt;int, int&gt;, std::allocator&lt;std::pair&lt;int, int&gt; &gt; &gt; bayesnet::Metrics::doCombinations&lt;int&gt;(std::vector&lt;int, std::allocator&lt;int&gt; &gt; const&amp;)</a></td>
<td class="coverFnHi">1251</td>
<td class="coverFnHi">384</td>
</tr>
<tr>
<td class="coverFnAlias"><a href="BayesMetrics.h.gcov.html#L27">std::vector<std::pair<int, int>, std::allocator<std::pair<int, int> > > bayesnet::Metrics::doCombinations<int>(std::vector<int, std::allocator<int> > const&)</a></td>
<td class="coverFnAliasHi">1047</td>
<td class="coverFnAliasHi">316</td>
</tr>
<tr>
<td class="coverFnAlias"><a href="BayesMetrics.h.gcov.html#L27">std::vector<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > > bayesnet::Metrics::doCombinations<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >(std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&)</a></td>
<td class="coverFnAliasHi">204</td>
<td class="coverFnAliasHi">68</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -74,7 +74,7 @@
<span id="L12"><span class="lineNum"> 12</span> : namespace bayesnet {</span>
<span id="L13"><span class="lineNum"> 13</span> : class Metrics {</span>
<span id="L14"><span class="lineNum"> 14</span> : public:</span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC tlaBgGNC"> 2658 : Metrics() = default;</span></span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC tlaBgGNC"> 886 : Metrics() = default;</span></span>
<span id="L16"><span class="lineNum"> 16</span> : Metrics(const torch::Tensor&amp; samples, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates);</span>
<span id="L17"><span class="lineNum"> 17</span> : Metrics(const std::vector&lt;std::vector&lt;int&gt;&gt;&amp; vsamples, const std::vector&lt;int&gt;&amp; labels, const std::vector&lt;std::string&gt;&amp; features, const std::string&amp; className, const int classNumStates);</span>
<span id="L18"><span class="lineNum"> 18</span> : std::vector&lt;int&gt; SelectKBestWeighted(const torch::Tensor&amp; weights, bool ascending = false, unsigned k = 0);</span>
@@ -88,23 +88,23 @@
<span id="L26"><span class="lineNum"> 26</span> : double entropy(const torch::Tensor&amp; feature, const torch::Tensor&amp; weights);</span>
<span id="L27"><span class="lineNum"> 27</span> : std::vector&lt;std::string&gt; features;</span>
<span id="L28"><span class="lineNum"> 28</span> : template &lt;class T&gt;</span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 1251 : std::vector&lt;std::pair&lt;T, T&gt;&gt; doCombinations(const std::vector&lt;T&gt;&amp; source)</span></span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 384 : std::vector&lt;std::pair&lt;T, T&gt;&gt; doCombinations(const std::vector&lt;T&gt;&amp; source)</span></span>
<span id="L30"><span class="lineNum"> 30</span> : {</span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 1251 : std::vector&lt;std::pair&lt;T, T&gt;&gt; result;</span></span>
<span id="L32"><span class="lineNum"> 32</span> <span class="tlaGNC"> 6532 : for (int i = 0; i &lt; source.size(); ++i) {</span></span>
<span id="L33"><span class="lineNum"> 33</span> <span class="tlaGNC"> 5281 : T temp = source[i];</span></span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 16445 : for (int j = i + 1; j &lt; source.size(); ++j) {</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 11164 : result.push_back({ temp, source[j] });</span></span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 384 : std::vector&lt;std::pair&lt;T, T&gt;&gt; result;</span></span>
<span id="L32"><span class="lineNum"> 32</span> <span class="tlaGNC"> 2026 : for (int i = 0; i &lt; source.size(); ++i) {</span></span>
<span id="L33"><span class="lineNum"> 33</span> <span class="tlaGNC"> 1642 : T temp = source[i];</span></span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 5180 : for (int j = i + 1; j &lt; source.size(); ++j) {</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 3538 : result.push_back({ temp, source[j] });</span></span>
<span id="L36"><span class="lineNum"> 36</span> : }</span>
<span id="L37"><span class="lineNum"> 37</span> : }</span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC"> 1251 : return result;</span></span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC"> 384 : return result;</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaUNC tlaBgUNC"> 0 : }</span></span>
<span id="L40"><span class="lineNum"> 40</span> : template &lt;class T&gt;</span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC tlaBgGNC"> 68 : T pop_first(std::vector&lt;T&gt;&amp; v)</span></span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC tlaBgGNC"> 20 : T pop_first(std::vector&lt;T&gt;&amp; v)</span></span>
<span id="L42"><span class="lineNum"> 42</span> : {</span>
<span id="L43"><span class="lineNum"> 43</span> <span class="tlaGNC"> 68 : T temp = v[0];</span></span>
<span id="L44"><span class="lineNum"> 44</span> <span class="tlaGNC"> 68 : v.erase(v.begin());</span></span>
<span id="L45"><span class="lineNum"> 45</span> <span class="tlaGNC"> 68 : return temp;</span></span>
<span id="L43"><span class="lineNum"> 43</span> <span class="tlaGNC"> 20 : T temp = v[0];</span></span>
<span id="L44"><span class="lineNum"> 44</span> <span class="tlaGNC"> 20 : v.erase(v.begin());</span></span>
<span id="L45"><span class="lineNum"> 45</span> <span class="tlaGNC"> 20 : return temp;</span></span>
<span id="L46"><span class="lineNum"> 46</span> : }</span>
<span id="L47"><span class="lineNum"> 47</span> : private:</span>
<span id="L48"><span class="lineNum"> 48</span> : int classNumStates = 0;</span>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,70 +65,70 @@
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L15">bayesnet::Graph::Graph(int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L39">bayesnet::Graph::kruskal_algorithm()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L102">bayesnet::MST::MST(std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, at::Tensor const&amp;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L103">bayesnet::MST::maximumSpanningTree()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L61">bayesnet::reorder(std::vector&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt;, std::allocator&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt; &gt; &gt;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L35">bayesnet::Graph::union_set(int, int)</a></td>
<td class="coverFnHi">954</td>
<td class="coverFnHi">318</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L54">bayesnet::insertElement(std::__cxx11::list&lt;int, std::allocator&lt;int&gt; &gt;&amp;, int)</a></td>
<td class="coverFnHi">954</td>
<td class="coverFnHi">318</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L22">bayesnet::Graph::addEdge(int, int, float)</a></td>
<td class="coverFnHi">3456</td>
<td class="coverFnHi">1152</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L42">auto bayesnet::Graph::kruskal_algorithm()::{lambda(auto:1 const&amp;, auto:2 const&amp;)#1}::operator()&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt;, std::pair&lt;float, std::pair&lt;int, int&gt; &gt; &gt;(std::pair&lt;float, std::pair&lt;int, int&gt; &gt; const&amp;, std::pair&lt;float, std::pair&lt;int, int&gt; &gt; const&amp;) const</a></td>
<td class="coverFnHi">13278</td>
<td class="coverFnHi">4426</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L26">bayesnet::Graph::find_set(int)</a></td>
<td class="coverFnHi">15978</td>
<td class="coverFnHi">5326</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,70 +65,70 @@
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L42">auto bayesnet::Graph::kruskal_algorithm()::{lambda(auto:1 const&amp;, auto:2 const&amp;)#1}::operator()&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt;, std::pair&lt;float, std::pair&lt;int, int&gt; &gt; &gt;(std::pair&lt;float, std::pair&lt;int, int&gt; &gt; const&amp;, std::pair&lt;float, std::pair&lt;int, int&gt; &gt; const&amp;) const</a></td>
<td class="coverFnHi">13278</td>
<td class="coverFnHi">4426</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L15">bayesnet::Graph::Graph(int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L22">bayesnet::Graph::addEdge(int, int, float)</a></td>
<td class="coverFnHi">3456</td>
<td class="coverFnHi">1152</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L26">bayesnet::Graph::find_set(int)</a></td>
<td class="coverFnHi">15978</td>
<td class="coverFnHi">5326</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L39">bayesnet::Graph::kruskal_algorithm()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L35">bayesnet::Graph::union_set(int, int)</a></td>
<td class="coverFnHi">954</td>
<td class="coverFnHi">318</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L102">bayesnet::MST::MST(std::vector&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt;, std::allocator&lt;std::__cxx11::basic_string&lt;char, std::char_traits&lt;char&gt;, std::allocator&lt;char&gt; &gt; &gt; &gt; const&amp;, at::Tensor const&amp;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L103">bayesnet::MST::maximumSpanningTree()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L54">bayesnet::insertElement(std::__cxx11::list&lt;int, std::allocator&lt;int&gt; &gt;&amp;, int)</a></td>
<td class="coverFnHi">954</td>
<td class="coverFnHi">318</td>
</tr>
<tr>
<td class="coverFn"><a href="Mst.cc.gcov.html#L61">bayesnet::reorder(std::vector&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt;, std::allocator&lt;std::pair&lt;float, std::pair&lt;int, int&gt; &gt; &gt; &gt;, int)</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -76,108 +76,108 @@
<span id="L14"><span class="lineNum"> 14</span> : */</span>
<span id="L15"><span class="lineNum"> 15</span> : </span>
<span id="L16"><span class="lineNum"> 16</span> : namespace bayesnet {</span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC tlaBgGNC"> 348 : Graph::Graph(int V) : V(V), parent(std::vector&lt;int&gt;(V))</span></span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC tlaBgGNC"> 116 : Graph::Graph(int V) : V(V), parent(std::vector&lt;int&gt;(V))</span></span>
<span id="L18"><span class="lineNum"> 18</span> : {</span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC"> 1302 : for (int i = 0; i &lt; V; i++)</span></span>
<span id="L20"><span class="lineNum"> 20</span> <span class="tlaGNC"> 1128 : parent[i] = i;</span></span>
<span id="L21"><span class="lineNum"> 21</span> <span class="tlaGNC"> 174 : G.clear();</span></span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 174 : T.clear();</span></span>
<span id="L23"><span class="lineNum"> 23</span> <span class="tlaGNC"> 174 : }</span></span>
<span id="L24"><span class="lineNum"> 24</span> <span class="tlaGNC"> 3456 : void Graph::addEdge(int u, int v, float wt)</span></span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC"> 434 : for (int i = 0; i &lt; V; i++)</span></span>
<span id="L20"><span class="lineNum"> 20</span> <span class="tlaGNC"> 376 : parent[i] = i;</span></span>
<span id="L21"><span class="lineNum"> 21</span> <span class="tlaGNC"> 58 : G.clear();</span></span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 58 : T.clear();</span></span>
<span id="L23"><span class="lineNum"> 23</span> <span class="tlaGNC"> 58 : }</span></span>
<span id="L24"><span class="lineNum"> 24</span> <span class="tlaGNC"> 1152 : void Graph::addEdge(int u, int v, float wt)</span></span>
<span id="L25"><span class="lineNum"> 25</span> : {</span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 3456 : G.push_back({ wt, { u, v } });</span></span>
<span id="L27"><span class="lineNum"> 27</span> <span class="tlaGNC"> 3456 : }</span></span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 15978 : int Graph::find_set(int i)</span></span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 1152 : G.push_back({ wt, { u, v } });</span></span>
<span id="L27"><span class="lineNum"> 27</span> <span class="tlaGNC"> 1152 : }</span></span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 5326 : int Graph::find_set(int i)</span></span>
<span id="L29"><span class="lineNum"> 29</span> : {</span>
<span id="L30"><span class="lineNum"> 30</span> : // If i is the parent of itself</span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 15978 : if (i == parent[i])</span></span>
<span id="L32"><span class="lineNum"> 32</span> <span class="tlaGNC"> 6912 : return i;</span></span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 5326 : if (i == parent[i])</span></span>
<span id="L32"><span class="lineNum"> 32</span> <span class="tlaGNC"> 2304 : return i;</span></span>
<span id="L33"><span class="lineNum"> 33</span> : else</span>
<span id="L34"><span class="lineNum"> 34</span> : //else recursively find the parent of i</span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 9066 : return find_set(parent[i]);</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 3022 : return find_set(parent[i]);</span></span>
<span id="L36"><span class="lineNum"> 36</span> : }</span>
<span id="L37"><span class="lineNum"> 37</span> <span class="tlaGNC"> 954 : void Graph::union_set(int u, int v)</span></span>
<span id="L37"><span class="lineNum"> 37</span> <span class="tlaGNC"> 318 : void Graph::union_set(int u, int v)</span></span>
<span id="L38"><span class="lineNum"> 38</span> : {</span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 954 : parent[u] = parent[v];</span></span>
<span id="L40"><span class="lineNum"> 40</span> <span class="tlaGNC"> 954 : }</span></span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC"> 174 : void Graph::kruskal_algorithm()</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 318 : parent[u] = parent[v];</span></span>
<span id="L40"><span class="lineNum"> 40</span> <span class="tlaGNC"> 318 : }</span></span>
<span id="L41"><span class="lineNum"> 41</span> <span class="tlaGNC"> 58 : void Graph::kruskal_algorithm()</span></span>
<span id="L42"><span class="lineNum"> 42</span> : {</span>
<span id="L43"><span class="lineNum"> 43</span> : // sort the edges ordered on decreasing weight</span>
<span id="L44"><span class="lineNum"> 44</span> <span class="tlaGNC"> 13452 : stable_sort(G.begin(), G.end(), [](const auto&amp; left, const auto&amp; right) {return left.first &gt; right.first;});</span></span>
<span id="L45"><span class="lineNum"> 45</span> <span class="tlaGNC"> 3630 : for (int i = 0; i &lt; G.size(); i++) {</span></span>
<span id="L44"><span class="lineNum"> 44</span> <span class="tlaGNC"> 4484 : stable_sort(G.begin(), G.end(), [](const auto&amp; left, const auto&amp; right) {return left.first &gt; right.first;});</span></span>
<span id="L45"><span class="lineNum"> 45</span> <span class="tlaGNC"> 1210 : for (int i = 0; i &lt; G.size(); i++) {</span></span>
<span id="L46"><span class="lineNum"> 46</span> : int uSt, vEd;</span>
<span id="L47"><span class="lineNum"> 47</span> <span class="tlaGNC"> 3456 : uSt = find_set(G[i].second.first);</span></span>
<span id="L48"><span class="lineNum"> 48</span> <span class="tlaGNC"> 3456 : vEd = find_set(G[i].second.second);</span></span>
<span id="L49"><span class="lineNum"> 49</span> <span class="tlaGNC"> 3456 : if (uSt != vEd) {</span></span>
<span id="L50"><span class="lineNum"> 50</span> <span class="tlaGNC"> 954 : T.push_back(G[i]); // add to mst std::vector</span></span>
<span id="L51"><span class="lineNum"> 51</span> <span class="tlaGNC"> 954 : union_set(uSt, vEd);</span></span>
<span id="L47"><span class="lineNum"> 47</span> <span class="tlaGNC"> 1152 : uSt = find_set(G[i].second.first);</span></span>
<span id="L48"><span class="lineNum"> 48</span> <span class="tlaGNC"> 1152 : vEd = find_set(G[i].second.second);</span></span>
<span id="L49"><span class="lineNum"> 49</span> <span class="tlaGNC"> 1152 : if (uSt != vEd) {</span></span>
<span id="L50"><span class="lineNum"> 50</span> <span class="tlaGNC"> 318 : T.push_back(G[i]); // add to mst std::vector</span></span>
<span id="L51"><span class="lineNum"> 51</span> <span class="tlaGNC"> 318 : union_set(uSt, vEd);</span></span>
<span id="L52"><span class="lineNum"> 52</span> : }</span>
<span id="L53"><span class="lineNum"> 53</span> : }</span>
<span id="L54"><span class="lineNum"> 54</span> <span class="tlaGNC"> 174 : }</span></span>
<span id="L54"><span class="lineNum"> 54</span> <span class="tlaGNC"> 58 : }</span></span>
<span id="L55"><span class="lineNum"> 55</span> : </span>
<span id="L56"><span class="lineNum"> 56</span> <span class="tlaGNC"> 954 : void insertElement(std::list&lt;int&gt;&amp; variables, int variable)</span></span>
<span id="L56"><span class="lineNum"> 56</span> <span class="tlaGNC"> 318 : void insertElement(std::list&lt;int&gt;&amp; variables, int variable)</span></span>
<span id="L57"><span class="lineNum"> 57</span> : {</span>
<span id="L58"><span class="lineNum"> 58</span> <span class="tlaGNC"> 954 : if (std::find(variables.begin(), variables.end(), variable) == variables.end()) {</span></span>
<span id="L59"><span class="lineNum"> 59</span> <span class="tlaGNC"> 954 : variables.push_front(variable);</span></span>
<span id="L58"><span class="lineNum"> 58</span> <span class="tlaGNC"> 318 : if (std::find(variables.begin(), variables.end(), variable) == variables.end()) {</span></span>
<span id="L59"><span class="lineNum"> 59</span> <span class="tlaGNC"> 318 : variables.push_front(variable);</span></span>
<span id="L60"><span class="lineNum"> 60</span> : }</span>
<span id="L61"><span class="lineNum"> 61</span> <span class="tlaGNC"> 954 : }</span></span>
<span id="L61"><span class="lineNum"> 61</span> <span class="tlaGNC"> 318 : }</span></span>
<span id="L62"><span class="lineNum"> 62</span> : </span>
<span id="L63"><span class="lineNum"> 63</span> <span class="tlaGNC"> 174 : std::vector&lt;std::pair&lt;int, int&gt;&gt; reorder(std::vector&lt;std::pair&lt;float, std::pair&lt;int, int&gt;&gt;&gt; T, int root_original)</span></span>
<span id="L63"><span class="lineNum"> 63</span> <span class="tlaGNC"> 58 : std::vector&lt;std::pair&lt;int, int&gt;&gt; reorder(std::vector&lt;std::pair&lt;float, std::pair&lt;int, int&gt;&gt;&gt; T, int root_original)</span></span>
<span id="L64"><span class="lineNum"> 64</span> : {</span>
<span id="L65"><span class="lineNum"> 65</span> : // Create the edges of a DAG from the MST</span>
<span id="L66"><span class="lineNum"> 66</span> : // replacing unordered_set with list because unordered_set cannot guarantee the order of the elements inserted</span>
<span id="L67"><span class="lineNum"> 67</span> <span class="tlaGNC"> 174 : auto result = std::vector&lt;std::pair&lt;int, int&gt;&gt;();</span></span>
<span id="L68"><span class="lineNum"> 68</span> <span class="tlaGNC"> 174 : auto visited = std::vector&lt;int&gt;();</span></span>
<span id="L69"><span class="lineNum"> 69</span> <span class="tlaGNC"> 174 : auto nextVariables = std::list&lt;int&gt;();</span></span>
<span id="L70"><span class="lineNum"> 70</span> <span class="tlaGNC"> 174 : nextVariables.push_front(root_original);</span></span>
<span id="L71"><span class="lineNum"> 71</span> <span class="tlaGNC"> 1302 : while (nextVariables.size() &gt; 0) {</span></span>
<span id="L72"><span class="lineNum"> 72</span> <span class="tlaGNC"> 1128 : int root = nextVariables.front();</span></span>
<span id="L73"><span class="lineNum"> 73</span> <span class="tlaGNC"> 1128 : nextVariables.pop_front();</span></span>
<span id="L74"><span class="lineNum"> 74</span> <span class="tlaGNC"> 3984 : for (int i = 0; i &lt; T.size(); ++i) {</span></span>
<span id="L75"><span class="lineNum"> 75</span> <span class="tlaGNC"> 2856 : auto [weight, edge] = T[i];</span></span>
<span id="L76"><span class="lineNum"> 76</span> <span class="tlaGNC"> 2856 : auto [from, to] = edge;</span></span>
<span id="L77"><span class="lineNum"> 77</span> <span class="tlaGNC"> 2856 : if (from == root || to == root) {</span></span>
<span id="L78"><span class="lineNum"> 78</span> <span class="tlaGNC"> 954 : visited.insert(visited.begin(), i);</span></span>
<span id="L79"><span class="lineNum"> 79</span> <span class="tlaGNC"> 954 : if (from == root) {</span></span>
<span id="L80"><span class="lineNum"> 80</span> <span class="tlaGNC"> 636 : result.push_back({ from, to });</span></span>
<span id="L81"><span class="lineNum"> 81</span> <span class="tlaGNC"> 636 : insertElement(nextVariables, to);</span></span>
<span id="L67"><span class="lineNum"> 67</span> <span class="tlaGNC"> 58 : auto result = std::vector&lt;std::pair&lt;int, int&gt;&gt;();</span></span>
<span id="L68"><span class="lineNum"> 68</span> <span class="tlaGNC"> 58 : auto visited = std::vector&lt;int&gt;();</span></span>
<span id="L69"><span class="lineNum"> 69</span> <span class="tlaGNC"> 58 : auto nextVariables = std::list&lt;int&gt;();</span></span>
<span id="L70"><span class="lineNum"> 70</span> <span class="tlaGNC"> 58 : nextVariables.push_front(root_original);</span></span>
<span id="L71"><span class="lineNum"> 71</span> <span class="tlaGNC"> 434 : while (nextVariables.size() &gt; 0) {</span></span>
<span id="L72"><span class="lineNum"> 72</span> <span class="tlaGNC"> 376 : int root = nextVariables.front();</span></span>
<span id="L73"><span class="lineNum"> 73</span> <span class="tlaGNC"> 376 : nextVariables.pop_front();</span></span>
<span id="L74"><span class="lineNum"> 74</span> <span class="tlaGNC"> 1328 : for (int i = 0; i &lt; T.size(); ++i) {</span></span>
<span id="L75"><span class="lineNum"> 75</span> <span class="tlaGNC"> 952 : auto [weight, edge] = T[i];</span></span>
<span id="L76"><span class="lineNum"> 76</span> <span class="tlaGNC"> 952 : auto [from, to] = edge;</span></span>
<span id="L77"><span class="lineNum"> 77</span> <span class="tlaGNC"> 952 : if (from == root || to == root) {</span></span>
<span id="L78"><span class="lineNum"> 78</span> <span class="tlaGNC"> 318 : visited.insert(visited.begin(), i);</span></span>
<span id="L79"><span class="lineNum"> 79</span> <span class="tlaGNC"> 318 : if (from == root) {</span></span>
<span id="L80"><span class="lineNum"> 80</span> <span class="tlaGNC"> 212 : result.push_back({ from, to });</span></span>
<span id="L81"><span class="lineNum"> 81</span> <span class="tlaGNC"> 212 : insertElement(nextVariables, to);</span></span>
<span id="L82"><span class="lineNum"> 82</span> : } else {</span>
<span id="L83"><span class="lineNum"> 83</span> <span class="tlaGNC"> 318 : result.push_back({ to, from });</span></span>
<span id="L84"><span class="lineNum"> 84</span> <span class="tlaGNC"> 318 : insertElement(nextVariables, from);</span></span>
<span id="L83"><span class="lineNum"> 83</span> <span class="tlaGNC"> 106 : result.push_back({ to, from });</span></span>
<span id="L84"><span class="lineNum"> 84</span> <span class="tlaGNC"> 106 : insertElement(nextVariables, from);</span></span>
<span id="L85"><span class="lineNum"> 85</span> : }</span>
<span id="L86"><span class="lineNum"> 86</span> : }</span>
<span id="L87"><span class="lineNum"> 87</span> : }</span>
<span id="L88"><span class="lineNum"> 88</span> : // Remove visited</span>
<span id="L89"><span class="lineNum"> 89</span> <span class="tlaGNC"> 2082 : for (int i = 0; i &lt; visited.size(); ++i) {</span></span>
<span id="L90"><span class="lineNum"> 90</span> <span class="tlaGNC"> 954 : T.erase(T.begin() + visited[i]);</span></span>
<span id="L89"><span class="lineNum"> 89</span> <span class="tlaGNC"> 694 : for (int i = 0; i &lt; visited.size(); ++i) {</span></span>
<span id="L90"><span class="lineNum"> 90</span> <span class="tlaGNC"> 318 : T.erase(T.begin() + visited[i]);</span></span>
<span id="L91"><span class="lineNum"> 91</span> : }</span>
<span id="L92"><span class="lineNum"> 92</span> <span class="tlaGNC"> 1128 : visited.clear();</span></span>
<span id="L92"><span class="lineNum"> 92</span> <span class="tlaGNC"> 376 : visited.clear();</span></span>
<span id="L93"><span class="lineNum"> 93</span> : }</span>
<span id="L94"><span class="lineNum"> 94</span> <span class="tlaGNC"> 174 : if (T.size() &gt; 0) {</span></span>
<span id="L94"><span class="lineNum"> 94</span> <span class="tlaGNC"> 58 : if (T.size() &gt; 0) {</span></span>
<span id="L95"><span class="lineNum"> 95</span> <span class="tlaUNC tlaBgUNC"> 0 : for (int i = 0; i &lt; T.size(); ++i) {</span></span>
<span id="L96"><span class="lineNum"> 96</span> <span class="tlaUNC"> 0 : auto [weight, edge] = T[i];</span></span>
<span id="L97"><span class="lineNum"> 97</span> <span class="tlaUNC"> 0 : auto [from, to] = edge;</span></span>
<span id="L98"><span class="lineNum"> 98</span> <span class="tlaUNC"> 0 : result.push_back({ from, to });</span></span>
<span id="L99"><span class="lineNum"> 99</span> : }</span>
<span id="L100"><span class="lineNum"> 100</span> : }</span>
<span id="L101"><span class="lineNum"> 101</span> <span class="tlaGNC tlaBgGNC"> 348 : return result;</span></span>
<span id="L102"><span class="lineNum"> 102</span> <span class="tlaGNC"> 174 : }</span></span>
<span id="L101"><span class="lineNum"> 101</span> <span class="tlaGNC tlaBgGNC"> 116 : return result;</span></span>
<span id="L102"><span class="lineNum"> 102</span> <span class="tlaGNC"> 58 : }</span></span>
<span id="L103"><span class="lineNum"> 103</span> : </span>
<span id="L104"><span class="lineNum"> 104</span> <span class="tlaGNC"> 174 : MST::MST(const std::vector&lt;std::string&gt;&amp; features, const torch::Tensor&amp; weights, const int root) : features(features), weights(weights), root(root) {}</span></span>
<span id="L105"><span class="lineNum"> 105</span> <span class="tlaGNC"> 174 : std::vector&lt;std::pair&lt;int, int&gt;&gt; MST::maximumSpanningTree()</span></span>
<span id="L104"><span class="lineNum"> 104</span> <span class="tlaGNC"> 58 : MST::MST(const std::vector&lt;std::string&gt;&amp; features, const torch::Tensor&amp; weights, const int root) : features(features), weights(weights), root(root) {}</span></span>
<span id="L105"><span class="lineNum"> 105</span> <span class="tlaGNC"> 58 : std::vector&lt;std::pair&lt;int, int&gt;&gt; MST::maximumSpanningTree()</span></span>
<span id="L106"><span class="lineNum"> 106</span> : {</span>
<span id="L107"><span class="lineNum"> 107</span> <span class="tlaGNC"> 174 : auto num_features = features.size();</span></span>
<span id="L108"><span class="lineNum"> 108</span> <span class="tlaGNC"> 174 : Graph g(num_features);</span></span>
<span id="L107"><span class="lineNum"> 107</span> <span class="tlaGNC"> 58 : auto num_features = features.size();</span></span>
<span id="L108"><span class="lineNum"> 108</span> <span class="tlaGNC"> 58 : Graph g(num_features);</span></span>
<span id="L109"><span class="lineNum"> 109</span> : // Make a complete graph</span>
<span id="L110"><span class="lineNum"> 110</span> <span class="tlaGNC"> 1128 : for (int i = 0; i &lt; num_features - 1; ++i) {</span></span>
<span id="L111"><span class="lineNum"> 111</span> <span class="tlaGNC"> 4410 : for (int j = i + 1; j &lt; num_features; ++j) {</span></span>
<span id="L112"><span class="lineNum"> 112</span> <span class="tlaGNC"> 3456 : g.addEdge(i, j, weights[i][j].item&lt;float&gt;());</span></span>
<span id="L110"><span class="lineNum"> 110</span> <span class="tlaGNC"> 376 : for (int i = 0; i &lt; num_features - 1; ++i) {</span></span>
<span id="L111"><span class="lineNum"> 111</span> <span class="tlaGNC"> 1470 : for (int j = i + 1; j &lt; num_features; ++j) {</span></span>
<span id="L112"><span class="lineNum"> 112</span> <span class="tlaGNC"> 1152 : g.addEdge(i, j, weights[i][j].item&lt;float&gt;());</span></span>
<span id="L113"><span class="lineNum"> 113</span> : }</span>
<span id="L114"><span class="lineNum"> 114</span> : }</span>
<span id="L115"><span class="lineNum"> 115</span> <span class="tlaGNC"> 174 : g.kruskal_algorithm();</span></span>
<span id="L116"><span class="lineNum"> 116</span> <span class="tlaGNC"> 174 : auto mst = g.get_mst();</span></span>
<span id="L117"><span class="lineNum"> 117</span> <span class="tlaGNC"> 348 : return reorder(mst, root);</span></span>
<span id="L118"><span class="lineNum"> 118</span> <span class="tlaGNC"> 174 : }</span></span>
<span id="L115"><span class="lineNum"> 115</span> <span class="tlaGNC"> 58 : g.kruskal_algorithm();</span></span>
<span id="L116"><span class="lineNum"> 116</span> <span class="tlaGNC"> 58 : auto mst = g.get_mst();</span></span>
<span id="L117"><span class="lineNum"> 117</span> <span class="tlaGNC"> 116 : return reorder(mst, root);</span></span>
<span id="L118"><span class="lineNum"> 118</span> <span class="tlaGNC"> 58 : }</span></span>
<span id="L119"><span class="lineNum"> 119</span> : </span>
<span id="L120"><span class="lineNum"> 120</span> : }</span>
</pre>
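For readers following the covered Mst.cc listing above, the code implements a Kruskal-style maximum spanning tree: edges are sorted by decreasing weight and accepted only when they join two different union-find components. Below is a minimal standalone sketch of that same pattern; the class and member names (addEdge, find_set, union_set, kruskal_algorithm, get_mst) mirror the listing, but the sketch and its toy main() are an illustration under those assumptions, not the library's actual header.

// Minimal sketch of the Kruskal-based maximum spanning tree covered above.
#include <algorithm>
#include <iostream>
#include <utility>
#include <vector>

class Graph {
public:
    explicit Graph(int V) : V(V), parent(V) {
        for (int i = 0; i < V; ++i) parent[i] = i;   // each node starts as its own set
    }
    void addEdge(int u, int v, float wt) { G.push_back({ wt, { u, v } }); }
    int find_set(int i) { return i == parent[i] ? i : find_set(parent[i]); }
    void union_set(int u, int v) { parent[u] = parent[v]; }   // u, v are set roots
    void kruskal_algorithm() {
        // Sort edges by decreasing weight, then keep an edge only if it joins
        // two different components -> the kept edges form a maximum spanning tree.
        std::stable_sort(G.begin(), G.end(),
            [](const auto& l, const auto& r) { return l.first > r.first; });
        for (const auto& e : G) {
            int u = find_set(e.second.first);
            int v = find_set(e.second.second);
            if (u != v) { T.push_back(e); union_set(u, v); }
        }
    }
    const std::vector<std::pair<float, std::pair<int, int>>>& get_mst() const { return T; }
private:
    int V;                                                    // number of nodes
    std::vector<int> parent;                                  // union-find parents
    std::vector<std::pair<float, std::pair<int, int>>> G, T;  // all edges / MST edges
};

int main() {
    Graph g(4);   // complete graph on 4 nodes with toy weights (hypothetical data)
    g.addEdge(0, 1, 0.9f); g.addEdge(0, 2, 0.1f); g.addEdge(0, 3, 0.4f);
    g.addEdge(1, 2, 0.8f); g.addEdge(1, 3, 0.2f); g.addEdge(2, 3, 0.7f);
    g.kruskal_algorithm();
    for (const auto& [w, e] : g.get_mst())
        std::cout << e.first << " - " << e.second << " (" << w << ")\n";
}

In the covered MST::maximumSpanningTree() above, the same Graph is filled with one edge per feature pair (a complete graph over the mutual-information weights) before kruskal_algorithm() runs and reorder() roots the resulting tree at the chosen feature.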

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,7 +65,7 @@
<tr>
<td class="coverFn"><a href="Mst.h.gcov.html#L28">bayesnet::Graph::get_mst()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,7 +65,7 @@
<tr>
<td class="coverFn"><a href="Mst.h.gcov.html#L28">bayesnet::Graph::get_mst()</a></td>
<td class="coverFnHi">174</td>
<td class="coverFnHi">58</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -89,7 +89,7 @@
<span id="L27"><span class="lineNum"> 27</span> : int find_set(int i);</span>
<span id="L28"><span class="lineNum"> 28</span> : void union_set(int u, int v);</span>
<span id="L29"><span class="lineNum"> 29</span> : void kruskal_algorithm();</span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC tlaBgGNC"> 174 : std::vector &lt;std::pair&lt;float, std::pair&lt;int, int&gt;&gt;&gt; get_mst() { return T; }</span></span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC tlaBgGNC"> 58 : std::vector &lt;std::pair&lt;float, std::pair&lt;int, int&gt;&gt;&gt; get_mst() { return T; }</span></span>
<span id="L31"><span class="lineNum"> 31</span> : private:</span>
<span id="L32"><span class="lineNum"> 32</span> : int V; // number of nodes in graph</span>
<span id="L33"><span class="lineNum"> 33</span> : std::vector &lt;std::pair&lt;float, std::pair&lt;int, int&gt;&gt;&gt; G; // std::vector for graph</span>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,28 +65,28 @@
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L17">bayesnet::tensorToVectorDouble(at::Tensor&amp;)</a></td>
<td class="coverFnHi">42</td>
<td class="coverFnHi">14</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L29">bayesnet::vectorToTensor(std::vector&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt;, std::allocator&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt; &gt; &gt;&amp;, bool)</a></td>
<td class="coverFnHi">54</td>
<td class="coverFnHi">18</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L9">bayesnet::argsort(std::vector&lt;double, std::allocator&lt;double&gt; &gt;&amp;)</a></td>
<td class="coverFnHi">186</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L14">bayesnet::argsort(std::vector&lt;double, std::allocator&lt;double&gt; &gt;&amp;)::{lambda(int, int)#1}::operator()(int, int) const</a></td>
<td class="coverFnHi">3630</td>
<td class="coverFnHi">1134</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -65,28 +65,28 @@
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L9">bayesnet::argsort(std::vector&lt;double, std::allocator&lt;double&gt; &gt;&amp;)</a></td>
<td class="coverFnHi">186</td>
<td class="coverFnHi">58</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L14">bayesnet::argsort(std::vector&lt;double, std::allocator&lt;double&gt; &gt;&amp;)::{lambda(int, int)#1}::operator()(int, int) const</a></td>
<td class="coverFnHi">3630</td>
<td class="coverFnHi">1134</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L17">bayesnet::tensorToVectorDouble(at::Tensor&amp;)</a></td>
<td class="coverFnHi">42</td>
<td class="coverFnHi">14</td>
</tr>
<tr>
<td class="coverFn"><a href="bayesnetUtils.cc.gcov.html#L29">bayesnet::vectorToTensor(std::vector&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt;, std::allocator&lt;std::vector&lt;int, std::allocator&lt;int&gt; &gt; &gt; &gt;&amp;, bool)</a></td>
<td class="coverFnHi">54</td>
<td class="coverFnHi">18</td>
</tr>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>
@@ -70,38 +70,38 @@
<span id="L8"><span class="lineNum"> 8</span> : #include &quot;bayesnetUtils.h&quot;</span>
<span id="L9"><span class="lineNum"> 9</span> : namespace bayesnet {</span>
<span id="L10"><span class="lineNum"> 10</span> : // Return the indices in descending order</span>
<span id="L11"><span class="lineNum"> 11</span> <span class="tlaGNC tlaBgGNC"> 186 : std::vector&lt;int&gt; argsort(std::vector&lt;double&gt;&amp; nums)</span></span>
<span id="L11"><span class="lineNum"> 11</span> <span class="tlaGNC tlaBgGNC"> 58 : std::vector&lt;int&gt; argsort(std::vector&lt;double&gt;&amp; nums)</span></span>
<span id="L12"><span class="lineNum"> 12</span> : {</span>
<span id="L13"><span class="lineNum"> 13</span> <span class="tlaGNC"> 186 : int n = nums.size();</span></span>
<span id="L14"><span class="lineNum"> 14</span> <span class="tlaGNC"> 186 : std::vector&lt;int&gt; indices(n);</span></span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC"> 186 : iota(indices.begin(), indices.end(), 0);</span></span>
<span id="L16"><span class="lineNum"> 16</span> <span class="tlaGNC"> 3816 : sort(indices.begin(), indices.end(), [&amp;nums](int i, int j) {return nums[i] &gt; nums[j];});</span></span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC"> 186 : return indices;</span></span>
<span id="L13"><span class="lineNum"> 13</span> <span class="tlaGNC"> 58 : int n = nums.size();</span></span>
<span id="L14"><span class="lineNum"> 14</span> <span class="tlaGNC"> 58 : std::vector&lt;int&gt; indices(n);</span></span>
<span id="L15"><span class="lineNum"> 15</span> <span class="tlaGNC"> 58 : iota(indices.begin(), indices.end(), 0);</span></span>
<span id="L16"><span class="lineNum"> 16</span> <span class="tlaGNC"> 1192 : sort(indices.begin(), indices.end(), [&amp;nums](int i, int j) {return nums[i] &gt; nums[j];});</span></span>
<span id="L17"><span class="lineNum"> 17</span> <span class="tlaGNC"> 58 : return indices;</span></span>
<span id="L18"><span class="lineNum"> 18</span> <span class="tlaUNC tlaBgUNC"> 0 : }</span></span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC tlaBgGNC"> 42 : std::vector&lt;std::vector&lt;double&gt;&gt; tensorToVectorDouble(torch::Tensor&amp; dtensor)</span></span>
<span id="L19"><span class="lineNum"> 19</span> <span class="tlaGNC tlaBgGNC"> 14 : std::vector&lt;std::vector&lt;double&gt;&gt; tensorToVectorDouble(torch::Tensor&amp; dtensor)</span></span>
<span id="L20"><span class="lineNum"> 20</span> : {</span>
<span id="L21"><span class="lineNum"> 21</span> : // convert mxn tensor to mxn std::vector</span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 42 : std::vector&lt;std::vector&lt;double&gt;&gt; result;</span></span>
<span id="L22"><span class="lineNum"> 22</span> <span class="tlaGNC"> 14 : std::vector&lt;std::vector&lt;double&gt;&gt; result;</span></span>
<span id="L23"><span class="lineNum"> 23</span> : // Iterate over cols</span>
<span id="L24"><span class="lineNum"> 24</span> <span class="tlaGNC"> 10818 : for (int i = 0; i &lt; dtensor.size(0); ++i) {</span></span>
<span id="L25"><span class="lineNum"> 25</span> <span class="tlaGNC"> 32328 : auto col_tensor = dtensor.index({ i, &quot;...&quot; });</span></span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 10776 : auto col = std::vector&lt;double&gt;(col_tensor.data_ptr&lt;float&gt;(), col_tensor.data_ptr&lt;float&gt;() + dtensor.size(1));</span></span>
<span id="L27"><span class="lineNum"> 27</span> <span class="tlaGNC"> 10776 : result.push_back(col);</span></span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 10776 : }</span></span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 42 : return result;</span></span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC"> 10776 : }</span></span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 54 : torch::Tensor vectorToTensor(std::vector&lt;std::vector&lt;int&gt;&gt;&amp; vector, bool transpose)</span></span>
<span id="L24"><span class="lineNum"> 24</span> <span class="tlaGNC"> 3606 : for (int i = 0; i &lt; dtensor.size(0); ++i) {</span></span>
<span id="L25"><span class="lineNum"> 25</span> <span class="tlaGNC"> 10776 : auto col_tensor = dtensor.index({ i, &quot;...&quot; });</span></span>
<span id="L26"><span class="lineNum"> 26</span> <span class="tlaGNC"> 3592 : auto col = std::vector&lt;double&gt;(col_tensor.data_ptr&lt;float&gt;(), col_tensor.data_ptr&lt;float&gt;() + dtensor.size(1));</span></span>
<span id="L27"><span class="lineNum"> 27</span> <span class="tlaGNC"> 3592 : result.push_back(col);</span></span>
<span id="L28"><span class="lineNum"> 28</span> <span class="tlaGNC"> 3592 : }</span></span>
<span id="L29"><span class="lineNum"> 29</span> <span class="tlaGNC"> 14 : return result;</span></span>
<span id="L30"><span class="lineNum"> 30</span> <span class="tlaGNC"> 3592 : }</span></span>
<span id="L31"><span class="lineNum"> 31</span> <span class="tlaGNC"> 18 : torch::Tensor vectorToTensor(std::vector&lt;std::vector&lt;int&gt;&gt;&amp; vector, bool transpose)</span></span>
<span id="L32"><span class="lineNum"> 32</span> : {</span>
<span id="L33"><span class="lineNum"> 33</span> : // convert nxm std::vector to mxn tensor if transpose</span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 54 : long int m = transpose ? vector[0].size() : vector.size();</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 54 : long int n = transpose ? vector.size() : vector[0].size();</span></span>
<span id="L36"><span class="lineNum"> 36</span> <span class="tlaGNC"> 54 : auto tensor = torch::zeros({ m, n }, torch::kInt32);</span></span>
<span id="L37"><span class="lineNum"> 37</span> <span class="tlaGNC"> 354 : for (int i = 0; i &lt; m; ++i) {</span></span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC"> 74886 : for (int j = 0; j &lt; n; ++j) {</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 74586 : tensor[i][j] = transpose ? vector[j][i] : vector[i][j];</span></span>
<span id="L34"><span class="lineNum"> 34</span> <span class="tlaGNC"> 18 : long int m = transpose ? vector[0].size() : vector.size();</span></span>
<span id="L35"><span class="lineNum"> 35</span> <span class="tlaGNC"> 18 : long int n = transpose ? vector.size() : vector[0].size();</span></span>
<span id="L36"><span class="lineNum"> 36</span> <span class="tlaGNC"> 18 : auto tensor = torch::zeros({ m, n }, torch::kInt32);</span></span>
<span id="L37"><span class="lineNum"> 37</span> <span class="tlaGNC"> 118 : for (int i = 0; i &lt; m; ++i) {</span></span>
<span id="L38"><span class="lineNum"> 38</span> <span class="tlaGNC"> 24962 : for (int j = 0; j &lt; n; ++j) {</span></span>
<span id="L39"><span class="lineNum"> 39</span> <span class="tlaGNC"> 24862 : tensor[i][j] = transpose ? vector[j][i] : vector[i][j];</span></span>
<span id="L40"><span class="lineNum"> 40</span> : }</span>
<span id="L41"><span class="lineNum"> 41</span> : }</span>
<span id="L42"><span class="lineNum"> 42</span> <span class="tlaGNC"> 54 : return tensor;</span></span>
<span id="L42"><span class="lineNum"> 42</span> <span class="tlaGNC"> 18 : return tensor;</span></span>
<span id="L43"><span class="lineNum"> 43</span> <span class="tlaUNC tlaBgUNC"> 0 : }</span></span>
<span id="L44"><span class="lineNum"> 44</span> : }</span>
</pre>
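The covered bayesnetUtils.cc listing above includes an argsort helper that returns the indices of a vector ordered by descending value (iota followed by a sort with a comparator over the source values). A minimal standalone sketch of that pattern is shown below; the signature mirrors the listing, but the example values are hypothetical and the sketch is an illustration rather than the project's actual translation unit.

// Minimal sketch of the argsort helper covered above: indices of `nums`
// ordered so that the largest value comes first.
#include <algorithm>
#include <iostream>
#include <numeric>
#include <vector>

std::vector<int> argsort(std::vector<double>& nums) {
    std::vector<int> indices(nums.size());
    std::iota(indices.begin(), indices.end(), 0);            // 0, 1, 2, ...
    std::sort(indices.begin(), indices.end(),
              [&nums](int i, int j) { return nums[i] > nums[j]; });
    return indices;
}

int main() {
    std::vector<double> scores{ 0.2, 0.9, 0.5 };              // toy scores
    for (int idx : argsort(scores))
        std::cout << idx << ' ';                              // prints: 1 2 0
    std::cout << '\n';
}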

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>

View File

@@ -37,7 +37,7 @@
</tr>
<tr>
<td class="headerItem">Test Date:</td>
<td class="headerValue">2024-04-30 13:59:18</td>
<td class="headerValue">2024-04-30 20:26:57</td>
<td></td>
<td class="headerItem">Functions:</td>
<td class="headerCovTableEntryHi">100.0&nbsp;%</td>