🎉Community Raffle - Win $25

An exclusive raffle opportunity for active members like you! Complete your profile, answer questions and get your first accepted badge to enter the raffle.
Join and Win

FFS template: logging anomaly or bug?

User: "wessel"
New Altair Community Member
Updated by Jocelyn
Hello,

I ran the Feature Selection template, I looked at the log results, and noticed many '?' (NaN) symbols at the start of the log.
So I made some modifications to the template to understand where these '?' symbols come from.
But I can't explain why FFS runs the same attribute subset many times and gets a different performance score within each generation.

The log looks like this:


# FS.performance FS.generation Performance.kappa Apply Model.applycount FS.feature_names
NaN 0.0 0.0 1.0 ?
NaN 0.0 0.39 2.0 ?
NaN 0.0 0.39 3.0 ?
NaN 0.0 0.0 4.0 ?
NaN 0.0 0.0 5.0 ?
NaN 0.0 0.0 6.0 ?
NaN 0.0 0.0 7.0 ?
NaN 0.0 0.199 8.0 ?
NaN 0.0 0.0 9.0 ?
NaN 0.0 0.0 10.0 ?
NaN 0.0 0.0 11.0 ?
NaN 0.0 0.0 12.0 ?
NaN 0.0 0.0 13.0 ?
NaN 0.0 0.0 14.0 ?
NaN 0.0 0.0 15.0 ?
NaN 0.0 0.0 16.0 ?
0.39 1.0 0.39 17.0 wage-inc-1st
0.39 1.0 0.39 18.0 wage-inc-1st
0.39 1.0 0.0 19.0 wage-inc-1st
0.39 1.0 0.39 20.0 wage-inc-1st
0.39 1.0 0.196 21.0 wage-inc-1st
0.39 1.0 0.0 22.0 wage-inc-1st
0.39 1.0 0.39 23.0 wage-inc-1st
0.39 1.0 0.39 24.0 wage-inc-1st
0.39 1.0 0.39 25.0 wage-inc-1st
0.39 1.0 0.5 26.0 wage-inc-1st
0.39 1.0 0.0 27.0 wage-inc-1st
0.39 1.0 0.39 28.0 wage-inc-1st
0.39 1.0 0.39 29.0 wage-inc-1st
0.39 1.0 0.39 30.0 wage-inc-1st
0.39 1.0 0.39 31.0 wage-inc-1st
0.5 2.0 0.5 32.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.39 33.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 34.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 35.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.196 36.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 37.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 38.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 39.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 40.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.0 41.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 42.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 43.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 44.0 wage-inc-2nd, statutory-holidays
0.5 2.0 0.5 45.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 46.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.39 47.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 48.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 49.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.196 50.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 51.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 52.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 53.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 54.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 55.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 56.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 57.0 wage-inc-2nd, statutory-holidays
0.5 3.0 0.5 58.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 59.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.39 60.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 61.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 62.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.196 63.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 64.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 65.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 66.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 67.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 68.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 69.0 wage-inc-2nd, statutory-holidays
0.5 4.0 0.5 70.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 71.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.39 72.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 73.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 74.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.196 75.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 76.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 77.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 78.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 79.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 80.0 wage-inc-2nd, statutory-holidays
0.5 5.0 0.5 81.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 82.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.39 83.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 84.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 85.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.196 86.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 87.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 88.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 89.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 90.0 wage-inc-2nd, statutory-holidays
0.5 6.0 0.5 91.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 92.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.39 93.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 94.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 95.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.196 96.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 97.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 98.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 99.0 wage-inc-2nd, statutory-holidays
0.5 7.0 0.5 100.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 101.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.39 102.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 103.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 104.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.196 105.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 106.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 107.0 wage-inc-2nd, statutory-holidays
0.5 8.0 0.5 108.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.5 109.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.39 110.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.5 111.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.5 112.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.196 113.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.5 114.0 wage-inc-2nd, statutory-holidays
0.5 9.0 0.5 115.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.5 116.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.39 117.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.5 118.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.5 119.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.196 120.0 wage-inc-2nd, statutory-holidays
0.5 10.0 0.5 121.0 wage-inc-2nd, statutory-holidays
0.5 11.0 0.5 122.0 wage-inc-2nd, statutory-holidays
0.5 11.0 0.39 123.0 wage-inc-2nd, statutory-holidays
0.5 11.0 0.5 124.0 wage-inc-2nd, statutory-holidays
0.5 11.0 0.5 125.0 wage-inc-2nd, statutory-holidays
0.5 11.0 0.196 126.0 wage-inc-2nd, statutory-holidays
0.5 12.0 0.5 127.0 wage-inc-2nd, statutory-holidays
0.5 12.0 0.39 128.0 wage-inc-2nd, statutory-holidays
0.5 12.0 0.5 129.0 wage-inc-2nd, statutory-holidays
0.5 12.0 0.196 130.0 wage-inc-2nd, statutory-holidays
0.5 13.0 0.5 131.0 wage-inc-2nd, statutory-holidays
0.5 13.0 0.39 132.0 wage-inc-2nd, statutory-holidays
0.5 13.0 0.196 133.0 wage-inc-2nd, statutory-holidays
0.5 14.0 0.39 134.0 wage-inc-2nd, statutory-holidays
0.5 14.0 0.196 135.0 wage-inc-2nd, statutory-holidays
0.39 15.0 0.39 136.0 wage-inc-2nd, statutory-holidays



This is the XML:
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<process version="5.0">
 <!-- RapidMiner 5 sample process for wrapper-style feature selection (forward selection),
      modified by the poster to log one row per inner model application. -->
 <context>
   <input>
     <location/>
   </input>
   <output>
     <location/>
     <location/>
     <location/>
   </output>
   <macros/>
 </context>
 <operator activated="true" class="process" expanded="true" name="Root">
   <description>&lt;p&gt; Transformations of the attribute space may ease learning in a way, that simple learning schemes may be able to learn complex functions. This is the basic idea of the kernel trick. But even without kernel based learning schemes the transformation of feature space may be necessary to reach good learning results. &lt;/p&gt;  &lt;p&gt; RapidMiner offers several different feature selection, construction, and extraction methods. This selection process (the well known forward selection) uses an inner cross validation for performance estimation. This building block serves as fitness evaluation for all candidate feature sets. Since the performance of a certain learning scheme is taken into account we refer to processes of this type as &amp;quot;wrapper approaches&amp;quot;.&lt;/p&gt;  &lt;p&gt;Additionally the process log operator plots intermediate results. You can inspect them online in the Results tab. Please refer to the visualization sample processes or the RapidMiner tutorial for further details.&lt;/p&gt;  &lt;p&gt; Try the following: &lt;ul&gt; &lt;li&gt;Start the process and change to &amp;quot;Result&amp;quot; view. There can be a plot selected. Plot the &amp;quot;performance&amp;quot; against the &amp;quot;generation&amp;quot; of the feature selection operator.&lt;/li&gt; &lt;li&gt;Select the feature selection operator in the tree view. Change the search directory from forward (forward selection) to backward (backward elimination). Restart the process. All features will be selected.&lt;/li&gt; &lt;li&gt;Select the feature selection operator. Right click to open the context menu and repace the operator by another feature selection scheme (for example a genetic algorithm).&lt;/li&gt; &lt;li&gt;Have a look at the list of the process log operator. Every time it is applied it collects the specified data. Please refer to the RapidMiner Tutorial for further explanations. 
After changing the feature selection operator to the genetic algorithm approach, you have to specify the correct values. &lt;table&gt;&lt;tr&gt;&lt;td&gt;&lt;icon&gt;groups/24/visualization&lt;/icon&gt;&lt;/td&gt;&lt;td&gt;&lt;i&gt;Use the process log operator to log values online.&lt;/i&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt; &lt;/li&gt; &lt;/ul&gt; &lt;/p&gt;</description>
   <process expanded="true" height="500" width="576">
     <operator activated="true" class="retrieve" expanded="true" height="60" name="Retrieve" width="90" x="45" y="30">
       <parameter key="repository_entry" value="//Samples/data/Labor-Negotiations"/>
     </operator>
     <!-- Splits the data 50/50: partition 1 goes to the FS operator's training input,
          partition 2 is passed through to FS as the held-out set (see the connect
          elements at the bottom of this process). -->
     <operator activated="true" class="split_data" expanded="true" height="94" name="Split Data" width="90" x="179" y="30">
       <enumeration key="partitions">
         <parameter key="ratio" value="0.5"/>
         <parameter key="ratio" value="0.5"/>
       </enumeration>
     </operator>
     <!-- NOTE(review): maximum_number_of_generations is set to 1 here, yet the pasted
          log shows FS.generation climbing to 15. Confirm how optimize_selection
          interprets this parameter when limit_generations_without_improval is false. -->
     <operator activated="true" class="optimize_selection" expanded="true" height="94" name="FS" width="90" x="313" y="30">
       <parameter key="limit_generations_without_improval" value="false"/>
       <parameter key="generations_without_improval" value="2"/>
       <parameter key="maximum_number_of_generations" value="1"/>
       <process expanded="true" height="500" width="570">
         <operator activated="true" class="weka:W-J48" expanded="true" height="76" name="W-J48" width="90" x="45" y="30"/>
         <operator activated="true" class="apply_model" expanded="true" height="76" name="Apply Model" width="90" x="180" y="30">
           <list key="application_parameters"/>
         </operator>
         <operator activated="true" class="performance_binominal_classification" expanded="true" height="76" name="Performance" width="90" x="315" y="30">
           <parameter key="accuracy" value="false"/>
           <parameter key="kappa" value="true"/>
           <parameter key="AUC" value="true"/>
           <parameter key="sensitivity" value="true"/>
           <parameter key="specificity" value="true"/>
         </operator>
         <!-- NOTE(review): this log operator sits INSIDE the FS subprocess, so it fires
              once per inner model application. FS.performance and FS.feature_names are
              values of the enclosing FS operator; while the generation-0 candidates are
              still being evaluated those values are not yet set, which matches the
              NaN / '?' rows at the top of the pasted log. Within a generation, the
              varying Performance.kappa values presumably come from evaluating different
              candidate subsets, while FS.feature_names reports the best subset of the
              completed generation. Verify against the ProcessLog operator documentation. -->
         <operator activated="true" class="log" expanded="true" height="76" name="ProcessLog" width="90" x="447" y="30">
           <parameter key="filename" value="C:\Users\wessel\Desktop\sdf.log"/>
           <list key="log">
             <parameter key="FS.performance" value="operator.FS.value.performance"/>
             <parameter key="FS.generation" value="operator.FS.value.generation"/>
             <parameter key="kappa" value="operator.Performance.value.kappa"/>
             <parameter key="Perf.AUC" value="operator.Performance.value.AUC"/>
             <parameter key="Perf.sens" value="operator.Performance.value.sensitivity"/>
             <parameter key="Performance.speci" value="operator.Performance.value.specificity"/>
             <parameter key="Apply Model.applycount" value="operator.Apply Model.value.applycount"/>
             <parameter key="J48.looptime" value="operator.W-J48.value.looptime"/>
             <parameter key="FS.feature_names" value="operator.FS.value.feature_names"/>
           </list>
         </operator>
         <connect from_port="example set" to_op="W-J48" to_port="training set"/>
         <connect from_port="through 1" to_op="Apply Model" to_port="unlabelled data"/>
         <connect from_op="W-J48" from_port="model" to_op="Apply Model" to_port="model"/>
         <connect from_op="Apply Model" from_port="labelled data" to_op="Performance" to_port="labelled data"/>
         <connect from_op="Performance" from_port="performance" to_op="ProcessLog" to_port="through 1"/>
         <connect from_op="ProcessLog" from_port="through 1" to_port="performance"/>
         <portSpacing port="source_example set" spacing="0"/>
         <portSpacing port="source_through 1" spacing="0"/>
         <portSpacing port="source_through 2" spacing="0"/>
         <portSpacing port="sink_performance" spacing="0"/>
       </process>
     </operator>
     <connect from_op="Retrieve" from_port="output" to_op="Split Data" to_port="example set"/>
     <connect from_op="Split Data" from_port="partition 1" to_op="FS" to_port="example set in"/>
     <connect from_op="Split Data" from_port="partition 2" to_op="FS" to_port="through 1"/>
     <connect from_op="FS" from_port="example set out" to_port="result 1"/>
     <connect from_op="FS" from_port="performance" to_port="result 2"/>
     <portSpacing port="source_input 1" spacing="0"/>
     <portSpacing port="sink_result 1" spacing="0"/>
     <portSpacing port="sink_result 2" spacing="0"/>
     <portSpacing port="sink_result 3" spacing="0"/>
   </process>
 </operator>
</process>

Find more posts tagged with