🎉Community Raffle - Win $25

An exclusive raffle opportunity for active members like you! Complete your profile, answer questions and get your first accepted badge to enter the raffle.
Join and Win

input example set does not have a label attribute

User: "jimmyanna"
New Altair Community Member
Updated by Jocelyn
Hello,

Thank you for your tool; it is very helpful. I am trying to input some documents (text format) into RapidMiner and output their context for my university project. I have used the Process Documents from Files operator, and I have two directories: one has text documents (.log extension) about children, and the other directory is about sports (again .log extensions, since they are logs from chats). Now when I run my process this is what I get, and I am not sure what exactly to do:

Input example set does not have a label attribute

Many operators like classification and regression methods or the PerformanceEvaluator require the input example sets to have a label or class attribute. If this is not the case, applying these operators is pointless. If you read the data using an ExampleSource, you can specify the label attribute by using a 'label' tag in the attribute description file.

Offending operator: Validation

Can you please help me with this? I am not sure what the problem is.

here is the XML of the process:

<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<process version="5.3.005">
  <context>
    <input/>
    <output/>
    <macros/>
  </context>
  <operator activated="true" class="process" compatibility="5.3.005" expanded="true" name="Process">
    <process expanded="true">
      <operator activated="true" class="text:process_document_from_file" compatibility="5.3.000" expanded="true" height="76" name="Process Documents from Files" width="90" x="179" y="30">
        <!-- The directory keys ("child", "other") become the class/label values of the
             resulting example set, one per source directory. -->
        <list key="text_directories">
          <parameter key="child" value="C:\Users\Anahid\Desktop\chat samples\sample1"/>
          <parameter key="other" value="C:\Users\Anahid\Desktop\chat samples\sample2"/>
        </list>
        <parameter key="prune_method" value="percentual"/>
        <!-- FIX: key was misspelled "prunde_below_percent", so the lower pruning
             bound was silently ignored by the operator. -->
        <parameter key="prune_below_percent" value="1.0"/>
        <parameter key="prune_above_percent" value="90.0"/>
        <process expanded="true">
          <!-- NOTE(review): this nested Process Documents from Files (2) has an EMPTY
               text_directories list, so its example set has no label attribute —
               presumably the cause of the "Input example set does not have a label
               attribute" error raised by Validation below. The Validation should be
               fed by the OUTER operator (which defines the "child"/"other" classes),
               or this inner operator needs its directories configured. TODO confirm. -->
          <operator activated="true" class="text:process_document_from_file" compatibility="5.3.000" expanded="true" height="76" name="Process Documents from Files (2)" width="90" x="45" y="30">
            <list key="text_directories"/>
            <process expanded="true">
              <operator activated="true" class="text:tokenize" compatibility="5.3.000" expanded="true" height="60" name="Tokenize" width="90" x="179" y="30"/>
              <operator activated="true" class="text:filter_by_length" compatibility="5.3.000" expanded="true" height="60" name="Filter Tokens (by Length)" width="90" x="179" y="120"/>
              <operator activated="true" class="text:stem_porter" compatibility="5.3.000" expanded="true" height="60" name="Stem (Porter)" width="90" x="179" y="210"/>
              <operator activated="true" class="text:filter_stopwords_english" compatibility="5.3.000" expanded="true" height="60" name="Filter Stopwords (English)" width="90" x="179" y="300"/>
              <connect from_port="document" to_op="Tokenize" to_port="document"/>
              <connect from_op="Tokenize" from_port="document" to_op="Filter Tokens (by Length)" to_port="document"/>
              <connect from_op="Filter Tokens (by Length)" from_port="document" to_op="Stem (Porter)" to_port="document"/>
              <connect from_op="Stem (Porter)" from_port="document" to_op="Filter Stopwords (English)" to_port="document"/>
              <connect from_op="Filter Stopwords (English)" from_port="document" to_port="document 1"/>
              <portSpacing port="source_document" spacing="0"/>
              <portSpacing port="sink_document 1" spacing="0"/>
              <portSpacing port="sink_document 2" spacing="0"/>
            </process>
          </operator>
          <operator activated="true" class="store" compatibility="5.3.005" expanded="true" height="60" name="Store" width="90" x="112" y="210">
            <parameter key="repository_entry" value="//Local Repository/data/res"/>
          </operator>
          <operator activated="true" class="x_validation" compatibility="5.3.005" expanded="true" height="112" name="Validation" width="90" x="246" y="75">
            <process expanded="true">
              <operator activated="true" class="classification_by_regression" compatibility="5.3.005" expanded="true" height="76" name="Classification by Regression" width="90" x="75" y="30">
                <process expanded="true">
                  <operator activated="true" class="support_vector_machine_linear" compatibility="5.3.005" expanded="true" height="76" name="SVM (Linear)" width="90" x="45" y="30"/>
                  <connect from_port="training set" to_op="SVM (Linear)" to_port="training set"/>
                  <connect from_op="SVM (Linear)" from_port="model" to_port="model"/>
                  <portSpacing port="source_training set" spacing="0"/>
                  <portSpacing port="sink_model" spacing="0"/>
                </process>
              </operator>
              <connect from_port="training" to_op="Classification by Regression" to_port="training set"/>
              <connect from_op="Classification by Regression" from_port="model" to_port="model"/>
              <portSpacing port="source_training" spacing="0"/>
              <portSpacing port="sink_model" spacing="0"/>
              <portSpacing port="sink_through 1" spacing="0"/>
            </process>
            <process expanded="true">
              <operator activated="true" class="apply_model" compatibility="5.3.005" expanded="true" height="76" name="Apply Model" width="90" x="45" y="30">
                <list key="application_parameters"/>
              </operator>
              <operator activated="true" class="performance" compatibility="5.3.005" expanded="true" height="76" name="Performance" width="90" x="45" y="165"/>
              <connect from_port="model" to_op="Apply Model" to_port="model"/>
              <connect from_port="test set" to_op="Apply Model" to_port="unlabelled data"/>
              <connect from_op="Apply Model" from_port="labelled data" to_op="Performance" to_port="labelled data"/>
              <connect from_op="Performance" from_port="performance" to_port="averagable 1"/>
              <portSpacing port="source_model" spacing="0"/>
              <portSpacing port="source_test set" spacing="0"/>
              <portSpacing port="source_through 1" spacing="0"/>
              <portSpacing port="sink_averagable 1" spacing="0"/>
              <portSpacing port="sink_averagable 2" spacing="0"/>
            </process>
          </operator>
          <operator activated="true" class="store" compatibility="5.3.005" expanded="true" height="60" name="Store (2)" width="90" x="313" y="210">
            <parameter key="repository_entry" value="//Local Repository/data/Test1"/>
          </operator>
          <connect from_op="Process Documents from Files (2)" from_port="example set" to_op="Validation" to_port="training"/>
          <connect from_op="Process Documents from Files (2)" from_port="word list" to_op="Store" to_port="input"/>
          <connect from_op="Validation" from_port="model" to_op="Store (2)" to_port="input"/>
          <portSpacing port="source_document" spacing="0"/>
          <portSpacing port="sink_document 1" spacing="0"/>
        </process>
      </operator>
      <connect from_port="input 1" to_op="Process Documents from Files" to_port="word list"/>
      <connect from_op="Process Documents from Files" from_port="example set" to_port="result 1"/>
      <connect from_op="Process Documents from Files" from_port="word list" to_port="result 2"/>
      <portSpacing port="source_input 1" spacing="0"/>
      <portSpacing port="source_input 2" spacing="0"/>
      <portSpacing port="sink_result 1" spacing="0"/>
      <portSpacing port="sink_result 2" spacing="0"/>
      <portSpacing port="sink_result 3" spacing="0"/>
    </process>
  </operator>
</process>


Thank you very much!

Find more posts tagged with