n-grams word cloud representation

jmphillips (New Altair Community Member)
edited November 5 in Community Q&A
Hello: My RapidMiner process renders word clouds in R, but the clouds include 1-grams, 2-grams, etc.

The question is: can I represent only the 2-grams in the word cloud? How can I filter out (exclude) the 1-grams?

Answers

  • rfuentealba (New Altair Community Member)
    Hello,

    Does filtering on whether the record contains an underscore ("_") work? With n-grams, the 1-grams won't contain that character (a quick R sketch of this idea follows below this post).

    All the best,

    Rodrigo.
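
A minimal R sketch of that idea, assuming the word list arrives in R as a data frame with a word column, as in the Execute R script of the process posted further down (the variable names here are only for illustration):

    # Keep only terms containing "_", i.e. n-grams with n >= 2;
    # plain 1-grams never contain an underscore and are dropped.
    ngrams_only <- data[grepl("_", data$word, fixed = TRUE), ]

    # To keep strictly 2-grams (exactly one "_"):
    two_grams <- data[grepl("^[^_]+_[^_]+$", data$word), ]

In the posted process this filter could be applied inside the Execute R script, right before the wordcloud() call.
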
  • jmphillips (New Altair Community Member)
    Filter Tokens (by XXXX).


  • jmphillips (New Altair Community Member)
    I think it's better if we talk in Spanish, since we both speak it! Haha.

  • rfuentealba (New Altair Community Member)
    Hello, that's right.

    Is it possible to export the process as XML and send it to me? That way I can get an idea of it and see if I can help you.

    Best regards,

    Rodrigo.
  • jmphillips (New Altair Community Member)
    Answer ✓
    <?xml version="1.0" encoding="UTF-8"?><process version="9.3.001">
      <context>
        <input/>
        <output/>
        <macros/>
      </context>
      <operator activated="true" class="process" compatibility="6.0.002" expanded="true" name="Process">
        <parameter key="logverbosity" value="init"/>
        <parameter key="random_seed" value="2001"/>
        <parameter key="send_mail" value="never"/>
        <parameter key="notification_email" value=""/>
        <parameter key="process_duration_for_mail" value="30"/>
        <parameter key="encoding" value="SYSTEM"/>
        <process expanded="true">
          <operator activated="true" class="loop_files" compatibility="9.3.001" expanded="true" height="103" name="Loop Files" width="90" x="112" y="34">
            <parameter key="directory" value="C:\CENDOC\input\txt"/>
            <parameter key="filtered_string" value="file name (last part of the path)"/>
            <parameter key="file_name_macro" value="file_name_TEST"/>
            <parameter key="file_path_macro" value="file_path"/>
            <parameter key="parent_path_macro" value="parent_path"/>
            <parameter key="recursive" value="false"/>
            <parameter key="iterate_over_files" value="true"/>
            <parameter key="iterate_over_subdirs" value="false"/>
            <process expanded="true">
              <operator activated="true" class="text:read_document" compatibility="8.2.000" expanded="true" height="68" name="Read Document" width="90" x="45" y="30">
                <parameter key="extract_text_only" value="true"/>
                <parameter key="use_file_extension_as_type" value="true"/>
                <parameter key="content_type" value="txt"/>
                <parameter key="encoding" value="UTF-8"/>
              </operator>
              <operator activated="true" class="text:process_documents" compatibility="8.2.000" expanded="true" height="103" name="Process Documents" width="90" x="246" y="34">
                <parameter key="create_word_vector" value="false"/>
                <parameter key="vector_creation" value="TF-IDF"/>
                <parameter key="add_meta_information" value="true"/>
                <parameter key="keep_text" value="true"/>
                <parameter key="prune_method" value="none"/>
                <parameter key="prune_below_percent" value="3.0"/>
                <parameter key="prune_above_percent" value="30.0"/>
                <parameter key="prune_below_rank" value="5.0"/>
                <parameter key="prune_above_rank" value="5.0"/>
                <parameter key="datamanagement" value="double_sparse_array"/>
                <parameter key="data_management" value="auto"/>
                <process expanded="true">
                  <operator activated="true" class="text:tokenize" compatibility="8.2.000" expanded="true" height="68" name="Tokenize" width="90" x="45" y="85">
                    <parameter key="mode" value="non letters"/>
                    <parameter key="characters" value=".:"/>
                    <parameter key="language" value="English"/>
                    <parameter key="max_token_length" value="3"/>
                  </operator>
                  <operator activated="true" class="text:transform_cases" compatibility="8.2.000" expanded="true" height="68" name="Transform Cases" width="90" x="179" y="85">
                    <parameter key="transform_to" value="upper case"/>
                  </operator>
                  <operator activated="true" class="text:filter_stopwords_dictionary" compatibility="8.2.000" expanded="true" height="82" name="Filter Stopwords (Dictionary)" width="90" x="313" y="85">
                    <parameter key="file" value="C:\Users\jphillips\Downloads\sw.txt"/>
                    <parameter key="case_sensitive" value="false"/>
                    <parameter key="encoding" value="UTF-8"/>
                  </operator>
                  <operator activated="true" class="text:filter_by_length" compatibility="8.2.000" expanded="true" height="68" name="Filter Tokens (by Length)" width="90" x="514" y="85">
                    <parameter key="min_chars" value="4"/>
                    <parameter key="max_chars" value="25"/>
                  </operator>
                  <operator activated="true" class="text:generate_n_grams_terms" compatibility="8.2.000" expanded="true" height="68" name="Generate n-Grams (Terms)" width="90" x="715" y="136">
                    <parameter key="max_length" value="3"/>
                  </operator>
                  <connect from_port="document" to_op="Tokenize" to_port="document"/>
                  <connect from_op="Tokenize" from_port="document" to_op="Transform Cases" to_port="document"/>
                  <connect from_op="Transform Cases" from_port="document" to_op="Filter Stopwords (Dictionary)" to_port="document"/>
                  <connect from_op="Filter Stopwords (Dictionary)" from_port="document" to_op="Filter Tokens (by Length)" to_port="document"/>
                  <connect from_op="Filter Tokens (by Length)" from_port="document" to_op="Generate n-Grams (Terms)" to_port="document"/>
                  <connect from_op="Generate n-Grams (Terms)" from_port="document" to_port="document 1"/>
                  <portSpacing port="source_document" spacing="0"/>
                  <portSpacing port="sink_document 1" spacing="0"/>
                  <portSpacing port="sink_document 2" spacing="0"/>
                </process>
              </operator>
              <operator activated="true" class="text:wordlist_to_data" compatibility="8.2.000" expanded="true" height="82" name="WordList to Data" width="90" x="447" y="34"/>
              <operator activated="true" class="multiply" compatibility="9.3.001" expanded="true" height="103" name="Multiply" width="90" x="581" y="34"/>
              <operator activated="true" class="r_scripting:execute_r" compatibility="9.1.000" expanded="true" height="103" name="Execute R" width="90" x="782" y="187">
                <parameter key="script" value="# rm_main is a mandatory function, &#10;# the number of arguments has to be the number of input ports (can be none)&#10;rm_main = function(data)&#10;{&#10;&#9;library(&quot;wordcloud&quot;)&#10;&#9;library(&quot;RColorBrewer&quot;)&#10;&#10;&#9;Encoding(data$word)     &lt;-   &quot;UTF-8&quot;&#10;&#9;&#10;&#9;png(&quot;C:\\CENDOC\\output\\WordClouds\\%{file_name_TEST}_word_list.png&quot;, width=1280,height=800)&#10;&#9;wordcloud(words = data$word, freq = data$total, min.freq = 1, max.words=100, lang= &quot;spanish&quot;, random.order=FALSE, rot.per=0.35, colors=brewer.pal(8, &quot;Dark2&quot;))&#10;&#9;dev.off()&#10;&#9;&#10;&#9;return (data)&#10;}&#10;"/>
              </operator>
              <operator activated="true" class="write_excel" compatibility="9.2.001" expanded="true" height="103" name="Write Excel" width="90" x="782" y="34">
                <parameter key="excel_file" value="C:\CENDOC\output\Excel\%{file_name_TEST}_word_list.xlsx"/>
                <parameter key="file_format" value="xlsx"/>
                <enumeration key="sheet_names"/>
                <parameter key="sheet_name" value="RapidMiner Data"/>
                <parameter key="date_format" value="yyyy-MM-dd HH:mm:ss"/>
                <parameter key="number_format" value="#.0"/>
                <parameter key="encoding" value="SYSTEM"/>
              </operator>
              <connect from_port="file object" to_op="Read Document" to_port="file"/>
              <connect from_op="Read Document" from_port="output" to_op="Process Documents" to_port="documents 1"/>
              <connect from_op="Process Documents" from_port="word list" to_op="WordList to Data" to_port="word list"/>
              <connect from_op="WordList to Data" from_port="example set" to_op="Multiply" to_port="input"/>
              <connect from_op="Multiply" from_port="output 1" to_op="Write Excel" to_port="input"/>
              <connect from_op="Multiply" from_port="output 2" to_op="Execute R" to_port="input 1"/>
              <connect from_op="Execute R" from_port="output 1" to_port="out 2"/>
              <connect from_op="Write Excel" from_port="through" to_port="out 1"/>
              <portSpacing port="source_file object" spacing="0"/>
              <portSpacing port="source_in 1" spacing="0"/>
              <portSpacing port="sink_out 1" spacing="0"/>
              <portSpacing port="sink_out 2" spacing="0"/>
              <portSpacing port="sink_out 3" spacing="0"/>
            </process>
          </operator>
          <operator activated="false" class="append" compatibility="9.3.001" expanded="true" height="68" name="Append" width="90" x="313" y="340">
            <parameter key="datamanagement" value="double_array"/>
            <parameter key="data_management" value="auto"/>
            <parameter key="merge_type" value="all"/>
          </operator>
          <connect from_op="Loop Files" from_port="out 1" to_port="result 1"/>
          <portSpacing port="source_input 1" spacing="0"/>
          <portSpacing port="sink_result 1" spacing="0"/>
          <portSpacing port="sink_result 2" spacing="0"/>
        </process>
      </operator>
    </process>
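
For reference, a minimal sketch of how the underscore filter suggested above could be dropped into the rm_main function of the Execute R operator in this process, so that only n-grams reach wordcloud(). It keeps the posted paths, macro, and plotting parameters, assumes the same word and total columns shown in the script, and omits the lang argument, which is not a documented wordcloud() parameter:

    # Sketch based on the posted Execute R script: same input (a data frame
    # with "word" and "total" columns), plus a filter that removes 1-grams.
    rm_main = function(data)
    {
        library("wordcloud")
        library("RColorBrewer")

        Encoding(data$word) <- "UTF-8"

        # Generate n-Grams (Terms) joins tokens with "_", so terms without
        # an underscore are 1-grams and are excluded here.
        data <- data[grepl("_", data$word, fixed = TRUE), ]

        png("C:\\CENDOC\\output\\WordClouds\\%{file_name_TEST}_word_list.png", width = 1280, height = 800)
        wordcloud(words = data$word, freq = data$total, min.freq = 1, max.words = 100,
                  random.order = FALSE, rot.per = 0.35, colors = brewer.pal(8, "Dark2"))
        dev.off()

        return(data)
    }

Alternatively, the example set coming out of WordList to Data could be filtered in RapidMiner itself (e.g. with a Filter Examples operator matching word against .*_.*) before it reaches Execute R, but filtering inside the existing R script keeps the rest of the posted process unchanged.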