[SOLVED] Process failed error at NN operator
Klohtun
New Altair Community Member
Hello.
I'm new here (forum and rapidminer both). Sorry, my English isn't good.
I have "process failed" message when I try to run the following process:
<?xml version="1.0" encoding="UTF-8" standalone="no"?>Could anyone help to solve this problem or just explain to me why this error appears?
<process version="5.3.015">
<context>
<input/>
<output/>
<macros/>
</context>
<operator activated="true" class="process" compatibility="5.3.015" expanded="true" name="Process">
<process expanded="true">
<operator activated="true" class="subprocess" compatibility="5.3.015" expanded="true" height="76" name="Gen. Data" width="90" x="45" y="30">
<process expanded="true">
<operator activated="true" class="generate_data" compatibility="5.3.015" expanded="true" height="60" name="Generate Data" width="90" x="45" y="30">
<parameter key="number_examples" value="30"/>
<parameter key="number_of_attributes" value="1"/>
</operator>
<operator activated="true" class="rename" compatibility="5.3.015" expanded="true" height="76" name="Rename" width="90" x="180" y="30">
<parameter key="old_name" value="att1"/>
<parameter key="new_name" value="X"/>
<list key="rename_additional_attributes"/>
</operator>
<operator activated="true" class="generate_attributes" compatibility="5.3.015" expanded="true" height="76" name="Generate Attributes" width="90" x="315" y="30">
<list key="function_descriptions">
<parameter key="Y" value="50*sin(X)"/>
</list>
</operator>
<operator activated="true" class="exchange_roles" compatibility="5.3.015" expanded="true" height="76" name="Exchange Roles" width="90" x="447" y="30">
<parameter key="first_attribute" value="label"/>
<parameter key="second_attribute" value="Y"/>
</operator>
<operator activated="true" class="select_attributes" compatibility="5.3.015" expanded="true" height="76" name="Select Attributes" width="90" x="581" y="30">
<parameter key="attribute_filter_type" value="subset"/>
<parameter key="attributes" value="|Y|X"/>
</operator>
<connect from_op="Generate Data" from_port="output" to_op="Rename" to_port="example set input"/>
<connect from_op="Rename" from_port="example set output" to_op="Generate Attributes" to_port="example set input"/>
<connect from_op="Generate Attributes" from_port="example set output" to_op="Exchange Roles" to_port="example set input"/>
<connect from_op="Exchange Roles" from_port="example set output" to_op="Select Attributes" to_port="example set input"/>
<connect from_op="Select Attributes" from_port="example set output" to_port="out 1"/>
<portSpacing port="source_in 1" spacing="0"/>
<portSpacing port="sink_out 1" spacing="0"/>
<portSpacing port="sink_out 2" spacing="0"/>
</process>
</operator>
<operator activated="true" class="multiply" compatibility="5.3.015" expanded="true" height="94" name="Multiply" width="90" x="179" y="30"/>
<operator activated="false" class="optimize_parameters_grid" compatibility="5.3.015" expanded="true" height="130" name="Optimize Parameters (Grid)" width="90" x="313" y="30">
<list key="parameters">
<parameter key="Neural Net.learning_rate" value="[0.0001;1.0;5;linear]"/>
<parameter key="Neural Net.momentum" value="[0;1.0;5;linear]"/>
<parameter key="Neural Net.decay" value="true,false"/>
<parameter key="Neural Net.shuffle" value="true,false"/>
<parameter key="Neural Net.normalize" value="true,false"/>
</list>
<process expanded="true">
<operator activated="true" class="neural_net" compatibility="5.3.015" expanded="true" height="76" name="Neural Net" width="90" x="112" y="30">
<list key="hidden_layers">
<parameter key="1" value="10"/>
</list>
<parameter key="training_cycles" value="1000"/>
<parameter key="learning_rate" value="0.60004"/>
<parameter key="momentum" value="0.0"/>
<parameter key="normalize" value="false"/>
<parameter key="error_epsilon" value="1.0E-6"/>
</operator>
<operator activated="true" class="apply_model" compatibility="5.3.015" expanded="true" height="76" name="Apply Model" width="90" x="246" y="30">
<list key="application_parameters"/>
</operator>
<operator activated="true" class="performance_regression" compatibility="5.3.015" expanded="true" height="76" name="Performance" width="90" x="380" y="30"/>
<connect from_port="input 1" to_op="Neural Net" to_port="training set"/>
<connect from_op="Neural Net" from_port="model" to_op="Apply Model" to_port="model"/>
<connect from_op="Neural Net" from_port="exampleSet" to_op="Apply Model" to_port="unlabelled data"/>
<connect from_op="Apply Model" from_port="labelled data" to_op="Performance" to_port="labelled data"/>
<connect from_op="Apply Model" from_port="model" to_port="result 2"/>
<connect from_op="Performance" from_port="performance" to_port="performance"/>
<connect from_op="Performance" from_port="example set" to_port="result 1"/>
<portSpacing port="source_input 1" spacing="0"/>
<portSpacing port="source_input 2" spacing="0"/>
<portSpacing port="sink_performance" spacing="0"/>
<portSpacing port="sink_result 1" spacing="0"/>
<portSpacing port="sink_result 2" spacing="0"/>
<portSpacing port="sink_result 3" spacing="0"/>
</process>
</operator>
<operator activated="true" class="neural_net" compatibility="5.3.015" expanded="true" height="76" name="Neural Net (2)" width="90" x="313" y="210">
<list key="hidden_layers">
<parameter key="1" value="10"/>
</list>
<parameter key="training_cycles" value="1000"/>
<parameter key="learning_rate" value="0.386"/>
<parameter key="momentum" value="0.0"/>
<parameter key="normalize" value="false"/>
<parameter key="error_epsilon" value="1.0E-6"/>
</operator>
<operator activated="true" class="apply_model" compatibility="5.3.015" expanded="true" height="76" name="Apply Model (2)" width="90" x="447" y="210">
<list key="application_parameters"/>
</operator>
<operator activated="true" class="performance_regression" compatibility="5.3.015" expanded="true" height="76" name="Performance (2)" width="90" x="581" y="210"/>
<connect from_op="Gen. Data" from_port="out 1" to_op="Multiply" to_port="input"/>
<connect from_op="Multiply" from_port="output 1" to_port="result 1"/>
<connect from_op="Multiply" from_port="output 2" to_op="Neural Net (2)" to_port="training set"/>
<connect from_op="Neural Net (2)" from_port="model" to_op="Apply Model (2)" to_port="model"/>
<connect from_op="Neural Net (2)" from_port="exampleSet" to_op="Apply Model (2)" to_port="unlabelled data"/>
<connect from_op="Apply Model (2)" from_port="labelled data" to_op="Performance (2)" to_port="labelled data"/>
<connect from_op="Apply Model (2)" from_port="model" to_port="result 3"/>
<connect from_op="Performance (2)" from_port="performance" to_port="result 2"/>
<connect from_op="Performance (2)" from_port="example set" to_port="result 4"/>
<portSpacing port="source_input 1" spacing="0"/>
<portSpacing port="sink_result 1" spacing="0"/>
<portSpacing port="sink_result 2" spacing="162"/>
<portSpacing port="sink_result 3" spacing="0"/>
<portSpacing port="sink_result 4" spacing="0"/>
<portSpacing port="sink_result 5" spacing="0"/>
</process>
</operator>
</process>
Could anyone help me solve this problem, or just explain why this error appears? I want the neural network's parameters to be exactly the same as in my process (changing the NN's parameters is not an option).
Thanks in advance to everyone who will answer.
Answers
-
Hi,
I can't explain the error to you, but I can (sorry for that) suggest activating the "normalize" parameter in the Neural Net. That can work around some numerical problems that apparently occur with your data set.
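To make the idea concrete, here is a purely illustrative Java sketch (not RapidMiner's internal code) of the kind of min-max scaling such a normalization step stands for. With your generated data the label Y = 50*sin(X) spans roughly -50 to +50, and scaling values into [-1, +1] keeps them in a range the sigmoid units handle well:

// Illustrative only: linear min-max scaling of a column of values into [-1, +1].
public class NormalizeSketch {
    static double[] scaleToUnitRange(double[] values) {
        double min = Double.POSITIVE_INFINITY, max = Double.NEGATIVE_INFINITY;
        for (double v : values) {
            min = Math.min(min, v);
            max = Math.max(max, v);
        }
        double[] scaled = new double[values.length];
        for (int i = 0; i < values.length; i++) {
            // Map min -> -1 and max -> +1; a constant column is mapped to 0.
            scaled[i] = (max == min) ? 0.0 : 2.0 * (values[i] - min) / (max - min) - 1.0;
        }
        return scaled;
    }

    public static void main(String[] args) {
        // Values on the scale of Y = 50*sin(X): roughly between -50 and +50.
        double[] y = new double[30];
        for (int i = 0; i < y.length; i++) {
            y[i] = 50.0 * Math.sin(i - 15);
        }
        double[] scaled = scaleToUnitRange(y);
        System.out.println("original first value: " + y[0] + ", scaled: " + scaled[0]);
    }
}

In your XML that corresponds to setting value="true" for the normalize parameter of the Neural Net operator; all other parameters stay as they are.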
Best regards,
Marius
-
It seems that the error is due to very large weights in the nodes of the NN. These extremely large weights lead to a crash when the output of the NN grows beyond the range of the number type (a "double", as far as I can see). Still, I think the program should report the type and cause of the error; it's not easy to understand what goes wrong when it doesn't.
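Just to illustrate what I mean (a minimal sketch, not RapidMiner's actual code): once a value grows past the range of a double it turns into Infinity, and any further arithmetic on it produces NaN, which the learner cannot recover from:

// Illustrative only: a diverging value eventually overflows the double range.
public class OverflowSketch {
    public static void main(String[] args) {
        double weight = 1.0;
        int cycles = 0;
        // Stand-in for a divergent training process: the magnitude grows every cycle.
        while (!Double.isInfinite(weight)) {
            weight *= 10.0;
            cycles++;
        }
        System.out.println("overflowed to " + weight + " after " + cycles + " cycles");
        System.out.println(weight - weight);            // Infinity - Infinity = NaN
        System.out.println(Double.isNaN(weight * 0.0)); // true: every further result is NaN
    }
}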
By the way, if the problem is large weights and a divergent training process, that isn't really an error; it is one of the possible behaviors of a normal model. I think the program should know how to interpret this situation without stopping the computation with an error.