Index: .
===================================================================
--- .	(revision 1526991)
+++ .	(working copy)

Property changes on: .
___________________________________________________________________
Modified: svn:ignore
## -6,10 +6,8 ##
 **/docs
 logs
 .settings
-lib
 target
-*.ipr
-*.iml
+*.patch
Index: CHANGES.txt
===================================================================
--- CHANGES.txt	(revision 1526991)
+++ CHANGES.txt	(working copy)
@@ -3,6 +3,7 @@
 Release 0.6.3 (unreleased changes)
 
   NEW FEATURES
 
+   HAMA-804: Create NeuralNetwork Example (Yexi Jiang)
    HAMA-795: Implement Autoencoder based on NeuralNetwork (Yexi Jiang)
    HAMA-767: Add vertex addition/removal APIs (Anastasis Andronidis via edwardyoon)
 
@@ -24,6 +25,7 @@
 
   IMPROVEMENTS
 
+   HAMA-806: Make the description of NeuralNetwork example more clear (Yexi Jiang)
    HAMA-749: Build for C++ Pipes (Martin Illecker)
    HAMA-796: Add Vector multiply Matrix for DoubleVector as well as DenseDoubleVector. (Yexi Jiang)
    HAMA-770: Use a unified model to represent linear regression, logistic regression, MLP, autoencoder, and deepNets (Yexi Jiang)
Index: examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java
===================================================================
--- examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java	(revision 1526991)
+++ examples/src/main/java/org/apache/hama/examples/NeuralNetwork.java	(working copy)
@@ -34,7 +34,8 @@
 import org.apache.hama.ml.math.FunctionFactory;
 
 /**
- *
+ * An example of using {@link SmallLayeredNeuralNetwork}, covering both the
+ * training phase and the labeling phase.
  */
 public class NeuralNetwork {
 
@@ -50,9 +51,9 @@
       return;
     }
 
-    String modelPath = args[1];
-    String featureDataPath = args[2];
-    String resultDataPath = args[3];
+    String featureDataPath = args[1];
+    String resultDataPath = args[2];
+    String modelPath = args[3];
 
     SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork(modelPath);
 
@@ -187,14 +188,14 @@
     System.out
         .println("\tMODE\t- train: train the model with given training data.");
     System.out
-        .println("\t\t- evaluate: obtain the result by feeding the features to the neural network.");
+        .println("\t\t- label: obtain the result by feeding the features to the neural network.");
     System.out
-        .println("\tINPUT_PATH\tin 'train' mode, it is the path of the training data; in 'evaluate' mode, it is the path of the to be evaluated data that lacks the label.");
+        .println("\tINPUT_PATH\tin 'train' mode, it is the path of the training data; in 'label' mode, it is the path of the unlabeled data to be evaluated.");
     System.out
-        .println("\tOUTPUT_PATH\tin 'train' mode, it is where the trained model is stored; in 'evaluate' mode, it is where the labeled data is stored.");
+        .println("\tOUTPUT_PATH\tin 'train' mode, it is where the trained model is stored; in 'label' mode, it is where the labeled data is stored.");
     System.out.println("\n\tConditional Parameters:");
     System.out
-        .println("\tMODEL_PATH\tonly required in 'evaluate' mode. It specifies where to load the trained neural network model.");
+        .println("\tMODEL_PATH\tonly required in 'label' mode. It specifies where to load the trained neural network model.");
     System.out
         .println("\tMAX_ITERATION\tonly used in 'train' mode. It specifies how many iterations for the neural network to run. Default is 0.01.");
     System.out
@@ -205,9 +206,9 @@
         .println("\tREGULARIZATION_WEIGHT\tonly required in 'train' model. It specifies the weight of reqularization.");
     System.out.println("\nExample:");
     System.out
-        .println("Train a neural network with default setting:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1");
+        .println("Train a neural network with feature dimension 8, label dimension 1, and the default settings:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1");
     System.out
-        .println("Train a neural network by specify learning rate as 0.1, momemtum rate as 0.2, and regularization weight as 0.01:\n\tneuralnets.train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 0.1 0.2 0.01");
+        .println("Train a neural network with feature dimension 8, label dimension 1, learning rate 0.1, momentum rate 0.2, and regularization weight 0.01:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1 0.1 0.2 0.01");
     System.out
         .println("Label the data with trained model:\n\tneuralnets evaluate hdfs://localhost:30002/unlabeled_data hdfs://localhost:30002/result hdfs://localhost:30002/model");
   }
Index: examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java
===================================================================
--- examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java	(revision 1526991)
+++ examples/src/test/java/org/apache/hama/examples/NeuralNetworkTest.java	(working copy)
@@ -58,7 +58,7 @@
     String mode = "label";
     try {
       NeuralNetwork
-          .main(new String[] { mode, MODEL_PATH, dataPath, RESULT_PATH });
+          .main(new String[] { mode, dataPath, RESULT_PATH, MODEL_PATH });
 
       // compare results with ground-truth
      BufferedReader groundTruthReader = new BufferedReader(new FileReader(
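Usage sketch (hypothetical, for illustration only): the snippet below shows how the example's entry point would be invoked with the argument order this change establishes, mirroring the usage strings and the updated NeuralNetworkTest above. The wrapper class name and the HDFS paths are illustrative placeholders, and the throws Exception clause is only an assumption based on the test wrapping the call in a try block.

import org.apache.hama.examples.NeuralNetwork;

// Illustrative sketch, not part of the patch; argument order follows the
// usage text printed by NeuralNetwork above.
public class NeuralNetworkUsageSketch {
  public static void main(String[] args) throws Exception {
    // 'train' mode: INPUT_PATH OUTPUT_PATH FEATURE_DIMENSION LABEL_DIMENSION
    //               [LEARNING_RATE MOMENTUM_WEIGHT REGULARIZATION_WEIGHT]
    NeuralNetwork.main(new String[] { "train",
        "hdfs://localhost:30002/training_data",
        "hdfs://localhost:30002/model",
        "8", "1", "0.1", "0.2", "0.01" });

    // 'label' mode: INPUT_PATH OUTPUT_PATH MODEL_PATH
    // (the model path now comes last, matching the reordered args[1..3])
    NeuralNetwork.main(new String[] { "label",
        "hdfs://localhost:30002/unlabeled_data",
        "hdfs://localhost:30002/result",
        "hdfs://localhost:30002/model" });
  }
}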