Getting started with mahout 0.8



mahout-distribution-0.8

To see which class a mahout command-line sub-command maps to, check the driver configuration file in the distribution:

driver.classes.default.props
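For example, to look up the class behind the kmeans command you can grep this file in the unpacked distribution (a minimal sketch; the conf/ location below is an assumption, some distributions keep it under src/conf/ instead):

grep kmeans $MAHOUT_HOME/conf/driver.classes.default.props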


Mahout API (Javadoc)

https://builds.apache.org/job/Mahout-Quality/javadoc/



Reference blog for hands-on Mahout (a movie recommendation example):

http://itindex.net/detail/45259-mahout-%E7%94%B5%E5%BD%B1-%E6%8E%A8%E8%8D%90%E7%B3%BB%E7%BB%9F


Clustering algorithms


kmeans: cannot eliminate the influence of outliers


canopy: uses two distance thresholds t1 and t2, with t1 > t2; it is simple and fast but not very accurate, can reduce the influence of outliers, and is typically used to decide the number of cluster centers k for kmeans (a command-line sketch follows the links below)


Canopy clustering algorithm:
http://my.oschina.net/liangtee/blog/125407

Mahout canopy algorithm in practice:

http://blog.csdn.net/xyilu/article/details/9631677
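A rough sketch of how the two are usually chained on the command line (not taken from this article: the paths, distance measure and threshold values are placeholders, and the exact clusters-* directory name written by canopy depends on the Mahout version). Run canopy first with t1 > t2 to estimate the centers, then hand those centers to kmeans:

mahout canopy -i /mahout/vectors/tfidf-vectors -o /mahout/canopy-centroids -dm org.apache.mahout.common.distance.CosineDistanceMeasure -t1 0.5 -t2 0.3 -ow

mahout kmeans -i /mahout/vectors/tfidf-vectors -c /mahout/canopy-centroids/clusters-0-final -o /mahout/kmeans-clusters -dm org.apache.mahout.common.distance.CosineDistanceMeasure -cd 0.1 -x 10 -ow -cl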




Classification: Bayes (needs a training set, probability-based), a text-classification algorithm (supervised learning)


The naive Bayes classifier has two event models, which differ in granularity:

  1. Multinomial model: works at the word level, counting every occurrence of a word in a document.

  2. Bernoulli model: works at the document level, recording only whether a word appears in a document.

For example, if a word occurs three times in one article, the multinomial model counts it three times, while the Bernoulli model records only that it appeared.


Used for news classification, e.g. sports vs. entertainment.

Mahout provides a way to convert the files under a given directory into a SequenceFile:

mahout seqdirectory --input /hive/hadoopuser/ --output /mahout/seq/ --charset UTF-8


Convert the binary SequenceFile into vectors:


mahout seq2sparse
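On its own, seq2sparse still needs input/output paths and a weighting scheme; a typical invocation (the paths and options here are illustrative, patterned on the bundled 20-newsgroups example rather than prescribed by this article) looks like:

mahout seq2sparse -i /mahout/seq/ -o /mahout/vectors/ -lnorm -nv -wt tfidf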


Completing naive Bayes classification (with Chinese word segmentation):


f.dataguru.cn/thread-244375-1-1.html


http://www.cnblogs.com/panweishadow/p/4320720.html



Lower versions still ship the old Bayes implementation (the testclassifier command).




Version 0.11 already uses the new naive Bayes:

#Classification
#new bayes
org.apache.mahout.classifier.naivebayes.training.TrainNaiveBayesJob = trainnb : Train the Vector-based Bayes classifier
org.apache.mahout.classifier.naivebayes.test.TestNaiveBayesDriver = testnb : Test the Vector-based Bayes classifier



cbayes=ComplementaryNaiveBayes
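A sketch of training and testing with the new trainnb/testnb drivers, again patterned on the bundled 20-newsgroups example (the /mahout/... paths are placeholders, and the tf-idf vectors are assumed to have already been split into train and test sets, e.g. with mahout split); append -c to both commands to use the complementary (cbayes) variant:

mahout trainnb -i /mahout/train-vectors -el -o /mahout/model -li /mahout/labelindex -ow

mahout testnb -i /mahout/test-vectors -m /mahout/model -l /mahout/labelindex -o /mahout/testing -ow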

TestNaiveBayesDriver source:

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.mahout.classifier.naivebayes.test;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.apache.mahout.classifier.ClassifierResult;
import org.apache.mahout.classifier.ResultAnalyzer;
import org.apache.mahout.classifier.naivebayes.AbstractNaiveBayesClassifier;
import org.apache.mahout.classifier.naivebayes.BayesUtils;
import org.apache.mahout.classifier.naivebayes.ComplementaryNaiveBayesClassifier;
import org.apache.mahout.classifier.naivebayes.NaiveBayesModel;
import org.apache.mahout.classifier.naivebayes.StandardNaiveBayesClassifier;
import org.apache.mahout.common.AbstractJob;
import org.apache.mahout.common.HadoopUtil;
import org.apache.mahout.common.Pair;
import org.apache.mahout.common.commandline.DefaultOptionCreator;
import org.apache.mahout.common.iterator.sequencefile.PathFilters;
import org.apache.mahout.common.iterator.sequencefile.PathType;
import org.apache.mahout.common.iterator.sequencefile.SequenceFileDirIterable;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Test the (Complementary) Naive Bayes model that was built during training
 * by running the iterating the test set and comparing it to the model
 */
public class TestNaiveBayesDriver extends AbstractJob {

  private static final Logger log = LoggerFactory.getLogger(TestNaiveBayesDriver.class);

  public static final String COMPLEMENTARY = "class"; //b for bayes, c for complementary

  private static final Pattern SLASH = Pattern.compile("/");

  public static void main(String[] args) throws Exception {
    ToolRunner.run(new Configuration(), new TestNaiveBayesDriver(), args);
  }

  @Override
  public int run(String[] args) throws Exception {
    addInputOption();
    addOutputOption();
    addOption(addOption(DefaultOptionCreator.overwriteOption().create()));
    addOption("model", "m", "The path to the model built during training", true);
    addOption(buildOption("testComplementary", "c", "test complementary?", false, false, String.valueOf(false)));
    addOption(buildOption("runSequential", "seq", "run sequential?", false, false, String.valueOf(false)));
    addOption("labelIndex", "l", "The path to the location of the label index", true);
    Map<String, List<String>> parsedArgs = parseArguments(args);
    if (parsedArgs == null) {
      return -1;
    }
    if (hasOption(DefaultOptionCreator.OVERWRITE_OPTION)) {
      HadoopUtil.delete(getConf(), getOutputPath());
    }

    boolean sequential = hasOption("runSequential");
    boolean succeeded;
    if (sequential) {
      runSequential();
    } else {
      succeeded = runMapReduce();
      if (!succeeded) {
        return -1;
      }
    }

    //load the labels
    Map<Integer, String> labelMap = BayesUtils.readLabelIndex(getConf(), new Path(getOption("labelIndex")));

    //loop over the results and create the confusion matrix
    SequenceFileDirIterable<Text, VectorWritable> dirIterable =
        new SequenceFileDirIterable<>(getOutputPath(), PathType.LIST, PathFilters.partFilter(), getConf());
    ResultAnalyzer analyzer = new ResultAnalyzer(labelMap.values(), "DEFAULT");
    analyzeResults(labelMap, dirIterable, analyzer);

    log.info("{} Results: {}", hasOption("testComplementary") ? "Complementary" : "Standard NB", analyzer);
    return 0;
  }

  private void runSequential() throws IOException {
    boolean complementary = hasOption("testComplementary");
    FileSystem fs = FileSystem.get(getConf());
    NaiveBayesModel model = NaiveBayesModel.materialize(new Path(getOption("model")), getConf());

    // Ensure that if we are testing in complementary mode, the model has been
    // trained complementary. a complementarty model will work for standard classification
    // a standard model will not work for complementary classification
    if (complementary) {
      Preconditions.checkArgument((model.isComplemtary()),
          "Complementary mode in model is different from test mode");
    }

    AbstractNaiveBayesClassifier classifier;
    if (complementary) {
      classifier = new ComplementaryNaiveBayesClassifier(model);
    } else {
      classifier = new StandardNaiveBayesClassifier(model);
    }

    try (SequenceFile.Writer writer =
             SequenceFile.createWriter(fs, getConf(), new Path(getOutputPath(), "part-r-00000"),
                 Text.class, VectorWritable.class)) {
      SequenceFileDirIterable<Text, VectorWritable> dirIterable =
          new SequenceFileDirIterable<>(getInputPath(), PathType.LIST, PathFilters.partFilter(), getConf());
      // loop through the part-r-* files in getInputPath() and get classification scores for all entries
      for (Pair<Text, VectorWritable> pair : dirIterable) {
        writer.append(new Text(SLASH.split(pair.getFirst().toString())[1]),
            new VectorWritable(classifier.classifyFull(pair.getSecond().get())));
      }
    }
  }

  private boolean runMapReduce() throws IOException,
      InterruptedException, ClassNotFoundException {
    Path model = new Path(getOption("model"));
    HadoopUtil.cacheFiles(model, getConf());
    //the output key is the expected value, the output value are the scores for all the labels
    Job testJob = prepareJob(getInputPath(), getOutputPath(), SequenceFileInputFormat.class, BayesTestMapper.class,
        Text.class, VectorWritable.class, SequenceFileOutputFormat.class);
    //testJob.getConfiguration().set(LABEL_KEY, getOption("--labels"));
    boolean complementary = hasOption("testComplementary");
    testJob.getConfiguration().set(COMPLEMENTARY, String.valueOf(complementary));
    return testJob.waitForCompletion(true);
  }

  private static void analyzeResults(Map<Integer, String> labelMap,
                                     SequenceFileDirIterable<Text, VectorWritable> dirIterable,
                                     ResultAnalyzer analyzer) {
    for (Pair<Text, VectorWritable> pair : dirIterable) {
      int bestIdx = Integer.MIN_VALUE;
      double bestScore = Long.MIN_VALUE;
      for (Vector.Element element : pair.getSecond().get().all()) {
        if (element.get() > bestScore) {
          bestScore = element.get();
          bestIdx = element.index();
        }
      }
      if (bestIdx != Integer.MIN_VALUE) {
        ClassifierResult classifierResult = new ClassifierResult(labelMap.get(bestIdx), bestScore);
        analyzer.addInstance(pair.getFirst().toString(), classifierResult);
      }
    }
  }
}



BayesTestMapper source:

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.mahout.classifier.naivebayes.test;

import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.mahout.classifier.naivebayes.AbstractNaiveBayesClassifier;
import org.apache.mahout.classifier.naivebayes.ComplementaryNaiveBayesClassifier;
import org.apache.mahout.classifier.naivebayes.NaiveBayesModel;
import org.apache.mahout.classifier.naivebayes.StandardNaiveBayesClassifier;
import org.apache.mahout.common.HadoopUtil;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorWritable;

import java.io.IOException;
import java.util.regex.Pattern;

/**
 * Run the input through the model and see if it matches.
 * <p/>
 * The output value is the generated label, the Pair is the expected label and true if they match:
 */
public class BayesTestMapper extends Mapper<Text, VectorWritable, Text, VectorWritable> {

  private static final Pattern SLASH = Pattern.compile("/");

  private AbstractNaiveBayesClassifier classifier;

  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Configuration conf = context.getConfiguration();
    Path modelPath = HadoopUtil.getSingleCachedFile(conf);
    NaiveBayesModel model = NaiveBayesModel.materialize(modelPath, conf);
    boolean isComplementary = Boolean.parseBoolean(conf.get(TestNaiveBayesDriver.COMPLEMENTARY));

    // ensure that if we are testing in complementary mode, the model has been
    // trained complementary. a complementarty model will work for standard classification
    // a standard model will not work for complementary classification
    if (isComplementary) {
      Preconditions.checkArgument((model.isComplemtary()),
          "Complementary mode in model is different than test mode");
    }

    if (isComplementary) {
      classifier = new ComplementaryNaiveBayesClassifier(model);
    } else {
      classifier = new StandardNaiveBayesClassifier(model);
    }
  }

  @Override
  protected void map(Text key, VectorWritable value, Context context) throws IOException, InterruptedException {
    Vector result = classifier.classifyFull(value.get());
    //the key is the expected value
    context.write(new Text(SLASH.split(key.toString())[1]), new VectorWritable(result));
  }
}

