Exception when indexing text documents with Lucene, using SnowballAnalyzer for text cleanup
- by Julia
Hello!
I am indexing documents with Lucene and trying to apply SnowballAnalyzer for punctuation and stop word removal from the text, but I keep getting the following error:
IllegalAccessError: tried to access method org.apache.lucene.analysis.Tokenizer.<init>(Ljava/io/Reader;)V from class org.apache.lucene.analysis.snowball.SnowballAnalyzer
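Just to show exactly how I am calling the analyzer on its own, here is a minimal sketch that only runs SnowballAnalyzer over a throwaway sentence and prints the tokens. The class name, the sample sentence, and the three stop words are just placeholders, and it assumes the Lucene 2.9/3.0-style TokenStream attribute API:

import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.snowball.SnowballAnalyzer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

public class AnalyzerSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder stop words; the real list is much longer.
        String[] stopWords = {"the", "a", "and"};
        // Same constructor call as in the indexer below.
        Analyzer analyzer = new SnowballAnalyzer("English", stopWords);
        // Tokenize a throwaway sentence and print the resulting terms.
        TokenStream stream = analyzer.tokenStream("text", new StringReader("The quick brown foxes, jumping!"));
        TermAttribute term = stream.addAttribute(TermAttribute.class);
        while (stream.incrementToken()) {
            System.out.println(term.term());
        }
        stream.close();
    }
}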
Here is the full indexer code; I would very much appreciate any help! I am new to this.
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Date;

import org.apache.lucene.analysis.PerFieldAnalyzerWrapper;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.snowball.SnowballAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class Indexer {

    private Indexer() {}

    private String[] stopWords = {....};
    private String indexName;
    private IndexWriter iWriter;

    private static String FILES_TO_INDEX = "/Users/ssi/forindexing";

    public static void main(String[] args) throws Exception {
        Indexer m = new Indexer();
        m.index("./newindex");
    }

    public void index(String indexName) throws Exception {
        this.indexName = indexName;
        final File docDir = new File(FILES_TO_INDEX);
        if (!docDir.exists() || !docDir.canRead()) {
            System.err.println("Something wrong... " + docDir.getPath());
            System.exit(1);
        }
        Date start = new Date();

        // SimpleAnalyzer for every field except "text", which gets the SnowballAnalyzer.
        PerFieldAnalyzerWrapper analyzers = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
        analyzers.addAnalyzer("text", new SnowballAnalyzer("English", stopWords));

        Directory directory = FSDirectory.open(new File(this.indexName));
        IndexWriter.MaxFieldLength maxLength = IndexWriter.MaxFieldLength.UNLIMITED;
        iWriter = new IndexWriter(directory, analyzers, true, maxLength);

        System.out.println("Indexing to dir..........." + indexName);
        if (docDir.isDirectory()) {
            File[] files = docDir.listFiles();
            if (files != null) {
                for (int i = 0; i < files.length; i++) {
                    try {
                        indexDocument(files[i]);
                    } catch (FileNotFoundException fnfe) {
                        fnfe.printStackTrace();
                    }
                }
            }
        }
        System.out.println("Optimizing...... ");
        iWriter.optimize();
        iWriter.close();

        Date end = new Date();
        System.out.println("Time to index was " + (end.getTime() - start.getTime()) + " milliseconds");
    }

    private void indexDocument(File someDoc) throws IOException {
        Document doc = new Document();
        Field name = new Field("name", someDoc.getName(), Field.Store.YES, Field.Index.ANALYZED);
        // The "text" field is fed from a Reader and stored with term vectors.
        Field text = new Field("text", new FileReader(someDoc), Field.TermVector.WITH_POSITIONS_OFFSETS);
        doc.add(name);
        doc.add(text);
        iWriter.addDocument(doc);
    }
}