
Lucene example source code file (WikipediaTokenizerTest.java)

This example Lucene source code file (WikipediaTokenizerTest.java) is included in the DevDaily.com "Java Source Code Warehouse" project, whose intent is to help you "Learn Java by Example."

Java - Lucene tags/keywords

alphanum, category, exception, external_link, external_link_url, io, string, stringreader, test, this, util, wikipediatokenizer

The Lucene WikipediaTokenizerTest.java source code

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


package org.apache.lucene.analysis.wikipedia;

import java.io.StringReader;
import java.io.IOException;
import java.util.Set;
import java.util.HashSet;

import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;

import static org.apache.lucene.analysis.wikipedia.WikipediaTokenizer.*;

/**
 * Basic Tests for {@link WikipediaTokenizer}
 **/
public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
  protected static final String LINK_PHRASES = "click [[link here again]] click [http://lucene.apache.org here again] [[Category:a b c d]]";

  public void testSimple() throws Exception {
    String text = "This is a [[Category:foo]]";
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(text));
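    // assertTokenStreamContents checks, in order: the expected terms, start
    // offsets, end offsets, token types, position increments, and final offset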
    assertTokenStreamContents(tf,
        new String[] { "This", "is", "a", "foo" },
        new int[] { 0, 5, 8, 21 },
        new int[] { 4, 7, 9, 24 },
        new String[] { "<ALPHANUM>", "", "", CATEGORY },
        new int[] { 1, 1, 1, 1, },
        text.length());
  }
  
  public void testHandwritten() throws Exception {
    // make sure all tokens are in only one type
    String test = "[[link]] This is a [[Category:foo]] Category  This is a linked [[:Category:bar none withstanding]] "
        + "Category This is (parens) This is a [[link]]  This is an external URL [http://lucene.apache.org] "
        + "Here is ''italics'' and ''more italics'', '''bold''' and '''''five quotes''''' "
        + " This is a [[link|display info]]  This is a period.  Here is $3.25 and here is 3.50.  Here's Johnny.  "
        + "==heading== ===sub head=== followed by some text  [[Category:blah| ]] "
        + "''[[Category:ital_cat]]''  here is some that is ''italics [[Category:foo]] but is never closed."
        + "'''same [[Category:foo]] goes for this '''''and2 [[Category:foo]] and this"
        + " [http://foo.boo.com/test/test/ Test Test] [http://foo.boo.com/test/test/test.html Test Test]"
        + " [http://foo.boo.com/test/test/test.html?g=b&c=d Test Test] <ref>Citation martian code";
    
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test));
    assertTokenStreamContents(tf, 
      new String[] {"link", "This", "is", "a",
        "foo", "Category", "This", "is", "a", "linked", "bar", "none",
        "withstanding", "Category", "This", "is", "parens", "This", "is", "a",
        "link", "This", "is", "an", "external", "URL",
        "http://lucene.apache.org", "Here", "is", "italics", "and", "more",
        "italics", "bold", "and", "five", "quotes", "This", "is", "a", "link",
        "display", "info", "This", "is", "a", "period", "Here", "is", "3.25",
        "and", "here", "is", "3.50", "Here's", "Johnny", "heading", "sub",
        "head", "followed", "by", "some", "text", "blah", "ital", "cat",
        "here", "is", "some", "that", "is", "italics", "foo", "but", "is",
        "never", "closed", "same", "foo", "goes", "for", "this", "and2", "foo",
        "and", "this", "http://foo.boo.com/test/test/", "Test", "Test",
        "http://foo.boo.com/test/test/test.html", "Test", "Test",
        "http://foo.boo.com/test/test/test.html?g=b&c=d", "Test", "Test",
        "Citation", "martian", "code"}, 
      new String[] {INTERNAL_LINK,
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", CATEGORY, "<ALPHANUM>",
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", CATEGORY,
        CATEGORY, CATEGORY, "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>",
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", INTERNAL_LINK,
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>",
        EXTERNAL_LINK_URL, "<ALPHANUM>", "<ALPHANUM>", ITALICS, "<ALPHANUM>",
        ITALICS, ITALICS, BOLD, "<ALPHANUM>", BOLD_ITALICS, BOLD_ITALICS,
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", INTERNAL_LINK, INTERNAL_LINK,
        INTERNAL_LINK, "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>",
        "<ALPHANUM>", "<ALPHANUM>", "<NUM>", "<ALPHANUM>", "<ALPHANUM>",
        "<ALPHANUM>", "<NUM>", "<APOSTROPHE>", "<ALPHANUM>", HEADING,
        SUB_HEADING, SUB_HEADING, "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>",
        "<ALPHANUM>", CATEGORY, CATEGORY, CATEGORY, "<ALPHANUM>", "<ALPHANUM>",
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", ITALICS, CATEGORY,
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", BOLD, CATEGORY,
        "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", BOLD_ITALICS, CATEGORY,
        "<ALPHANUM>", "<ALPHANUM>", EXTERNAL_LINK_URL, EXTERNAL_LINK,
        EXTERNAL_LINK, EXTERNAL_LINK_URL, EXTERNAL_LINK, EXTERNAL_LINK,
        EXTERNAL_LINK_URL, EXTERNAL_LINK, EXTERNAL_LINK, CITATION,
        "<ALPHANUM>", "<ALPHANUM>"});
  }

  public void testLinkPhrases() throws Exception {
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(LINK_PHRASES));
    checkLinkPhrases(tf);
  }

  private void checkLinkPhrases(WikipediaTokenizer tf) throws IOException {
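    // the int[] below holds position increments; the single 0 puts "here" at
    // the same position as the external link URL token that precedes it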
    assertTokenStreamContents(tf,
        new String[] { "click", "link", "here", "again", "click", 
        "http://lucene.apache.org", "here", "again", "a", "b", "c", "d" },
        new int[] { 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1 });
  }

  public void testLinks() throws Exception {
    String test = "[http://lucene.apache.org/java/docs/index.html#news here] [http://lucene.apache.org/java/docs/index.html?b=c here] [https://lucene.apache.org/java/docs/index.html?b=c here]";
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test));
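    // each bracketed external link yields an EXTERNAL_LINK_URL token for the
    // URL itself, followed by EXTERNAL_LINK tokens for the anchor text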
    assertTokenStreamContents(tf,
        new String[] { "http://lucene.apache.org/java/docs/index.html#news", "here",
          "http://lucene.apache.org/java/docs/index.html?b=c", "here",
          "https://lucene.apache.org/java/docs/index.html?b=c", "here" },
        new String[] { EXTERNAL_LINK_URL, EXTERNAL_LINK,
          EXTERNAL_LINK_URL, EXTERNAL_LINK,
          EXTERNAL_LINK_URL, EXTERNAL_LINK, });
  }

  public void testLucene1133() throws Exception {
    Set<String> untoks = new HashSet<String>();
    untoks.add(WikipediaTokenizer.CATEGORY);
    untoks.add(WikipediaTokenizer.ITALICS);
    //should be exactly the same, regardless of untoks
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(LINK_PHRASES), WikipediaTokenizer.TOKENS_ONLY, untoks);
    checkLinkPhrases(tf);
    String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
    tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.UNTOKENIZED_ONLY, untoks);
    assertTokenStreamContents(tf,
        new String[] { "a b c d", "e f g", "link", "here", "link",
          "there", "italics here", "something", "more italics", "h   i   j" },
        new int[] { 11, 32, 42, 47, 56, 61, 71, 86, 98, 124 },
        new int[] { 18, 37, 46, 51, 60, 66, 83, 95, 110, 133 },
        new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }
       );
  }

  public void testBoth() throws Exception {
    Set<String> untoks = new HashSet<String>();
    untoks.add(WikipediaTokenizer.CATEGORY);
    untoks.add(WikipediaTokenizer.ITALICS);
    String test = "[[Category:a b c d]] [[Category:e f g]] [[link here]] [[link there]] ''italics here'' something ''more italics'' [[Category:h   i   j]]";
    // should output all the individual tokens plus the untokenized phrase
    // tokens as well; each phrase token is followed by its component tokens
    WikipediaTokenizer tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
    assertTokenStreamContents(tf,
        new String[] { "a b c d", "a", "b", "c", "d", "e f g", "e", "f", "g",
          "link", "here", "link", "there", "italics here", "italics", "here",
          "something", "more italics", "more", "italics", "h   i   j", "h", "i", "j" },
        new int[] { 11, 11, 13, 15, 17, 32, 32, 34, 36, 42, 47, 56, 61, 71, 71, 79, 86, 98,  98,  103, 124, 124, 128, 132 },
        new int[] { 18, 12, 14, 16, 18, 37, 33, 35, 37, 46, 51, 60, 66, 83, 78, 83, 95, 110, 102, 110, 133, 125, 129, 133 },
        new int[] { 1,  0,  1,  1,  1,  1,  0,  1,  1,  1,  1,  1,  1,  1,  0,  1,  1,  1,   0,   1,   1,   0,   1,   1 }
       );
    
    // now check the flags, TODO: add way to check flags from BaseTokenStreamTestCase?
    tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
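    // UNTOKENIZED_TOKEN_FLAG marks each whole-phrase token; the individual
    // tokens split out of a phrase carry flag 0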
    int expectedFlags[] = new int[] { UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, 
        0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0 };
    FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
    tf.reset();
    for (int i = 0; i < expectedFlags.length; i++) {
      assertTrue(tf.incrementToken());
      assertEquals("flags " + i, expectedFlags[i], flagsAtt.getFlags());
    }
    assertFalse(tf.incrementToken());
    tf.close();
  }
}
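
To see the tokenizer outside of a test harness, here is a minimal stand-alone sketch that prints each token and its type. It is not part of the original file: the class name WikipediaTokenizerDemo and the sample markup string are made up for illustration, and it assumes a Lucene 3.x-era classpath where the WikipediaTokenizer(Reader) constructor used in the tests above exists.

import java.io.StringReader;

import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;

public class WikipediaTokenizerDemo {
  public static void main(String[] args) throws Exception {
    // hypothetical sample markup, not taken from the test above
    String markup = "Visit [[Main Page]] and [http://lucene.apache.org the site] [[Category:Search]]";
    WikipediaTokenizer tokenizer = new WikipediaTokenizer(new StringReader(markup));
    CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
    TypeAttribute type = tokenizer.addAttribute(TypeAttribute.class);
    tokenizer.reset();
    while (tokenizer.incrementToken()) {
      // prints each term with its type string: "<ALPHANUM>" for plain words,
      // or the short codes behind INTERNAL_LINK, CATEGORY, etc. for markup
      System.out.println(term.toString() + " -> " + type.type());
    }
    tokenizer.end();
    tokenizer.close();
  }
}

BaseTokenStreamTestCase's assertTokenStreamContents drives essentially this same reset/incrementToken/end/close lifecycle internally, comparing each attribute against the expected arrays shown in the tests above.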
