/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.lang3.text;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;

import org.apache.commons.lang3.AbstractLangTest;
import org.apache.commons.lang3.ArrayUtils;
import org.junit.jupiter.api.Test;

/**
 * Unit tests for {@link StrTokenizer}.
 */
@Deprecated
public class StrTokenizerTest extends AbstractLangTest {

    private static final String CSV_SIMPLE_FIXTURE = "A,b,c";

    private static final String TSV_SIMPLE_FIXTURE = "A\tb\tc";

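    /**
     * Asserts that the given tokenizer is not the same instance as a freshly
     * obtained CSV or TSV tokenizer, i.e. that it is an independent object.
     */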
    private void checkClone(final StrTokenizer tokenizer) {
        assertNotSame(StrTokenizer.getCSVInstance(), tokenizer);
        assertNotSame(StrTokenizer.getTSVInstance(), tokenizer);
    }

    @Test
    public void test1() {

        final String input = "a;b;c;\"d;\"\"e\";f; ; ;  ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f", "", "", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test2() {

        final String input = "a;b;c ;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c ", "d;\"e", "f", " ", " ", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test3() {

        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", " c", "d;\"e", "f", " ", " ", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test4() {

        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f"};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test5() {

        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d;\"e", "f", null, null, null};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test6() {

        final String input = "a;b; c;\"d;\"\"e\";f; ; ;";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterChar(';');
        tok.setQuoteChar('"');
        tok.setIgnoredMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        // tok.setEmptyTokenAsNull(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", " c", "d;\"e", "f", null, null, null};

        int nextCount = 0;
        while (tok.hasNext()) {
            tok.next();
            nextCount++;
        }

        int prevCount = 0;
        while (tok.hasPrevious()) {
            tok.previous();
            prevCount++;
        }

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));

        assertEquals(expected.length, nextCount, "could not cycle through entire token list using the 'hasNext' and 'next' methods");

        assertEquals(expected.length, prevCount, "could not cycle through entire token list using the 'hasPrevious' and 'previous' methods");

    }

    @Test
    public void test7() {

        final String input = "a   b c \"d e\" f ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(false);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "", "", "b", "c", "d e", "f", ""};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void test8() {

        final String input = "a   b c \"d e\" f ";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setDelimiterMatcher(StrMatcher.spaceMatcher());
        tok.setQuoteMatcher(StrMatcher.doubleQuoteMatcher());
        tok.setIgnoredMatcher(StrMatcher.noneMatcher());
        tok.setIgnoreEmptyTokens(true);
        final String[] tokens = tok.getTokenArray();

        final String[] expected = {"a", "b", "c", "d e", "f"};

        assertEquals(expected.length, tokens.length, ArrayUtils.toString(tokens));
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], tokens[i],
                    "token[" + i + "] was '" + tokens[i] + "' but was expected to be '" + expected[i] + "'");
        }

    }

    @Test
    public void testBasic1() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasic2() {
        final String input = "a \nb\fc";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasic3() {
        final String input = "a \nb\u0001\fc";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("b\u0001", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasic4() {
        final String input = "a \"b\" c";
        final StrTokenizer tok = new StrTokenizer(input);
        assertEquals("a", tok.next());
        assertEquals("\"b\"", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasic5() {
        final String input = "a:b':c";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        assertEquals("a", tok.next());
        assertEquals("b'", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicDelim1() {
        final String input = "a:b:c";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicDelim2() {
        final String input = "a:b:c";
        final StrTokenizer tok = new StrTokenizer(input, ',');
        assertEquals("a:b:c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicEmpty1() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        assertEquals("a", tok.next());
        assertEquals("", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicEmpty2() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertNull(tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted1() {
        final String input = "a 'b' c";
        final StrTokenizer tok = new StrTokenizer(input, ' ', '\'');
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted2() {
        final String input = "a:'b':";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted3() {
        final String input = "a:'b''c'";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b'c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted4() {
        final String input = "a: 'b' 'c' :d";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b c", tok.next());
        assertEquals("d", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted5() {
        final String input = "a: 'b'x'c' :d";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bxc", tok.next());
        assertEquals("d", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted6() {
        final String input = "a:'b'\"c':d";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("b\"c:d", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuoted7() {
        final String input = "a:\"There's a reason here\":b";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setQuoteMatcher(StrMatcher.quoteMatcher());
        assertEquals("a", tok.next());
        assertEquals("There's a reason here", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicQuotedTrimmed1() {
        final String input = "a: 'b' :";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicTrimmed1() {
        final String input = "a: b :  ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicTrimmed2() {
        final String input = "a:  b  :";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setTrimmerMatcher(StrMatcher.stringMatcher("  "));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicIgnoreTrimmed1() {
        final String input = "a: bIGNOREc : ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicIgnoreTrimmed2() {
        final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bc", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicIgnoreTrimmed3() {
        final String input = "IGNOREaIGNORE: IGNORE bIGNOREc IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("  bc  ", tok.next());
        assertEquals("  ", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testBasicIgnoreTrimmed4() {
        final String input = "IGNOREaIGNORE: IGNORE 'bIGNOREc'IGNORE'd' IGNORE : IGNORE ";
        final StrTokenizer tok = new StrTokenizer(input, ':', '\'');
        tok.setIgnoredMatcher(StrMatcher.stringMatcher("IGNORE"));
        tok.setTrimmerMatcher(StrMatcher.trimMatcher());
        tok.setIgnoreEmptyTokens(false);
        tok.setEmptyTokenAsNull(true);
        assertEquals("a", tok.next());
        assertEquals("bIGNOREcd", tok.next());
        assertNull(tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testListArray() {
        final String input = "a  b c";
        final StrTokenizer tok = new StrTokenizer(input);
        final String[] array = tok.getTokenArray();
        final List<?> list = tok.getTokenList();

        assertEquals(Arrays.asList(array), list);
        assertEquals(3, list.size());
    }

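    /**
     * Runs the standard "A,b,c" assertions against CSV tokenizers created from
     * both a String and a char[] of the given data.
     */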
    private void testCSV(final String data) {
        this.testXSVAbc(StrTokenizer.getCSVInstance(data));
        this.testXSVAbc(StrTokenizer.getCSVInstance(data.toCharArray()));
    }

    @Test
    public void testCSVEmpty() {
        this.testEmpty(StrTokenizer.getCSVInstance());
        this.testEmpty(StrTokenizer.getCSVInstance(""));
    }

    @Test
    public void testCSVSimple() {
        this.testCSV(CSV_SIMPLE_FIXTURE);
    }

    @Test
    public void testCSVSimpleNeedsTrim() {
        this.testCSV("   " + CSV_SIMPLE_FIXTURE);
        this.testCSV("   \n\t  " + CSV_SIMPLE_FIXTURE);
        this.testCSV("   \n  " + CSV_SIMPLE_FIXTURE + "\n\n\r");
    }

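    /**
     * Asserts the behaviour of a tokenizer over empty input: no tokens in either
     * direction, a null nextToken(), size zero, and NoSuchElementException from next().
     */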
    void testEmpty(final StrTokenizer tokenizer) {
        this.checkClone(tokenizer);
        assertFalse(tokenizer.hasNext());
        assertFalse(tokenizer.hasPrevious());
        assertNull(tokenizer.nextToken());
        assertEquals(0, tokenizer.size());
        assertThrows(NoSuchElementException.class, tokenizer::next);
    }

    @Test
    public void testGetContent() {
        final String input = "a   b c \"d e\" f ";
        StrTokenizer tok = new StrTokenizer(input);
        assertEquals(input, tok.getContent());

        tok = new StrTokenizer(input.toCharArray());
        assertEquals(input, tok.getContent());

        tok = new StrTokenizer();
        assertNull(tok.getContent());
    }

    @Test
    public void testChaining() {
        final StrTokenizer tok = new StrTokenizer();
        assertEquals(tok, tok.reset());
        assertEquals(tok, tok.reset(""));
        assertEquals(tok, tok.reset(new char[0]));
        assertEquals(tok, tok.setDelimiterChar(' '));
        assertEquals(tok, tok.setDelimiterString(" "));
        assertEquals(tok, tok.setDelimiterMatcher(null));
        assertEquals(tok, tok.setQuoteChar(' '));
        assertEquals(tok, tok.setQuoteMatcher(null));
        assertEquals(tok, tok.setIgnoredChar(' '));
        assertEquals(tok, tok.setIgnoredMatcher(null));
        assertEquals(tok, tok.setTrimmerMatcher(null));
        assertEquals(tok, tok.setEmptyTokenAsNull(false));
        assertEquals(tok, tok.setIgnoreEmptyTokens(false));
    }

    /**
     * Tests that {@link StrTokenizer#clone()} catches {@link CloneNotSupportedException} and returns
     * {@code null}.
     */
    @Test
    public void testCloneNotSupportedException() {
        final Object notCloned = new StrTokenizer() {
            @Override
            Object cloneReset() throws CloneNotSupportedException {
                throw new CloneNotSupportedException("test");
            }
        }.clone();
        assertNull(notCloned);
    }

    @Test
    public void testCloneNull() {
        final StrTokenizer tokenizer = new StrTokenizer((char[]) null);
        // Start sanity check
        assertNull(tokenizer.nextToken());
        tokenizer.reset();
        assertNull(tokenizer.nextToken());
        // End sanity check
        final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
        tokenizer.reset();
        assertNull(tokenizer.nextToken());
        assertNull(clonedTokenizer.nextToken());
    }

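    /**
     * Verifies that a cloned tokenizer keeps its own copy of the input characters,
     * so mutating the original char[] after cloning does not affect the clone.
     */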
    @Test
    public void testCloneReset() {
        final char[] input = {'a'};
        final StrTokenizer tokenizer = new StrTokenizer(input);
        // Start sanity check
        assertEquals("a", tokenizer.nextToken());
        tokenizer.reset(input);
        assertEquals("a", tokenizer.nextToken());
        // End sanity check
        final StrTokenizer clonedTokenizer = (StrTokenizer) tokenizer.clone();
        input[0] = 'b';
        tokenizer.reset(input);
        assertEquals("b", tokenizer.nextToken());
        assertEquals("a", clonedTokenizer.nextToken());
    }

    @Test
    public void testConstructor_String() {
        StrTokenizer tok = new StrTokenizer("a b");
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer("");
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((String) null);
        assertFalse(tok.hasNext());
    }

    @Test
    public void testConstructor_String_char() {
        StrTokenizer tok = new StrTokenizer("a b", ' ');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer("", ' ');
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((String) null, ' ');
        assertFalse(tok.hasNext());
    }

    @Test
    public void testConstructor_String_char_char() {
        StrTokenizer tok = new StrTokenizer("a b", ' ', '"');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
        assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer("", ' ', '"');
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((String) null, ' ', '"');
        assertFalse(tok.hasNext());
    }

    @Test
    public void testConstructor_charArray() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray());
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer(new char[0]);
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((char[]) null);
        assertFalse(tok.hasNext());
    }

    @Test
    public void testConstructor_charArray_char() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer(new char[0], ' ');
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((char[]) null, ' ');
        assertFalse(tok.hasNext());
    }

    @Test
    public void testConstructor_charArray_char_char() {
        StrTokenizer tok = new StrTokenizer("a b".toCharArray(), ' ', '"');
        assertEquals(1, tok.getDelimiterMatcher().isMatch(" ".toCharArray(), 0, 0, 1));
        assertEquals(1, tok.getQuoteMatcher().isMatch("\"".toCharArray(), 0, 0, 1));
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertFalse(tok.hasNext());

        tok = new StrTokenizer(new char[0], ' ', '"');
        assertFalse(tok.hasNext());

        tok = new StrTokenizer((char[]) null, ' ', '"');
        assertFalse(tok.hasNext());
    }

    @Test
    public void testReset() {
        final StrTokenizer tok = new StrTokenizer("a b c");
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());

        tok.reset();
        assertEquals("a", tok.next());
        assertEquals("b", tok.next());
        assertEquals("c", tok.next());
        assertFalse(tok.hasNext());
    }

    @Test
    public void testReset_String() {
        final StrTokenizer tok = new StrTokenizer("x x x");
        tok.reset("d e");
        assertEquals("d", tok.next());
        assertEquals("e", tok.next());
        assertFalse(tok.hasNext());

        tok.reset((String) null);
        assertFalse(tok.hasNext());
    }

    @Test
    public void testReset_charArray() {
        final StrTokenizer tok = new StrTokenizer("x x x");

        final char[] array = {'a', 'b', 'c'};
        tok.reset(array);
        assertEquals("abc", tok.next());
        assertFalse(tok.hasNext());

        tok.reset((char[]) null);
        assertFalse(tok.hasNext());
    }

    @Test
    public void testTSV() {
        this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE));
        this.testXSVAbc(StrTokenizer.getTSVInstance(TSV_SIMPLE_FIXTURE.toCharArray()));
    }

    @Test
    public void testTSVEmpty() {
        this.testEmpty(StrTokenizer.getTSVInstance());
        this.testEmpty(StrTokenizer.getTSVInstance(""));
    }

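    /**
     * Walks an "A", "b", "c" tokenizer forwards and then backwards, verifying the
     * tokens and the next/previous indices at each step.
     */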
    void testXSVAbc(final StrTokenizer tokenizer) {
        this.checkClone(tokenizer);
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(0, tokenizer.nextIndex());
        assertNull(tokenizer.previousToken());
        assertEquals("A", tokenizer.nextToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("b", tokenizer.nextToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("c", tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        assertNull(tokenizer.nextToken());
        assertEquals(3, tokenizer.nextIndex());
        assertEquals("c", tokenizer.previousToken());
        assertEquals(2, tokenizer.nextIndex());
        assertEquals("b", tokenizer.previousToken());
        assertEquals(1, tokenizer.nextIndex());
        assertEquals("A", tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        assertNull(tokenizer.previousToken());
        assertEquals(0, tokenizer.nextIndex());
        assertEquals(-1, tokenizer.previousIndex());
        assertEquals(3, tokenizer.size());
    }

    @Test
    public void testIteration() {
        final StrTokenizer tkn = new StrTokenizer("a b c");
        assertFalse(tkn.hasPrevious());
        assertThrows(NoSuchElementException.class, tkn::previous);
        assertTrue(tkn.hasNext());

        assertEquals("a", tkn.next());
        assertThrows(UnsupportedOperationException.class, tkn::remove);
        assertThrows(UnsupportedOperationException.class, () -> tkn.set("x"));
        assertThrows(UnsupportedOperationException.class, () -> tkn.add("y"));
        assertTrue(tkn.hasPrevious());
        assertTrue(tkn.hasNext());

        assertEquals("b", tkn.next());
        assertTrue(tkn.hasPrevious());
        assertTrue(tkn.hasNext());

        assertEquals("c", tkn.next());
        assertTrue(tkn.hasPrevious());
        assertFalse(tkn.hasNext());

        assertThrows(NoSuchElementException.class, tkn::next);
        assertTrue(tkn.hasPrevious());
        assertFalse(tkn.hasNext());
    }

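    /**
     * Verifies that a subclass can override tokenize(...) to substitute its own input;
     * the override here tokenizes a slice of "w x y z" instead of the constructor argument.
     */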
    @Test
    public void testTokenizeSubclassInputChange() {
        final StrTokenizer tkn = new StrTokenizer("a b c d e") {
            @Override
            protected List<String> tokenize(final char[] chars, final int offset, final int count) {
                return super.tokenize("w x y z".toCharArray(), 2, 5);
            }
        };
        assertEquals("x", tkn.next());
        assertEquals("y", tkn.next());
    }

    @Test
    public void testTokenizeSubclassOutputChange() {
        final StrTokenizer tkn = new StrTokenizer("a b c") {
            @Override
            protected List<String> tokenize(final char[] chars, final int offset, final int count) {
                final List<String> list = super.tokenize(chars, offset, count);
                Collections.reverse(list);
                return list;
            }
        };
        assertEquals("c", tkn.next());
        assertEquals("b", tkn.next());
        assertEquals("a", tkn.next());
    }

    @Test
    public void testToString() {
        final StrTokenizer tkn = new StrTokenizer("a b c d e");
        assertEquals("StrTokenizer[not tokenized yet]", tkn.toString());
        tkn.next();
        assertEquals("StrTokenizer[a, b, c, d, e]", tkn.toString());
    }

}