}
Tokenizer tokenizer = SimpleTokenizer.INSTANCE; //<co id="co.opennlp.name.2"/>
for (int si = 0; si < sentences.length; si++) { //<co id="co.opennlp.name.3"/>
  List<Annotation> allAnnotations = new ArrayList<Annotation>();
  String[] tokens = tokenizer.tokenize(sentences[si]); //<co id="co.opennlp.name.4"/>
  for (int fi = 0; fi < finders.length; fi++) { //<co id="co.opennlp.name.5"/>
    Span[] spans = finders[fi].find(tokens); //<co id="co.opennlp.name.6"/>
    double[] probs = finders[fi].probs(spans); //<co id="co.opennlp.name.7"/>
    for (int ni = 0; ni < spans.length; ni++) {
      allAnnotations.add( //<co id="co.opennlp.name.8"/>