Skip to content

Commit 41ee595

Browse files
asashour authored and Stanford NLP committed
Fix (Penn)TreePrintTest on Windows - use lineSeparator instead of \n
1 parent e134844 commit 41ee595

File tree

78 files changed

+780
-2039
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

78 files changed

+780
-2039
lines changed

build.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -569,7 +569,7 @@
569569
<zipfileset prefix="WEB-INF/data/models"
570570
file="/u/nlp/data/ner/goodClassifiers/english.muc.7class.distsim.crf.ser.gz"/>
571571
<zipfileset prefix="WEB-INF/data/models"
572-
file="/u/nlp/data/ner/goodClassifiers/german.distsim.crf.ser.gz"/>
572+
file="/u/nlp/data/ner/goodClassifiers/german.hgc_175m_600.crf.ser.gz"/>
573573
<zipfileset prefix="WEB-INF/data/models"
574574
file="/u/nlp/data/ner/goodClassifiers/chinese.misc.distsim.crf.ser.gz"/>
575575
<zipfileset file="${data.path}/webapps/favicon.ico"/>

itest/src/edu/stanford/nlp/coref/CorefBenchmark.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ public void setUp() throws Exception, IOException {
4444
String[] corefArgs = { "-props", PROPERTIES_PATH};
4545
props = StringUtils.argsToProperties(corefArgs);
4646
props.setProperty("coref.conllOutputPath", WORK_DIR_FILE.getAbsolutePath()+"/");
47+
props.setProperty("coref.verbose", "true");
4748

4849
// build CorefSystem
4950
corefSystem = new CorefSystem(props);

itest/src/edu/stanford/nlp/ie/crf/ThreadedCRFClassifierITest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ public class ThreadedCRFClassifierITest extends TestCase {
1515
Properties props;
1616

1717
private static final String german1 =
18-
"edu/stanford/nlp/models/ner/german.distsim.crf.ser.gz";
18+
"edu/stanford/nlp/models/ner/german.conll.germeval2014.hgc_175m_600.crf.ser.gz";
1919
/** -- We're no longer supporting this one
2020
private String german2 =
2121
"/u/nlp/data/ner/goodClassifiers/german.dewac_175m_600.crf.ser.gz";

itest/src/edu/stanford/nlp/ie/crf/TrainCRFClassifierSlowITest.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@ public void testGermanCRFClassifierTraining() throws Exception {
2323
StanfordRedwoodConfiguration.apply(PropertiesUtils.asProperties(
2424
"log.file", crfTrainingWorkingDir + "/german-crf.results"));
2525
// delete the model if present
26-
File originalModelFile = new File(crfTrainingWorkingDir, "german.distsim.crf.ser.gz");
26+
File originalModelFile = new File(crfTrainingWorkingDir, "german.hgc_175m_600.crf.ser.gz");
2727
originalModelFile.delete();
2828
// train the new model
2929
CRFClassifier.main(new String[] {
30-
"-props", "edu/stanford/nlp/models/ner/german.distsim.prop",
31-
"-serializeTo", crfTrainingWorkingDir+"/german.distsim.crf.ser.gz"
30+
"-props", "edu/stanford/nlp/models/ner/german-2018.hgc_175m_600.prop",
31+
"-serializeTo", crfTrainingWorkingDir+"/german.hgc_175m_600.crf.ser.gz"
3232
});
3333
List<String> germanTrainingResults = IOUtils.linesFromFile(crfTrainingWorkingDir + "/german-crf.results");
3434
String lastLineOfResults = germanTrainingResults.get(germanTrainingResults.size() - 1);

itest/src/edu/stanford/nlp/naturalli/OperatorScopeITest.java

Lines changed: 24 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import edu.stanford.nlp.ling.CoreLabel;
55
import edu.stanford.nlp.pipeline.Annotation;
66
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
7-
//import edu.stanford.nlp.trees.TreeCoreAnnotations;
87
import edu.stanford.nlp.util.StringUtils;
98
import org.junit.*;
109

@@ -39,7 +38,6 @@ public class OperatorScopeITest {
3938
private Optional<OperatorSpec>[] annotate(String text) {
4039
Annotation ann = new Annotation(text);
4140
pipeline.annotate(ann);
42-
//System.out.println(ann.get(CoreAnnotations.SentencesAnnotation.class).get(0).get(TreeCoreAnnotations.TreeAnnotation.class));
4341
List<CoreLabel> tokens = ann.get(CoreAnnotations.SentencesAnnotation.class).get(0).get(CoreAnnotations.TokensAnnotation.class);
4442
Optional<OperatorSpec>[] scopes = new Optional[tokens.size()];
4543
Arrays.fill(scopes, Optional.empty());
@@ -322,7 +320,7 @@ public void fracasSentencesWithEvery() {
322320
checkScope("{ Every } [ customer who owns a computer ] [ has a service contract for it ]");
323321
checkScope("{ Every } [ department ] [ rents a line from BT ]");
324322
checkScope("{ Every } [ executive who had a laptop computer ] [ brought it to take notes at the meeting ]");
325-
checkScope("{ Every } [ four - legged mammal ] [ is a four - legged animal ]");
323+
checkScope("{ Every } [ four-legged mammal ] [ is a four-legged animal ]");
326324
checkScope("{ Every } [ individual who has the right to live anywhere in Europe ] [ can travel freely within Europe ]");
327325
checkScope("{ Every } [ individual who has the right to live in Europe ] [ can travel freely within Europe ]");
328326
checkScope("{ Every } [ inhabitant of Cambridge ] [ voted for a Labour MP ]");
@@ -420,21 +418,21 @@ public void fracasSentencesWithSome() {
420418
public void fracasSentencesWithThe() {
421419
checkScope("{ The } [ Ancient Greeks ] [ were all noted philosophers ]");
422420
checkScope("{ The } [ Ancient Greeks ] [ were noted philosophers ]");
423-
checkScope("{ The } [ ITEL - XZ ] [ is fast ]");
424-
checkScope("{ The } [ ITEL - ZX ] [ is an ITEL computer ]");
425-
checkScope("{ The } [ ITEL - ZX ] [ is slower than 500 MIPS ]");
426-
checkScope("{ The } [ PC - 6082 ] [ is as fast as the ITEL-XZ ]");
427-
checkScope("{ The } [ PC - 6082 ] [ is fast ]");
428-
checkScope("{ The } [ PC - 6082 ] [ is faster than 500 MIPS ]");
429-
checkScope("{ The } [ PC - 6082 ] [ is faster than any ITEL computer ]");
430-
checkScope("{ The } [ PC - 6082 ] [ is faster than every ITEL computer ]");
431-
checkScope("{ The } [ PC - 6082 ] [ is faster than some ITEL computer ]");
432-
checkScope("{ The } [ PC - 6082 ] [ is faster than the ITEL - XZ ]");
433-
checkScope("{ The } [ PC - 6082 ] [ is faster than the ITEL - ZX ]");
434-
checkScope("{ The } [ PC - 6082 ] [ is faster than the ITEL - ZX and the ITEL - ZY ]");
435-
checkScope("{ The } [ PC - 6082 ] [ is faster than the ITEL - ZX or the ITEL - ZY ]");
436-
checkScope("{ The } [ PC - 6082 ] [ is slow ]");
437-
checkScope("{ The } [ PC - 6082 ] [ is slower than the ITEL - XZ ]");
421+
checkScope("{ The } [ ITEL-XZ ] [ is fast ]");
422+
checkScope("{ The } [ ITEL-ZX ] [ is an ITEL computer ]");
423+
checkScope("{ The } [ ITEL-ZX ] [ is slower than 500 MIPS ]");
424+
checkScope("{ The } [ PC-6082 ] [ is as fast as the ITEL-XZ ]");
425+
checkScope("{ The } [ PC-6082 ] [ is fast ]");
426+
checkScope("{ The } [ PC-6082 ] [ is faster than 500 MIPS ]");
427+
checkScope("{ The } [ PC-6082 ] [ is faster than any ITEL computer ]");
428+
checkScope("{ The } [ PC-6082 ] [ is faster than every ITEL computer ]");
429+
checkScope("{ The } [ PC-6082 ] [ is faster than some ITEL computer ]");
430+
checkScope("{ The } [ PC-6082 ] [ is faster than the ITEL-XZ ]");
431+
checkScope("{ The } [ PC-6082 ] [ is faster than the ITEL-ZX ]");
432+
checkScope("{ The } [ PC-6082 ] [ is faster than the ITEL-ZX and the ITEL-ZY ]");
433+
checkScope("{ The } [ PC-6082 ] [ is faster than the ITEL-ZX or the ITEL-ZY ]");
434+
checkScope("{ The } [ PC-6082 ] [ is slow ]");
435+
checkScope("{ The } [ PC-6082 ] [ is slower than the ITEL-XZ ]");
438436
checkScope("{ The } [ chairman of the department ] [ is a person ]");
439437
checkScope("{ The } [ chairman ] [ read out every item on the agenda ]");
440438
checkScope("{ The } [ chairman ] [ read out the items on the agenda ]");
@@ -492,8 +490,7 @@ public void fracasSentencesWithProperNouns() {
492490
checkScope("[ { APCOM } ] [ has been paying mortgage interest for a total of 15 years or more ]");
493491
checkScope("[ { APCOM } ] [ lost some orders ]");
494492
checkScope("[ { APCOM } ] [ lost ten orders ]");
495-
checkScope("[ { APCOM } ] [ signed the contract ]");
496-
checkScope("[ { APCOM } ] [ signed the contract ] Friday");
493+
checkScope("[ { APCOM } ] [ signed the contract Friday , 13th ]");
497494
checkScope("[ { APCOM } ] [ sold exactly 2500 computers ]");
498495
checkScope("[ { APCOM } ] [ won some orders ]");
499496
checkScope("[ { APCOM } ] [ won ten orders ]");
@@ -525,11 +522,11 @@ public void fracasSentencesWithProperNouns() {
525522
checkScope("[ { Bill } ] [ will speak to Mary ]");
526523
checkScope("[ { Bill } ] [ wrote a report ]");
527524

528-
checkScope("[ { Dumbo } ] [ is a four - legged animal ]");
525+
checkScope("[ { Dumbo } ] [ is a four-legged animal ]");
529526
checkScope("[ { Dumbo } ] [ is a large animal ]");
530527
checkScope("[ { Dumbo } ] [ is a small animal ]");
531528
checkScope("[ { Dumbo } ] [ is a small elephant ]");
532-
checkScope("[ { Dumbo } ] [ is four - legged ]");
529+
checkScope("[ { Dumbo } ] [ is four-legged ]");
533530
checkScope("[ { Dumbo } ] [ is larger than Mickey ]");
534531

535532
checkScope("[ { GFI } ] [ owns several computers ]");
@@ -550,7 +547,7 @@ public void fracasSentencesWithProperNouns() {
550547
checkScope("[ { ITEL } ] [ has developed a new editor since 1992 ]");
551548
checkScope("[ { ITEL } ] [ has expanded since 1992 ]");
552549
checkScope("[ { ITEL } ] [ has made a loss since 1992 ]");
553-
checkScope("[ { ITEL } ] [ has sent most of the reports which Smith needs ]");
550+
checkScope("[ { ITEL } ] [ has sent most of the reports Smith needs ]");
554551
checkScope("[ { ITEL } ] [ made a loss in 1993 ]");
555552
checkScope("[ { ITEL } ] [ maintains all the computers that GFI owns ]");
556553
checkScope("[ { ITEL } ] [ maintains them ]");
@@ -607,10 +604,8 @@ public void fracasSentencesWithProperNouns() {
607604
checkScope("[ { John } ] [ said Bill had been hurt ]");
608605
checkScope("[ { John } ] [ said Bill had hurt himself ]");
609606
checkScope("[ { John } ] [ said Bill wrote a report ]");
610-
// FIXME this should work even if the parse changed some, right?
611-
//checkScope("[ { John } ] [ said Mary wrote a report , and Bill did too ]"); // interesting example
612-
// TODO(gabor) fix me (bad scope)
613-
//checkScope("[ { John } ] [ said that Mary wrote a report ] , and that Bill did too");
607+
checkScope("[ { John } ] [ said Mary wrote a report ] , and Bill did too"); // interesting example
608+
// checkScope("[ { John } ] [ said that Mary wrote a report ] , and that Bill did too"); // TODO(gabor) fix me (bad scope)
614609
checkScope("[ { John } ] [ spoke to Mary ]");
615610
checkScope("[ { John } ] [ spoke to Mary at four o'clock ]");
616611
checkScope("[ { John } ] [ spoke to Mary on Friday ]");
@@ -623,11 +618,9 @@ public void fracasSentencesWithProperNouns() {
623618
checkScope("[ { John } ] [ wants to know how many women work part time ]");
624619
checkScope("[ { John } ] [ wants to know which men work part time ]");
625620
checkScope("[ { John } ] [ went to Paris by car ]");
626-
// FIXME should this encompass "and Bill by train"?
627-
// checkScope("[ { John } ] [ went to Paris by car , and Bill by train ]");
621+
checkScope("[ { John } ] [ went to Paris by car , and Bill by train ]");
628622
checkScope("[ { John } ] [ went to Paris by car , and Bill by train to Berlin ]");
629-
// FIXME should this encompass "and Bill to Berlin"?
630-
// checkScope("[ { John } ] [ went to Paris by car , and Bill to Berlin ]");
623+
checkScope("[ { John } ] [ went to Paris by car , and Bill to Berlin ]");
631624
checkScope("[ { John } ] [ wrote a report ]");
632625
// checkScope("[ { John } ] [ wrote a report ] , and Bill said Peter did too ]"); // TODO(gabor) fix me
633626

itest/src/edu/stanford/nlp/parser/nndep/CoNLLReadingITest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919

2020
public class CoNLLReadingITest extends TestCase {
2121

22-
String exampleCoNLLXPath = "/u/nlp/data/stanford-corenlp-testing/data/conllu/fr_gsd-ud-train.conllu.clean";
22+
String exampleCoNLLXPath = "/u/nlp/data/stanford-corenlp/test/data/conllu/fr_gsd-ud-train.conllu.clean";
2323
String exampleCoNLLUPath = "";
2424

2525
public static void loadConllFileOriginal(String inFile, List<CoreMap> sents, List<DependencyTree> trees, boolean unlabeled, boolean cPOS)

itest/src/edu/stanford/nlp/parser/shiftreduce/ShiftReduceParserITest.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,14 +61,14 @@ public void testBasicConstraint() {
6161
pq.setConstraints(constraints);
6262
assertTrue(pq.parse(sentence));
6363
result = pq.getBestParse();
64-
assertEquals("(ROOT (S (NP (PRP It)) (VP (VBD was) (ADJP (NP (NP (NNP Carolina) (NNPS Reapers))))) (. .)))", result.toString());
64+
assertEquals("(ROOT (S (NP (PRP It)) (VP (VBD was) (ADJP (NP (NNP Carolina) (NNPS Reapers)))) (. .)))", result.toString());
6565

6666
constraint = new ParserConstraint(1, 3, "VP");
6767
constraints = Collections.singletonList(constraint);
6868
pq = englishParser.parserQuery();
6969
pq.setConstraints(constraints);
7070
assertTrue(pq.parse(sentence));
7171
result = pq.getBestParse();
72-
assertEquals("(ROOT (S (NP (PRP It)) (VP (VBD was) (NP (NNP Carolina))) (NP (NNPS Reapers)) (. .)))", result.toString());
72+
assertEquals("(ROOT (S (NP (PRP It)) (VP (VBD was) (NP (NNP Carolina))) (NNPS Reapers) (. .)))", result.toString());
7373
}
7474
}

itest/src/edu/stanford/nlp/pipeline/ChineseSegmenterRegressionITest.java

Lines changed: 24 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,10 @@ public class ChineseSegmenterRegressionITest extends TestCase {
1313
public StanfordCoreNLP pipeline;
1414

1515
// strings to test on
16-
public List<String> inputStrings = new ArrayList<>();
16+
public ArrayList<String> inputStrings = new ArrayList<>();
1717

1818
// expected token lists
19-
List<List<String>> expectedTokenLists = new ArrayList<>();
19+
ArrayList<ArrayList<String>> expectedTokenLists = new ArrayList<>();
2020

2121
public void setUp() {
2222

@@ -31,33 +31,36 @@ public void setUp() {
3131

3232
// example 1
3333
inputStrings.add("巴拉克·奥巴马是美国总统。他在2008年当选");
34-
List<String> exampleOneTokenList =
35-
Arrays.asList("巴拉克·奥巴马","是","美国","总统","。","他","在","2008年","当选");
34+
ArrayList exampleOneTokenList = new ArrayList<>(
35+
Arrays.asList(
36+
new String[]{"巴拉克·奥巴马","是","美国","总统","。","他","在","2008年","当选"}));
3637
expectedTokenLists.add(exampleOneTokenList);
3738

3839
// example 2
3940
inputStrings.add("声明全文如下:\n" +
40-
" \n" +
41-
" \n" +
42-
"中国政府欢迎乌克兰销毁其境内全部核武器的决定,\n" +
43-
"对乌克兰议会于11月16日批准乌克兰作为无核武器国\n" +
44-
"家加入《不扩散核武器条约》表示赞赏。");
45-
List<String> exampleTwoTokenList =
46-
Arrays.asList("声明","全","文","如下",":","中国","政府","欢迎","乌克兰","销毁","其","境内",
47-
"全部","核武器","的","决定",",","对","乌克兰","议会","于","11月","16日","批准","乌克兰","作为",
48-
"无核武器","国家","加入","《","不扩散","核武器","条约","》","表示","赞赏","。");
41+
" \n" +
42+
" \n" +
43+
"中国政府欢迎乌克兰销毁其境内全部核武器的决定,\n" +
44+
"对乌克兰议会于11月16日批准乌克兰作为无核武器国\n" +
45+
"家加入《不扩散核武器条约》表示赞赏。");
46+
ArrayList exampleTwoTokenList =
47+
new ArrayList<>(
48+
Arrays.asList(new String[]{"声明","全","文","如下",":","中国","政府","欢迎","乌克兰","销毁","其","境内",
49+
"全部","核武器","的","决定",",","对","乌克兰","议会","于","11月","16日","批准","乌克兰","作为",
50+
"无核武器","国家","加入","《","不扩散","核武器","条约","》","表示","赞赏","。"}));
4951
expectedTokenLists.add(exampleTwoTokenList);
5052

5153
// example 3
5254
inputStrings.add("协定规定,自协定签署之日起一年后,缔约四国之间\n" +
53-
"实现澜沧江-湄公河商船通航,缔约任何一方的船舶均可\n" +
54-
"按照协定的规定在中国的思茅港和老挝的琅勃拉邦港之间\n" +
55-
"自由航行。");
56-
List<String> exampleThreeTokenList =
57-
Arrays.asList("协定","规定",",","自","协定","签署","之日起","一","年","后",",","缔约","四",
58-
"国","之间","实现","澜沧江","-","湄公河","商船","通航",",","缔约","任何","一","方","的","船舶",
59-
"均","可","按照","协定","的","规定","在","中国","的","思茅","港","和","老挝","的","琅勃拉邦","港",
60-
"之间","自由","航行","。");
55+
"实现澜沧江-湄公河商船通航,缔约任何一方的船舶均可\n" +
56+
"按照协定的规定在中国的思茅港和老挝的琅勃拉邦港之间\n" +
57+
"自由航行。");
58+
ArrayList exampleThreeTokenList =
59+
new ArrayList<>(
60+
Arrays.asList(new String[]{"协定","规定",",","自","协定","签署","之日起","一","年","后",",","缔约","四",
61+
"国","之间","实现","澜沧江","-","湄公河","商船","通航",",","缔约","任何","一","方","的","船舶",
62+
"均","可","按照","协定","的","规定","在","中国","的","思茅","港","和","老挝","的","琅勃拉邦","港",
63+
"之间","自由","航行","。"}));
6164
expectedTokenLists.add(exampleThreeTokenList);
6265
}
6366

itest/src/edu/stanford/nlp/pipeline/NERTokenizationITest.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ public void testEnglishNERTokenization() {
4747
List<String> docs = new ArrayList<>();
4848
List<List<String>> words = new ArrayList<>();
4949
List<List<String>> labels = new ArrayList<>();
50-
List<List<CoreLabel>> goldTokens = new ArrayList<>();
50+
List<List<CoreLabel>> goldTokens = new ArrayList();
5151
// basic example
5252
docs.add("Barack Obama was born in Hawaii.");
5353
words.add(Arrays.asList("Barack", "Obama", "was", "born", "in", "Hawaii", "."));
@@ -87,7 +87,7 @@ public void testEnglishNERTokenizationJustStatistical() {
8787
List<String> docs = new ArrayList<>();
8888
List<List<String>> words = new ArrayList<>();
8989
List<List<String>> labels = new ArrayList<>();
90-
List<List<CoreLabel>> goldTokens = new ArrayList<>();
90+
List<List<CoreLabel>> goldTokens = new ArrayList();
9191
// basic example
9292
docs.add("Barack Obama was born in Hawaii.");
9393
words.add(Arrays.asList("Barack", "Obama", "was", "born", "in", "Hawaii", "."));
@@ -128,7 +128,7 @@ public void testEnglishNERTokenizationWithPTB3EscapingJustStatistical() {
128128
List<String> docs = new ArrayList<>();
129129
List<List<String>> words = new ArrayList<>();
130130
List<List<String>> labels = new ArrayList<>();
131-
List<List<CoreLabel>> goldTokens = new ArrayList<>();
131+
List<List<CoreLabel>> goldTokens = new ArrayList();
132132
// basic example
133133
docs.add("Barack Obama was born in Hawaii.");
134134
words.add(Arrays.asList("Barack", "Obama", "was", "born", "in", "Hawaii", "."));
@@ -167,7 +167,7 @@ public void testEnglishNERTokenizationTurnedOff() {
167167
List<String> docs = new ArrayList<>();
168168
List<List<String>> words = new ArrayList<>();
169169
List<List<String>> labels = new ArrayList<>();
170-
List<List<CoreLabel>> goldTokens = new ArrayList<>();
170+
List<List<CoreLabel>> goldTokens = new ArrayList();
171171
// basic example with "-"
172172
docs.add("She traveled to Port-au-Prince over the summer with Jane Smith.");
173173
words.add(Arrays.asList("She", "traveled", "to", "Port", "-", "au", "-", "Prince", "over", "the", "summer", "with",
@@ -198,7 +198,7 @@ public void testGermanNERTokenization() {
198198
List<String> docs = new ArrayList<>();
199199
List<List<String>> words = new ArrayList<>();
200200
List<List<String>> labels = new ArrayList<>();
201-
List<List<CoreLabel>> goldTokens = new ArrayList<>();
201+
List<List<CoreLabel>> goldTokens = new ArrayList();
202202
// basic example with "-"
203203
docs.add("Die Microsoft-Aktie sank daraufhin an der Wall Street um über vier Dollar auf 89,87 Dollar.");
204204
words.add(Arrays.asList("Die", "Microsoft", "-", "Aktie", "sank", "daraufhin", "an", "der", "Wall", "Street",

itest/src/edu/stanford/nlp/pipeline/POSTaggerBenchmarkITest.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,13 +35,13 @@ public void testEnglishBiDirectionalWSJTestPOS() throws IOException {
3535

3636
public void testEnglishCaselessWSJDevPOS() throws IOException {
3737
runPOSTest("edu/stanford/nlp/models/pos-tagger/english-caseless-left3words-distsim.tagger",
38-
"tagSeparator=_,/u/nlp/data/pos-tagger/models-4.0.0/data/wsj-caseless/dev/wsj-caseless-dev.txt",
39-
95.80);
38+
"format=TSV,wordColumn=0,tagColumn=1,/u/nlp/data/pos-tagger/english/test-wsj-19-21.caseless.tsv",
39+
95.90);
4040
}
4141

4242
public void testEnglishCaselessWSJTestPOS() throws IOException {
4343
runPOSTest("edu/stanford/nlp/models/pos-tagger/english-caseless-left3words-distsim.tagger",
44-
"tagSeparator=_,/u/nlp/data/pos-tagger/models-4.0.0/data/wsj-caseless/test/wsj-caseless-test.txt",
44+
"format=TSV,wordColumn=0,tagColumn=1,/u/nlp/data/pos-tagger/english/test-wsj-22-24.caseless.tsv",
4545
95.92);
4646
}
4747

0 commit comments

Comments
 (0)