Change tests for event rules #88
Merged
Commits (23, all by gcgbarbosa):
f8e5d4b  Add todos
8949a2f  Add todo
ee99796  Rename file and address todos
025ccc5  Change TestArgQuantifiers to use new spec
c722a53  Added missing `unwantedargs` part
3d77010  Move TestRuleFile & use new eventspec
c9463cc  Change classes to extend EventSpec
60929de  Move testing resources from test file to spec
7c716f1  Move sentences up to the eventspec
29b1c70  Move the rest of the documents up
4d63815  Clean a bit
012d11e  Dry test a bit
d8e590e  Dry tests a bit more
74436d7  Change val to def
f117215  Merge master
e6b8308  Start the todolist based on codecov.io
8ae2ac3  Delete README.md
0a2d530  Ignore another name for ctags file
007be61  Merge master (will merge Becky's later)
681b25a  Add Becky's test
05c4135  Name test documents
0d6ac55  Merge master
967e392  Remove duplicated test
@@ -0,0 +1,58 @@
package ai.lum.odinson.events

import ai.lum.odinson.{BaseSpec, OdinsonMatch, EventMatch}

import org.scalatest._

class EventSpec extends BaseSpec {
  def testEventTrigger(m: OdinsonMatch, start: Int, end: Int): Unit = {
    m shouldBe an [EventMatch]
    val em = m.asInstanceOf[EventMatch]
    val trigger = em.trigger
    trigger.start shouldEqual start
    trigger.end shouldEqual end
  }

  def testEventArguments(m: OdinsonMatch, desiredArgs: Seq[Argument]): Unit = {
    // extract the arguments from the matching objects
    val matchArgs = for (nc <- m.namedCaptures)
      yield Argument(nc.name, nc.capturedMatch.start, nc.capturedMatch.end)
    // all desired args should be there, in the right number
    val groupedMatched = matchArgs.groupBy(_.name)
    val groupedDesired = desiredArgs.groupBy(_.name)
    for ((desiredRole, desired) <- groupedDesired) {
      // there should be arg(s) of the desired label
      groupedMatched.keySet should contain (desiredRole)
      // should have the same number of arguments of that label
      val matchedForThisRole = groupedMatched(desiredRole)
      desired should have size matchedForThisRole.size
      for (d <- desired) {
        matchedForThisRole should contain (d)
      }
      // there shouldn't be any found arguments that we didn't want
      val unwantedArgs = groupedMatched.keySet.diff(groupedDesired.keySet)
      unwantedArgs shouldBe empty
    }
  }

  def createArgument(name: String, start: Int, end: Int): Argument = Argument(name, start, end)

  def getJsonDocument(id: String): String = {
    val json = Map(
"1" -> """{"id":"48fb577b-f5ba-4e16-864f-f8ba20ba9cfa","metadata":[],"sentences":[{"numTokens":8,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["The","consumption","of","gummy","bears","and","donuts","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["The","consumption","of","gummy","bears","and","donuts","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["DT","NN","IN","NN","NNS","CC","NNS","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["the","consumption","of","gummy","bear","and","donut","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["O","O","O","B-dessert","I-dessert","O","B-dessert","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","I-NP","B-PP","B-NP","I-NP","O","B-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[1,0,"det"],[1,4,"nmod_of"],[1,7,"punct"],[4,2,"case"],[4,3,"compound"],[4,5,"cc"],[4,6,"conj"]],"roots":[1]}]}]}""", | ||
"2" -> """{"id":"56842e05-1628-447a-b440-6be78f669bf2","metadata":[],"sentences":[{"numTokens":5,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Becky","ate","gummy","bears","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Becky","ate","gummy","bears","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["NNP","VBD","JJ","NNS","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["becky","eat","gummy","bear","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["I-PER","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-VP","B-NP","I-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[1,0,"nsubj"],[1,3,"dobj"],[1,4,"punct"],[3,2,"amod"]],"roots":[1]}]}]}""", | ||
"3" -> """{"id":"3c1237c6-01d0-42a4-9459-c84fed223286","metadata":[],"sentences":[{"numTokens":9,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["John","ate","ramen","with","chopsticks","and","a","spoon","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["John","ate","ramen","with","chopsticks","and","a","spoon","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["NNP","VBD","NNS","IN","NNS","CC","DT","NN","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["john","eat","raman","with","chopstick","and","a","spoon","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["I-PER","O","O","O","O","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-VP","B-NP","B-PP","B-NP","O","B-NP","I-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[1,0,"nsubj"],[1,2,"dobj"],[1,4,"nmod_with"],[1,8,"punct"],[4,3,"case"],[4,5,"cc"],[4,7,"conj"],[7,6,"det"]],"roots":[1]}]},{"numTokens":7,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Daisy","ate","macaroni","at","her","house","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Daisy","ate","macaroni","at","her","house","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["NNP","VBD","NNS","IN","PRP$","NN","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["daisy","eat","macaroni","at","her","house","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["I-PER","O","O","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-VP","B-NP","B-PP","B-NP","I-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[1,0,"nsubj"],[1,5,"nmod_at"],[1,2,"dobj"],[1,6,"punct"],[5,3,"case"],[5,4,"nmod:poss"]],"roots":[1]}]},{"numTokens":15,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Gus","'s","pets","include","cats",",","dogs",",","parakeets",",","ponies",",","and","unicorns","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Gus","'s","pets","include","cats",",","dogs",",","parakeets",",","ponies",",","and","unicorns","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["NNP","POS","NNS","VBP","NNS",",","NNS",",","NNS",",","NNS",",","CC","NNS","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["gus","s","pet","include","cat",",","dog",",","parakeet",",","pony",",","and","unicorn","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["I-LOC","O","O","O","O","O","O","O","O","O","O","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-NP","I-NP","B-VP","B-NP","O","B-NP","O","B-NP","O","B-NP","O","O","B-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[0,1,"case"],[2,0,"nmod:poss"],[3,2,"nsubj"],[3,4,"dobj"],[3,6,"dobj"],[3,8,"dobj"],[3,10,"dobj"],[3,13,"dobj"],[3,14,"punct"],[4,5,"punct"],[4,6,"conj"],[4,7,"punct"],[4,8,"conj"],[4,9,"punct"],[4,10,"conj"],[4,11,"punct"],[4,12,"cc"],[4,13,"conj"]],"roots":[3]}]}]}""", | ||
"4" -> """{"id":"56842e05-1628-447a-b440-6be78f669bf2","metadata":[],"sentences":[{"numTokens":27,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Some","wild","animals","such","as","hedgehogs",",","coypu",",","and","any","wild","cloven-footed","animals","such","as","deer","and","zoo","animals","including","elephants","can","also","contract","it","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Some","wild","animals","such","as","hedgehogs",",","coypu",",","and","any","wild","cloven-footed","animals","such","as","deer","and","zoo","animals","including","elephants","can","also","contract","it","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["DT","JJ","NNS","JJ","IN","NNS",",","NN",",","CC","DT","JJ","JJ","NNS","JJ","IN","NNS","CC","NN","NNS","VBG","NNS","MD","RB","VB","PRP","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["some","wild","animal","such","as","hedgehog",",","coypu",",","and","any","wild","cloven-footed","animal","such","as","deer","and","zoo","animal","include","elephant","can","also","contract","it","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","I-NP","I-NP","B-PP","I-PP","B-NP","O","B-NP","O","O","B-NP","I-NP","I-NP","I-NP","B-PP","I-PP","B-NP","O","B-NP","I-NP","B-PP","B-NP","B-VP","I-VP","I-VP","B-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","roots":[24],"edges":[[2,0,"det"],[2,1,"amod"],[2,5,"nmod_such_as"],[2,7,"nmod_such_as"],[2,13,"nmod_such_as"],[3,4,"mwe"],[5,3,"case"],[5,6,"punct"],[5,7,"conj_and"],[5,8,"punct"],[5,9,"cc"],[5,13,"conj_and"],[13,10,"det"],[13,11,"amod"],[13,12,"amod"],[13,16,"nmod_such_as"],[13,19,"nmod_such_as"],[14,15,"mwe"],[16,14,"case"],[16,17,"cc"],[16,19,"conj_and"],[16,21,"nmod_including"],[19,18,"compound"],[21,20,"case"],[24,2,"nsubj"],[24,22,"aux"],[24,23,"advmod"],[24,25,"dobj"],[24,26,"punct"]]}]}]}""", | ||
"5" -> """{"id":"56842e05-1628-447a-b440-6be78f669bf2","metadata":[],"sentences":[{"numTokens":5,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Becky","ate","gummy","bears","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Becky","ate","gummy","bears","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["NNP","VBD","JJ","NNS","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["becky","eat","gummy","bear","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["I-PER","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-VP","B-NP","I-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","edges":[[1,0,"nsubj"],[1,3,"dobj"],[1,4,"punct"],[3,2,"amod"]],"roots":[1]}]}]}""", | ||
"7" -> """{"id":"56842e05-1628-447a-b440-6be78f669bf2","metadata":[],"sentences":[{"numTokens":58,"fields":[{"$type":"ai.lum.odinson.TokensField","name":"raw","tokens":["Much","of","the","diet","in","these","communities","is","made","up","of","food","eaten","long","before","the","Europeans","arrived",":","verdolagas","(","purslane",")","and","other","wild","greens",",","herbs",",","wild","mushrooms","and","berries",",","and","small","animals","such","as","rabbit",",","possum",",","quail",",","badger",",","iguana",",","armadillo","and","a","variety","of","river","fish","."],"store":true},{"$type":"ai.lum.odinson.TokensField","name":"word","tokens":["Much","of","the","diet","in","these","communities","is","made","up","of","food","eaten","long","before","the","Europeans","arrived",":","verdolagas","(","purslane",")","and","other","wild","greens",",","herbs",",","wild","mushrooms","and","berries",",","and","small","animals","such","as","rabbit",",","possum",",","quail",",","badger",",","iguana",",","armadillo","and","a","variety","of","river","fish","."]},{"$type":"ai.lum.odinson.TokensField","name":"tag","tokens":["JJ","IN","DT","NN","IN","DT","NNS","VBZ","VBN","RP","IN","NN","VBD","RB","IN","DT","NNPS","VBD",":","NNS","-LRB-","NN","-RRB-","CC","JJ","JJ","NNS",",","NNS",",","JJ","NNS","CC","NNS",",","CC","JJ","NNS","JJ","IN","NN",",","NN",",","NN",",","NN",",","NN",",","NN","CC","DT","NN","IN","NN","NN","."]},{"$type":"ai.lum.odinson.TokensField","name":"lemma","tokens":["much","of","the","diet","in","these","community","be","make","up","of","food","eat","long","before","the","Europeans","arrive",":","verdolaga","(","purslane",")","and","other","wild","green",",","herb",",","wild","mushroom","and","berry",",","and","small","animal","such","as","rabbit",",","possum",",","quail",",","badger",",","iguana",",","armadillo","and","a","variety","of","river","fish","."]},{"$type":"ai.lum.odinson.TokensField","name":"entity","tokens":["O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","MISC","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O","O"]},{"$type":"ai.lum.odinson.TokensField","name":"chunk","tokens":["B-NP","B-PP","B-NP","I-NP","B-PP","B-NP","I-NP","B-VP","I-VP","B-PRT","B-PP","B-NP","B-VP","B-ADVP","B-PP","B-NP","I-NP","B-VP","O","B-NP","I-NP","I-NP","I-NP","O","B-NP","I-NP","I-NP","I-NP","I-NP","I-NP","I-NP","I-NP","I-NP","I-NP","O","O","B-NP","I-NP","B-PP","I-PP","B-NP","O","B-NP","O","B-NP","O","B-NP","O","B-NP","O","B-NP","O","B-NP","I-NP","B-PP","B-NP","I-NP","O"]},{"$type":"ai.lum.odinson.GraphField","name":"dependencies","roots":[8],"edges":[[0,3,"nmod_of"],[3,1,"case"],[3,2,"det"],[3,6,"nmod_in"],[6,4,"case"],[6,5,"det"],[8,0,"nsubjpass"],[8,7,"auxpass"],[8,9,"compound:prt"],[8,11,"nmod_of"],[8,12,"xcomp"],[8,57,"punct"],[11,10,"case"],[12,17,"advcl_before"],[16,15,"det"],[17,13,"advmod"],[17,14,"mark"],[17,16,"nsubj"],[17,18,"punct"],[17,19,"dep"],[17,26,"dep"],[17,37,"dep"],[19,21,"appos"],[19,23,"cc"],[19,26,"conj_and"],[19,28,"conj_and"],[19,31,"conj_and"],[19,33,"conj_and"],[19,34,"punct"],[19,35,"cc"],[19,37,"conj_and"],[21,20,"punct"],[21,22,"punct"],[26,24,"amod"],[26,25,"amod"],[26,27,"punct"],[26,28,"conj_and"],[26,29,"punct"],[26,31,"conj_and"],[26,32,"cc"],[26,33,"conj_and"],[31,30,"amod"],[37,36,"amod"],[37,40,"nmod_such_as"],[37,42,"nmod_such_as"],[37,44,"nmod_such_as"],[37,46,"nmod_such_as"],[37,48,"nmod_such_as"],[37,50,"nmod_such_as"],[37,56,"nmod_
such_as"],[38,39,"mwe"],[40,38,"case"],[40,41,"punct"],[40,42,"conj_and"],[40,43,"punct"],[40,44,"conj_and"],[40,45,"punct"],[40,46,"conj_and"],[40,47,"punct"],[40,48,"conj_and"],[40,49,"punct"],[40,50,"conj_and"],[40,51,"cc"],[40,56,"conj_and"],[52,53,"mwe"],[52,54,"mwe"],[56,52,"det:qmod"],[56,55,"compound"]]}]}]}""", | ||
    )
    json(id)
  }
}

case class Argument(name: String, start: Int, end: Int) {
  override def toString: String = {
    s"Argument(name=$name, start=$start, end=$end)"
  }
}
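
For orientation, a minimal sketch (not part of this PR) of how a concrete spec might use the helpers above, assuming BaseSpec mixes in the ScalaTest matchers seen earlier in the file. The class name and argument roles are hypothetical, and the rule/engine setup is left as a comment because it depends on the ExtractorEngine API; token offsets refer to document "2" ("Becky ate gummy bears .").

class EatingEventExample extends EventSpec {
  // document "2" contains the single sentence "Becky ate gummy bears ."
  val doc: String = getJsonDocument("2")
  // After running an event rule over `doc` and obtaining an OdinsonMatch `m`:
  //   testEventTrigger(m, start = 1, end = 2)    // trigger token: "ate"
  //   testEventArguments(m, Seq(
  //     createArgument("subject", 0, 1),         // "Becky"
  //     createArgument("object", 2, 4)           // "gummy bears"
  //   ))
}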
Having all the docs in one place is great, but can you give them names that make it easier to know which one we're grabbing? Maybe add as a comment a mapping from name to contents, i.e., the actual sentences in the doc.
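
Purely as an illustration of that request, not a prescribed naming scheme: such a mapping could pair a descriptive key with the sentence its document contains, roughly as sketched below. The keys are invented; the sentences are copied from the documents in the diff.

object DocNames {
  // hypothetical descriptive keys -> the sentence each test document contains
  val docSentences: Map[String, String] = Map(
    "gummy-bear-consumption" -> "The consumption of gummy bears and donuts.",
    "becky-gummy-bears"      -> "Becky ate gummy bears."
  )
}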
Do you think using the id is enough?
I was planning to have a TAG SVG of all the documents used for testing somewhere in a README.
Meanwhile, the only thing I could think of that would be informative is the document id.