Skip to content

Commit

Permalink
drunk crawling
Browse files (browse the repository at this point in history)
  • Loading branch information
jarbasai committed Apr 7, 2017
1 parent f1f3232 commit ade8f29
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 15 deletions.
12 changes: 6 additions & 6 deletions mycroft/skills/LILACS/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,18 +86,18 @@ def handle_act_intent(self, message):
crawler = ConceptCrawler(concept_connector=knowledge, center_node="joana")

flag = crawler.drunk_crawl("joana", "frog")
print flag
self.log.info(flag)
self.speak("answer to is joana a frog is " + str(flag))

flag = crawler.drunk_crawl("joana", "animal")
print flag
self.log.info(flag)
self.speak("answer to is joana a animal is " + str(flag))

print flag
flag = crawler.drunk_crawl("joana", "human")
self.speak("answer to is joana a human is " + str(flag))
self.log.info(flag)
flag = crawler.drunk_crawl("joana", "mammal")
self.speak("answer to is joana a mammal is " + str(flag))

print flag
self.log.info(flag)
flag = crawler.drunk_crawl("joana", "alive")
self.speak("answer to is joana alive is " + str(flag))

Expand Down
20 changes: 11 additions & 9 deletions mycroft/skills/LILACS/concept.py
Original file line number Diff line number Diff line change
Expand Up @@ -371,7 +371,9 @@ def drunk_crawl(self, center_node, target_node, direction="parents"):
next_node = self.choose_next_node(center_node, direction)
crawl_depth = 1
while True:

# check if we found answer
if target_node in self.crawled:
return True
if next_node is None:
if len(self.uncrawled) == 0:
#no more nodes to crawl
Expand All @@ -384,32 +386,32 @@ def drunk_crawl(self, center_node, target_node, direction="parents"):
if crawl_depth >= self.depth:
# do not crawl further
return False

self.logger.info("crawled: " + str(self.crawled))
# print "crawl_path: " + str(self.crawl_path)
self.logger.info( "uncrawled: " + str(self.uncrawled))
self.logger.info( "next: " + next_node)
self.logger.info( "depth: " + str(crawl_depth))
# check if we found answer
if target_node in self.crawled:
return True
# see if we already crawled this
if next_node in self.crawled:
self.logger.info("crawling this node again: " + next_node)
# increase visit counter
self.visits[next_node] += 1
self.logger.info("number of visits: " + str(self.visits[next_node]))
# add to crawl path
self.crawl_path.append(next_node)
# remove fom uncrawled list
i = 0
for node in self.uncrawled:
if node == next_node:
self.logger.info("removing node from uncrawled node list: " + node)
self.uncrawled.pop(i)
i += 1
# chose another to crawl
next_node = None
# crawl next node
self.logger.info("choosing next node")
next_node = self.choose_next_node(next_node, direction)
crawl_depth += 1 #went further
self.logger.info("crawled nodes: " + str(self.crawled))
# print "crawl_path: " + str(self.crawl_path)
self.logger.info("uncrawled nodes: " + str(self.uncrawled))
crawl_depth += 1 # went further



Expand Down

0 comments on commit ade8f29

Please sign in to comment.