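"""Crawl outward from a starting URL and store the resulting link graph in Neo4j."""
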
import argparse

import httpx
import validators
import yaml
from bs4 import BeautifulSoup
from neo4j import GraphDatabase

# from neo4j.debug import Watcher


class URLGraph:
    """Thin wrapper around the Neo4j driver for building the URL graph."""

    def __init__(self, uri, user, password):
        self.driver = GraphDatabase.driver(uri, auth=(user, password))

    def close(self):
        self.driver.close()

    def add_node(self, node):
        # The crawl's starting URL has no parent and becomes the root node;
        # every other URL is attached beneath its parent.
        with self.driver.session() as session:
            if node["parent"] is None:
                session.execute_write(self._create_root, node)
            else:
                session.execute_write(self._create_node, node)

    @staticmethod
    def _create_node(tx, node):
        # Match the parent by URL, create the child, and link the two with a
        # :parent relationship. Note that CREATE (rather than MERGE) means a
        # URL seen twice is stored as two separate nodes.
        result = tx.run(
            "MATCH (existingNode:Node {url: $parent}) "
            "CREATE (newNode:Node {url: $url}) "
            "CREATE (existingNode)-[:parent]->(newNode) "
            "RETURN existingNode, newNode",
            url=node["url"],
            parent=node["parent"],
        )
        return result.values()

    @staticmethod
    def _create_root(tx, node):
        result = tx.run("CREATE (n:Node) SET n.url = $url RETURN n", url=node["url"])
        return result.values()


def crawl(url: str, depth: int, graph: URLGraph) -> None:
    """Recursively follow links from url, adding each child URL to the graph."""
    if depth == 0:
        return
    depth -= 1
    try:
        resp = httpx.get(url)
    except httpx.HTTPError:
        # Skip pages that cannot be fetched (DNS failures, timeouts, etc.)
        # rather than aborting the whole crawl.
        return
    soup = BeautifulSoup(resp.text, "html.parser")
    tags = soup.find_all("a")
    # Keep only hrefs that are absolute, well-formed URLs; relative links and
    # fragments are discarded by validators.url.
    valid_urls = [
        tag["href"]
        for tag in tags
        if tag.has_attr("href") and validators.url(tag["href"])
    ]
    for child_url in valid_urls:
        node = {"url": child_url, "parent": url}
        graph.add_node(node)
        crawl(child_url, depth, graph)


if __name__ == "__main__":
    # setup args
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-u",
        "--url",
        type=str,
        required=True,
        help="Specify a URL to create a graph of.",
    )
    parser.add_argument(
        "-d",
        "--depth",
        type=int,
        default=1,
        help="Specify the depth of the graph.",
    )
    args = parser.parse_args()
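
    # Expected config.yml shape, inferred from the keys read below (the values
    # shown are placeholders, not settings shipped with this repo):
    #   neo4j:
    #     uri: bolt://localhost:7687
    #     username: neo4j
    #     password: your-password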

    # read config
    with open("config.yml", "r") as stream:
        config_data = yaml.safe_load(stream)
    uri = config_data["neo4j"]["uri"]
    username = config_data["neo4j"]["username"]
    password = config_data["neo4j"]["password"]

    # construct graph
    graph = URLGraph(uri, username, password)
    print(args.url)
    graph.add_node({"url": args.url, "parent": None})
    crawl(args.url, args.depth, graph)
    graph.close()
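
# Example invocation (assumes a reachable Neo4j instance and a config.yml next
# to this script):
#   python main.py --url https://example.com --depth 2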