Skip to content

Commit

Permalink
Misc
Browse files Browse the repository at this point in the history
  • Loading branch information
staffanm committed Feb 19, 2012
1 parent 35e0d7f commit 24f8517
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 8 deletions.
10 changes: 7 additions & 3 deletions GraphBuilder.py
Expand Up @@ -12,7 +12,10 @@
from StringIO import StringIO
import xml.etree.cElementTree as ET

from rdflib.Graph import Graph
try:
from rdflib.Graph import Graph
except ImportError:
from rdflib import Graph
from rdflib import URIRef, Literal

import Util
Expand Down Expand Up @@ -138,7 +141,7 @@ def create_graph(f, engine="dot", arguments="", filename="tmp.png", filetype="pn
dotfile.close()
cmdline = "%s %s -T%s -o%s tmp.dot" % (engine, arguments, filetype, filename)
print "Running %s" % cmdline
p = subprocess.Popen(cmdline)
p = subprocess.Popen(cmdline, shell=True)
ret = p.wait()
print "Graph %s created in %.3f sec" % (filename, time() - start)

Expand All @@ -149,7 +152,8 @@ def build_csvfile_from_sparql_results(res):
writer.writerows(res)

def sparql_select(sq):
store = SesameStore("http://localhost:8080/openrdf-sesame", "lagen.nu")
# store = SesameStore("http://localhost:8080/openrdf-sesame", "lagen.nu")
store = SesameStore("http://localhost:8080/openrdf-sesame", "mysite")
results = store.select(sq)
tree = ET.fromstring(results)
res = []
Expand Down
41 changes: 40 additions & 1 deletion LegalSource.py
Expand Up @@ -333,6 +333,8 @@ def RelateAll(self,file=None):
for f in files:
c += 1
graph = self._extract_rdfa(f)
self._add_deps(graph)

triples += len(graph)
store.add_graph(graph)
store.commit()
Expand Down Expand Up @@ -657,7 +659,44 @@ def _extract_rdfa(self, filename):
self.__tidy_graph(g)

return g


_rf_lookup = None   # lazy cache: maps rattsfall URI tails -> basefiles (never loaded, see below)

def _add_deps(self, dependency, graph):
    """Record *dependency* in the .deps file of every resource that
    *graph* refers to.

    For each triple in *graph* whose object is a URIRef, the URI is
    mapped to an intermediate ".deps" file path, and *dependency* (a
    string) is appended to that file unless it is already listed.

    NOTE(review): the call site in RelateAll invokes this as
    self._add_deps(graph) with a single argument -- that call will
    raise TypeError against this signature; confirm intended calling
    convention before relying on either.
    """
    # Step 1: consider only triples whose object is a URIRef
    # (literals etc. cannot be dependencies of another document).
    for (s, p, o) in graph:
        if type(o) != URIRef:
            continue
        uri = unicode(o)

        # Step 2: map the URI to the dependency file of that
        # resource, e.g.
        # "http://rinfo.lagrummet.se/publ/sfs/1973:877#P4" ->
        # "data/sfs/intermediate/1973/877.deps"
        filename = None
        if uri.startswith("http://rinfo.lagrummet.se/publ/sfs/"):
            basefile = uri.split("/")[-1].split("#")[0].replace(":", "/")
            filename = "data/sfs/intermediate/%s.deps" % basefile
        elif uri.startswith("http://rinfo.lagrummet.se/publ/rattsfall/"):
            if self._rf_lookup is None:
                # TODO: load lookup table from data/dv/generated/uri.map
                # NOTE(review): table loading was never implemented;
                # skip these URIs rather than subscript None below.
                continue
            basefile = self._rf_lookup[uri[41:]]
            filename = "data/dv/intermediate/%s.deps" % basefile
        elif uri.startswith("http://lagen.nu/concept/"):
            basefile = uri[24:].replace("_", " ")
            filename = "data/wiki/intermediate/%s.deps" % basefile
        if filename is None:
            # URI belongs to none of the known namespaces -- nothing
            # to record (previously fell through with filename unbound).
            continue

        # Step 3: append the dependency unless already present.
        present = False
        try:
            for line in open(filename):
                if line.strip() == dependency:
                    present = True
                    break
        except IOError:
            pass  # no deps file yet -- this will be its first entry
        if not present:
            # "a" (append), not "w": overwriting would discard every
            # previously recorded dependency for this resource.
            fp = open(filename, "a")
            fp.write(dependency + "\n")
            fp.close()




def _store_select(self,query):
"""Send a SPARQL formatted SELECT query to the Sesame
store. Returns the result as a list of dicts"""
Expand Down
10 changes: 6 additions & 4 deletions Manager.py
Expand Up @@ -233,24 +233,26 @@ def _prep_frontpage(self):
log.warning("Marker %s not found at %s" % (marker,url))

def _make_images(self):
for i in range(1,100):
self.make_image("K%d"%i,"%d kap."%i)
for i in range(1,150):
for j in ('','a','b'):
self.make_image("K%d%s"%(i,j),"%d%s kap."%(i,j))
for i in range(1,100):
self.make_image("S%d"%i,"%d st."%i)

def make_image(self, basename, label):
    """Create img/<basename>.png showing *label*, unless it already exists.

    Renders the label right-aligned in a small transparent 44x14 PNG
    via ImageMagick's convert. The trailing space after %s keeps the
    right-aligned text off the image edge.
    """
    filename = "img/%s.png" % basename
    if not os.path.exists(filename):
        log.info("Creating img %s with label %s" % (basename, label))
        # (The stale duplicate cmd assignment with -size 36x14 was
        # removed; only this value was ever executed.)
        # NOTE(review): label is interpolated into a shell command
        # unescaped -- fine for the internal K*/S* callers, unsafe
        # for arbitrary input.
        cmd = 'convert -background transparent -fill Grey -font Arial -pointsize 10 -size 44x14 -gravity East label:"%s " %s' % (label, filename)
        os.system(cmd)

def _static_indexpages(self):
# make the front page and other static pages
log.info("Generating site global static pages")

self._make_images()

return

self._prep_frontpage()

# we need to copy the following four file to the base dir,
Expand Down

0 comments on commit 24f8517

Please sign in to comment.