Skip to content

Commit

Permalink
removed now-unused events.mapdata and a bunch of code and queries that …
Browse files Browse the repository at this point in the history
…only it used
  • Loading branch information
Ben Ranker committed Apr 5, 2012
1 parent 080ea03 commit 873438a
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 215 deletions.
27 changes: 0 additions & 27 deletions georgia_lynchings/events/mapdata.py

This file was deleted.

124 changes: 0 additions & 124 deletions georgia_lynchings/events/models.py
Expand Up @@ -394,130 +394,6 @@ def get_victim_query():

return unicode(q)

def get_metadata_query():
    '''Build the SPARQL query that retrieves core macro event metadata
    (label, event date range, and Brundage victim county).

    :rtype: a unicode string of the SPARQL query
    '''
    # Triple patterns appended in a fixed order so the generated query
    # text is stable from call to call.
    triple_patterns = [
        (Variable('macro'), sxcxcxn.Event, Variable('event')),
        (Variable('macro'), dcx.Identifier, Variable('label')),
        (Variable('event'), ix_ebd.mindate, Variable('min_date')),
        (Variable('event'), ix_ebd.maxdate, Variable('max_date')),
        (Variable('macro'), sxcxcxn.Victim, Variable('victim')),
        (Variable('victim'), sxcxcxn.Victim_Brundage,
         Variable('victim_Brundage')),
        (Variable('victim_Brundage'), ssxn.County_of_lynching_Brundage,
         Variable('vcounty_brdg')),
    ]
    query = SelectQuery(results=['macro', 'label', 'min_date', 'max_date',
                                 'vcounty_brdg'], distinct=True)
    for pattern in triple_patterns:
        query.append(pattern)
    return unicode(query)

def get_metadata(add_fields=[]):
    '''Get the macro event core metadata from sesame triplestore, plus
    any additional fields used in the timemap filter.

    :param add_fields: add extra fields to the basic metadata set.
    :rtype: a mapping dict of core metadata plus timemap field using
            core data returned by
            :meth:`~georgia_lynchings.rdf.sparqlstore.SparqlStore.query`,
            joined with data from additional filter fields, and indexed
            on the macro event id, with the following bindings:

            * `label`: the label of this Macro Event
            * `min_date`: the minimum date related to this event
            * `max_date`: the maximum date related to this event
            * `victim_county_brundage`: the (Brundage) county of the Victim

            plus any additional fields used by the timemap filter
    '''
    # Run the core metadata query against the triplestore.
    raw_results = SparqlStore().query(sparql_query=get_metadata_query())

    # Re-key the flat result list on the macro event id.
    indexed_results = indexOnMacroEvent(raw_results)

    # Fold in any extra timemap filter fields that were requested.
    update_filter_fields(add_fields, indexed_results)

    return indexed_results

def indexOnMacroEvent(resultSet=None):
    '''Create a new dictionary using the macroevent id as the key.

    :param resultSet: a mapping list of the type returned by
        :meth:`~georgia_lynchings.rdf.sparqlstore.SparqlStore.query`.
        It returns metadata for all MacroEvents,
        with the following bindings:

        * `macro`: the URI of this Macro Event
        * `label`: the label of this Macro Event
        * `min_date`: the minimum date related to this event
        * `max_date`: the maximum date related to this event
        * `victim_county_brundage`: the (Brundage) county of the Victim

    :rtype: a mapping dictionary of the resultSet using the macroevent
            id as the key.
    '''
    # NOTE: the original computed a `fields` list (keys minus 'macro')
    # that was never used; that dead code has been removed. The original
    # also used a mutable default argument ([]); None is the safe idiom.
    indexedResultSet = {}
    for item in (resultSet or []):
        # The macro URI ends in "#r<id>"; the numeric id becomes the key.
        row_id = item['macro'].split('#r')[1]
        # Mutates the input dicts (drops the 'macro' binding), exactly as
        # the original implementation did, so callers see the same data.
        del item['macro']
        indexedResultSet[row_id] = item

    return indexedResultSet

def update_filter_fields(add_fields=[], resultSetIndexed={}):
    '''Update the core metadata with filter field data.

    :param add_fields: add extra fields to the basic metadata set.
    :param resultSetIndexed: a mapping dict of core metadata using the
                             macroevent id as the dict key; updated in place.
    '''

    # Victim alleged crime (Brundage): built as an ad-hoc SelectQuery.
    if 'victim_allegedcrime_brundage' in add_fields:
        ac_query = SelectQuery(results=['macro',
                                        'victim_allegedcrime_brundage'],
                               distinct=True)
        for pattern in [
            (Variable('macro'), sxcxcxn.Victim, Variable('victim')),
            (Variable('victim'), sxcxcxn.Victim_Brundage,
             Variable('victim_Brundage')),
            (Variable('victim_Brundage'), ssxn.Alleged_crime_Brundage,
             Variable('victim_allegedcrime_brundage')),
        ]:
            ac_query.append(pattern)
        ac_results = SparqlStore().query(sparql_query=unicode(ac_query))
        join_data_on_macroevent(resultSetIndexed, ac_results)

    # City: a canned query from query_bank.
    if 'city' in add_fields:
        city_results = SparqlStore().query(
            sparql_query=query_bank.filters['city'])
        join_data_on_macroevent(resultSetIndexed, city_results)

def join_data_on_macroevent(resultSetIndexed={}, filterdict=[]):
    '''Join the filter resultSet to the metadata indexed dictionary.

    :param resultSetIndexed: a mapping dict of core metadata using the
                             macroevent id as the dict key; updated in place.
    :param filterdict: a list of dictionaries with the following bindings:

        * `macro`: the URI of this Macro Event
        * filtername: the name of the filter
    '''
    # Fixes vs. original: `except KeyError, err` (Python-2-only syntax)
    # -> `as err`; `keys()` was treated as a mutable list, which breaks
    # on Python 3 dict views; logger.debug now uses lazy %-args.
    if resultSetIndexed and filterdict:
        # The single non-'macro' key names the filter being joined.
        filtername = [key for key in filterdict[0] if key != 'macro'][0]
        for item in filterdict:
            try:
                # The macro URI ends in "#r<id>".
                row = resultSetIndexed[item['macro'].split('#r')[1]]
                value = item[filtername].encode('ascii').title()
                if filtername in row:
                    # Accumulate distinct values only.
                    if value not in row[filtername]:
                        row[filtername].append(value)
                else:
                    row[filtername] = [value]
            except KeyError as err:
                # Filter rows may reference macro events outside the core
                # set; skip them, as the original did.
                logger.debug("Filter[%s] not defined for MacroEvent[%s]",
                             filtername, err)

def get_filters(filters):
'''Get the queries to retrieve filters tags and frequency.
Expand Down
64 changes: 0 additions & 64 deletions georgia_lynchings/query_bank.py
Expand Up @@ -413,70 +413,6 @@
ORDER BY DESC(?frequency)
"""

'Find the cities for all the macroevents.'
filters['city']="""
PREFIX dcx:<http://galyn.example.com/source_data_files/data_Complex.csv#>
PREFIX scxn:<http://galyn.example.com/source_data_files/setup_Complex.csv#name->
PREFIX ssxn:<http://galyn.example.com/source_data_files/setup_Simplex.csv#name->
PREFIX sxcxcxn:<http://galyn.example.com/source_data_files/setup_xref_Complex-Complex.csv#name->
SELECT DISTINCT ?macro ?city
WHERE {
# For all ?macro, find all of the
# Events for those macros, and all of the Triplets for those events.
# We'll be looking in these triplets for locations.
?macro a scxn:Macro_Event.
?macro sxcxcxn:Event ?event.
?event sxcxcxn:Semantic_Triplet ?_1.
# Every Triplet has a Process
?_1 sxcxcxn:Process ?_2.
# We need all of the places for that Process. There are four ways
# they might be expressed:
{
?_2 sxcxcxn:Simple_process ?_3.
?_3 sxcxcxn:Circumstances ?_4.
?_4 sxcxcxn:Space ?_5.
} UNION {
?_2 sxcxcxn:Complex_process ?_6.
?_6 sxcxcxn:Simple_process ?_3.
?_3 sxcxcxn:Circumstances ?_4.
?_4 sxcxcxn:Space ?_5.
} UNION {
?_2 sxcxcxn:Complex_process ?_6.
?_6 sxcxcxn:Other_process ?_7.
?_7 sxcxcxn:Simple_process ?_3.
?_3 sxcxcxn:Circumstances ?_4.
?_4 sxcxcxn:Space ?_5.
} UNION {
?_2 sxcxcxn:Complex_process ?_6.
?_6 sxcxcxn:Other_process ?_7.
?_7 sxcxcxn:Nominalization ?_8.
?_8 sxcxcxn:Space ?_5.
}
# Regardless of which way we came, ?_5 is some sort of place. If
# we're going to get from there to location simplex data, this
# is how we get there:
{
?_5 sxcxcxn:City ?_10.
}
# Grab the simplex data we're interested in, whichever are
# available (but note that "?" is equivalent to missing data)
OPTIONAL {
?_10 ssxn:City_name ?city.
FILTER (?city != "?")
}
# And grab only those records that have at least one data point.
FILTER (BOUND(?city))
}
ORDER BY ?macro
"""

'Find the city and frequency list'
filters['city_freq']="""
PREFIX dcx:<http://galyn.example.com/source_data_files/data_Complex.csv#>
Expand Down

0 comments on commit 873438a

Please sign in to comment.