Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Merge remote-tracking branch 'origin/master'

  • Loading branch information...
commit dbc71c1b61a41026fbf3bb32cd714d8c21990044 2 parents 88023c3 + df9c1bc
@jamesturk jamesturk authored
View
6 openstates/ga/__init__.py
@@ -12,10 +12,10 @@
'lower': {'name': 'House', 'title': 'Representative'},
},
'terms': [
- {'name': '2013-2014', 'start_year': 2013, 'end_year': 2014,
- 'sessions': ['2013_14']},
{'name': '2011-2012', 'start_year': 2011, 'end_year': 2012,
- 'sessions': ['2011_12', '2011_ss']}
+ 'sessions': ['2011_12', '2011_ss']},
+ {'name': '2013-2014', 'start_year': 2013, 'end_year': 2014,
+ 'sessions': ['2013_14']}
],
'session_details': {
'2013_14': {
View
8 openstates/ga/committees.py
@@ -14,7 +14,13 @@ def scrape_session(self, term, chambers, session):
sid = self.metadata['session_details'][session]['_guid']
committees = self.cservice.GetCommitteesBySession(
sid
- )['CommitteeListing']
+ )
+
+ #if committees.strip() == "":
+ # return # If we get here, it's a problem.
+ # Commenting this out for future debugging. - PRT
+
+ committees = committees['CommitteeListing']
for committee in committees:
cid = committee['Id']
committee = self.cservice.GetCommittee(cid)
View
6 openstates/in/__init__.py
@@ -13,6 +13,8 @@
terms=[
{'name': '2011-2012', 'start_year': 2011,
'end_year': 2012, 'sessions': ['2011', '2012']},
+ {'name': '2013-2014', 'start_year': 2013,
+ 'end_year': 2014, 'sessions': ['2013', '2014']},
],
session_details={
'2011': {'start_date': datetime.date(2011, 1, 5),
@@ -21,6 +23,10 @@
},
'2012': {'display_name': '2012 Regular Session',
'_scraped_name': '2012 Regular Session',},
+ '2013': {'display_name': '2013 Regular Session',
+ '_scraped_name': '2013 Regular Session',},
+ '2014': {'display_name': '2014 Regular Session',
+ '_scraped_name': '2014 Regular Session',},
},
feature_flags=['subjects', 'capitol_maps', 'influenceexplorer'],
capitol_maps=[
View
5 openstates/in/legislators.py
@@ -70,7 +70,10 @@ def scrape_upper_republican(self, chamber, term, href, page, party, leg):
about_url = profile.xpath('//a[contains(., "About Sen.")]/@href')[0]
about = self.urlopen(about_url)
- about = lxml.html.fromstring(about)
+ try:
+ about = lxml.html.fromstring(about)
+ except:
+ import ipdb;ipdb.set_trace()
about.make_links_absolute(about_url)
leg.add_source(about_url)
View
11 openstates/ma/__init__.py
@@ -21,6 +21,12 @@
'start_year': 2011,
'name': '187',
'sessions': [ '187th' ]
+ },
+ {
+ 'end_year': 2014,
+ 'start_year': 2013,
+ 'name': '188',
+ 'sessions': [ '188th' ]
}
],
'name': 'Massachusetts',
@@ -35,6 +41,11 @@
'type': 'primary',
'display_name': '187th Legislature',
'_scraped_name': '187th',
+ },
+ '188th': {
+ 'type': 'primary',
+ 'display_name': '188th Legislature',
+ '_scraped_name': '188th',
}
},
'legislature_name': 'Massachusetts General Court',
View
8 openstates/pa/__init__.py
@@ -22,6 +22,10 @@
end_year=2012,
sessions=[
'2011-2012']),
+ dict(name='2013-2014', start_year=2013,
+ end_year=2014,
+ sessions=[
+ '2013-2014']),
],
session_details={
'2009-2010': {'type': 'primary',
@@ -37,6 +41,10 @@
'display_name': '2011-2012 Regular Session',
'_scraped_name': '2011-2012 Regular Session',
},
+ '2013-2014': {'type': 'primary',
+ 'display_name': '2013-2014 Regular Session',
+ '_scraped_name': '2013-2014 Regular Session',
+ },
},
feature_flags=['events', 'influenceexplorer'],
_ignored_scraped_sessions=[
View
8 openstates/vt/__init__.py
@@ -17,6 +17,10 @@
'start_year': 2011,
'end_year': 2012,
'sessions': ['2011-2012']},
+ {'name': '2013-2014',
+ 'start_year': 2013,
+ 'end_year': 2014,
+ 'sessions': ['2013-2014']},
],
session_details={'2009-2010': {'type': 'primary',
'display_name': '2009-2010 Regular Session',
@@ -26,6 +30,10 @@
'display_name': '2011-2012 Regular Session',
'_scraped_name': '2011-2012 Session',
},
+ '2013-2014': {'type': 'primary',
+ 'display_name': '2013-2014 Regular Session',
+ '_scraped_name': '2013-2014 Session',
+ },
},
feature_flags=['influenceexplorer'],
_ignored_scraped_sessions=['2009 Special Session', '2007-2008 Session',
Please sign in to comment.
Something went wrong with that request. Please try again.