Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

2 minor fixes #8

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
79 changes: 73 additions & 6 deletions liclient/__init__.py
Expand Up @@ -13,7 +13,7 @@ def __init__(self, ck, cs):
self.consumer_key = ck self.consumer_key = ck
self.consumer_secret = cs self.consumer_secret = cs


self.api_profile_url = 'http://api.linkedin.com/v1/people/~' self.api_profile_url = 'http://api.linkedin.com/v1/people/'
self.api_profile_connections_url = 'http://api.linkedin.com/v1/people/~/connections' self.api_profile_connections_url = 'http://api.linkedin.com/v1/people/~/connections'
self.api_network_update_url = 'http://api.linkedin.com/v1/people/~/network' self.api_network_update_url = 'http://api.linkedin.com/v1/people/~/network'
self.api_comment_feed_url = 'http://api.linkedin.com/v1/people/~/network/updates/' + \ self.api_comment_feed_url = 'http://api.linkedin.com/v1/people/~/network/updates/' + \
Expand Down Expand Up @@ -83,7 +83,7 @@ def get_user_profile(self, access_token, selectors=None, **kwargs):


content = self.clean_dates(content) content = self.clean_dates(content)
return LinkedInXMLParser(content).results return LinkedInXMLParser(content).results

def get_user_connections(self, access_token, selectors=None, **kwargs): def get_user_connections(self, access_token, selectors=None, **kwargs):
""" """
Get the connections of the current user. Valid keyword arguments are Get the connections of the current user. Valid keyword arguments are
Expand Down Expand Up @@ -176,6 +176,20 @@ def search(self, access_token, data, field_selector_string=None):
# print content # useful for debugging... # print content # useful for debugging...
return LinkedInXMLParser(content).results return LinkedInXMLParser(content).results


def company_search(self, access_token, data, field_selector_string=None):
    """
    Use the LinkedIn Search API to find companies.

    The criteria for your search should be passed as the 2nd positional
    argument as a dictionary of key-value pairs corresponding to the
    parameters allowed by the API. Formatting of arguments will be done
    for you (i.e. lists of keywords will be joined with "+").

    An optional field selector string restricts which company fields the
    API returns. Returns the parsed results from LinkedInXMLParser.
    """
    # LinkedInCompanySearchAPI builds the signed-request token and URL.
    srch = LinkedInCompanySearchAPI(data, access_token, field_selector_string)
    client = oauth.Client(self.consumer, srch.user_token)
    resp, content = client.request(srch.generated_url, method='GET')
    # print content # useful for debugging...
    return LinkedInXMLParser(content).results

def send_message(self, access_token, recipients, subject, body): def send_message(self, access_token, recipients, subject, body):
""" """
Send a message to a connection. "Recipients" is a list of ID numbers, Send a message to a connection. "Recipients" is a list of ID numbers,
Expand Down Expand Up @@ -227,6 +241,7 @@ def prepare_request(self, access_token, url, kws=[]):
prep_url = self.append_initial_arg(k, kws[k], prep_url) prep_url = self.append_initial_arg(k, kws[k], prep_url)
else: else:
prep_url = self.append_sequential_arg(k, kws[k], prep_url) prep_url = self.append_sequential_arg(k, kws[k], prep_url)
if not kws: prep_url += '~'
prep_url = re.sub('&&', '&', prep_url) prep_url = re.sub('&&', '&', prep_url)
print prep_url print prep_url
return user_token, prep_url return user_token, prep_url
Expand All @@ -243,13 +258,12 @@ def append_id_args(self, ids, prep_url):
return prep_url return prep_url


def append_initial_arg(self, key, args, prep_url): def append_initial_arg(self, key, args, prep_url):
assert '?' not in prep_url, 'Initial argument has already been applied to %s' % prep_url
if type(args) == type([]): if type(args) == type([]):
prep_url += '?' + key + '=' + str(args[0]) prep_url += key + '=' + str(args[0])
if len(args) > 1: if len(args) > 1:
prep_url += ''.join(['&' + key + '=' + str(arg) for arg in args[1:]]) prep_url += ''.join(['&' + key + '=' + str(arg) for arg in args[1:]])
else: else:
prep_url += '?' + key + '=' + str(args) prep_url += key + '=' + str(args)
return prep_url return prep_url


def append_sequential_arg(self, key, args, prep_url): def append_sequential_arg(self, key, args, prep_url):
Expand Down Expand Up @@ -358,12 +372,65 @@ def invitation_factory(self, recipient, subject, body, **kwargs):
auth auth
) )
return re.sub('_', '-', etree.tostring(mxml)) return re.sub('_', '-', etree.tostring(mxml))


class LinkedInCompanySearchAPI(LinkedInAPI):
def __init__(self, params, access_token, field_selector_string=None):
self.api_search_url = 'http://api.linkedin.com/v1/companies'
self.field_selector_string = field_selector_string
self.routing = {
'email-domain': self.email_domain,
'universal-name': self.universal_domain,
}
self.user_token, self.generated_url = self.do_process(access_token, params)
print "url:", self.generated_url

def do_process(self, access_token, params):
assert type(params) == type(dict()), 'The passed parameters to the Search API must be a dictionary.'
user_token = oauth.Token(access_token['oauth_token'], access_token['oauth_token_secret'])
url = self.api_search_url
for p in params:
try:
url = self.routing[p](url, params[p])
params[p] = None
except KeyError:
continue
remaining_params = {}
for p in params:
if params[p]:
remaining_params[p] = params[p]
url = self.process_remaining_params(url, remaining_params)
return user_token, url

def process_remaining_params(self, url, remaining_params):
return url

def email_domain(self, url, val):
prep_url = url
if self.field_selector_string:
prep_url += ':' + self.field_selector_string
prep_url += '?'
try:
prep_url = self.append_initial_arg('email-domain', val, prep_url)
except AssertionError:
prep_url = self.append_sequential_arg('email-domain', val, prep_url)
return prep_url

def universal_domain(self, url, val):
prep_url = url + '/'
try:
prep_url = self.append_initial_arg('universal-name', val, prep_url)
except AssertionError:
prep_url = self.append_sequential_arg('universal-name', val, prep_url)
if self.field_selector_string:
prep_url += ':' + self.field_selector_string
return prep_url

class LinkedInSearchAPI(LinkedInAPI): class LinkedInSearchAPI(LinkedInAPI):
def __init__(self, params, access_token, field_selector_string=None): def __init__(self, params, access_token, field_selector_string=None):
self.api_search_url = 'http://api.linkedin.com/v1/people-search' self.api_search_url = 'http://api.linkedin.com/v1/people-search'
if field_selector_string: if field_selector_string:
self.api_search_url += ':' + field_selector_string self.api_search_url += ':' + field_selector_string
self.api_search_url += '?'
self.routing = { self.routing = {
'keywords': self.keywords, 'keywords': self.keywords,
'name': self.name, 'name': self.name,
Expand Down
109 changes: 106 additions & 3 deletions liclient/parsers/lixml.py
Expand Up @@ -14,9 +14,14 @@ def __init__(self, content):
'position': self.__parse_position, 'position': self.__parse_position,
'skill': self.__parse_skills, 'skill': self.__parse_skills,
'education': self.__parse_education, 'education': self.__parse_education,
'people': self.__parse_people_collection, 'people-search': self.__parse_people_collection,
'twitter-account': self.__parse_twitter_accounts, 'twitter-account': self.__parse_twitter_accounts,
'member-url': self.__parse_member_url_resources 'member-url': self.__parse_member_url_resources,
'companies': self.__parse_company_collection,
'company': self.__parse_single_company,
'location': self.__parse_company_location,
'specialty': self.__parse_company_specialty,
'email-domain': self.__parse_company_email_domain,
} }
self.tree = etree.fromstring(content) self.tree = etree.fromstring(content)
self.root = self.tree.tag self.root = self.tree.tag
Expand Down Expand Up @@ -71,10 +76,33 @@ def __parse_people_collection(self, tree):
result_count = int(n.text) result_count = int(n.text)
content = [] content = []
for p in ppl: for p in ppl:
print p.getchildren()
rslts = LinkedInProfileParser(p).results rslts = LinkedInProfileParser(p).results
content.append(rslts) content.append(rslts)
return content return content


def __parse_company_collection(self, tree):
    """Parse a <companies> collection: one parsed result per child element."""
    return [LinkedInCompanyParser(node).results for node in tree.getchildren()]

def __parse_single_company(self, tree):
    """Parse a lone <company> element; wrap the result in a list for consistency."""
    parsed = LinkedInCompanyParser(tree).results
    return [parsed]

def __parse_company_location(self, tree):
    """Delegate parsing of a <location> element to LinkedInLocationParser."""
    parser = LinkedInLocationParser(tree)
    return parser.results

def __parse_company_specialty(self, tree):
    """A <specialty> element carries its value as plain element text."""
    value = tree.text
    return value

def __parse_company_email_domain(self, tree):
    """An <email-domain> element carries its value as plain element text."""
    value = tree.text
    return value

class LinkedInNetworkUpdateParser(LinkedInXMLParser): class LinkedInNetworkUpdateParser(LinkedInXMLParser):
def __init__(self, content): def __init__(self, content):
self.xpath_collection = { self.xpath_collection = {
Expand Down Expand Up @@ -167,6 +195,81 @@ def __objectify(self, data, u_type, u):
obj = mappers.NetworkUpdate(data, u) obj = mappers.NetworkUpdate(data, u)
return obj return obj


class LinkedInCompanyParser(LinkedInXMLParser):
    """
    Parses a <company> XML element into mappers.Company objects.

    ``results`` is always a list of Company objects.
    """

    def __init__(self, content):
        # ``content`` is an already-parsed lxml element, not an XML string.
        self.tree = content
        self.results = self.__build_data(self.tree)

    def __build_data(self, tree):
        """
        Build the list of Company objects.

        Fast path: when the element is the document root <company>, each
        direct child becomes one flat attribute on the Company object.
        Otherwise (nested <company> elements), fall back to flattening
        every text-bearing descendant into 'parent_child'-style keys.
        """
        results = []
        # NOTE(review): the absolute xpath only matches when <company> is
        # the document root; nested elements fall through to the kludge.
        for c in tree.xpath('/company'):
            company = {}
            for item in c.getchildren():
                company[re.sub(r'-', '_', item.tag)] = item.text
            results.append(mappers.Company(company, c))

        if not results:
            # Deal with hierarchical results in a somewhat kludgy way:
            # repeated keys accumulate their values into a list.
            company = {}
            for item in tree.iterdescendants():
                clean = item.text and item.text.strip()
                if not clean:
                    continue
                name = self.__build_name(tree, item)
                if name in company:
                    value = company[name]
                    if not isinstance(value, list):
                        company[name] = [value, clean]
                    else:
                        company[name].append(clean)
                else:
                    company[name] = clean
            results.append(mappers.Company(company, tree))
        return results

    def __build_name(self, parent, item):
        """Join dash-to-underscore-cleaned tags from ``parent`` down to ``item``."""
        def fix(s):
            return re.sub(r'-', '_', s)
        name = ''
        p = item.getparent()
        while p != parent:
            name = fix(p.tag) + '_' + name
            p = p.getparent()
        name += fix(item.tag)
        return name

class LinkedInLocationParser(LinkedInXMLParser):
    """
    Parses a <location> XML element into a mappers.Location object.

    NOTE: unlike LinkedInCompanyParser, ``results`` is a single Location
    object rather than a list.
    """

    def __init__(self, content):
        # ``content`` is an already-parsed lxml element, not an XML string.
        self.tree = content
        self.results = self.__build_data(self.tree)

    def __build_data(self, tree):
        """
        Flatten every text-bearing descendant of ``tree`` into a dict keyed
        by 'parent_child'-style names; repeated keys accumulate into lists.
        Returns the dict wrapped in a mappers.Location object.
        """
        location = {}
        for item in tree.iterdescendants():
            clean = item.text and item.text.strip()
            if not clean:
                continue
            name = self.__build_name(tree, item)
            if name in location:
                value = location[name]
                if not isinstance(value, list):
                    location[name] = [value, clean]
                else:
                    location[name].append(clean)
            else:
                location[name] = clean
        return mappers.Location(location, tree)

    def __build_name(self, parent, item):
        """Join dash-to-underscore-cleaned tags from ``parent`` down to ``item``."""
        def fix(s):
            return re.sub(r'-', '_', s)
        name = ''
        p = item.getparent()
        while p != parent:
            name = fix(p.tag) + '_' + name
            p = p.getparent()
        name += fix(item.tag)
        return name

class LinkedInProfileParser(LinkedInXMLParser): class LinkedInProfileParser(LinkedInXMLParser):
def __init__(self, content): def __init__(self, content):
self.tree = content self.tree = content
Expand Down