Skip to content

Commit

Permalink
Merge branch 'develop' of github.com:trane/salt into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
Andrew Kuhnhausen committed Mar 22, 2012
2 parents 5c0a238 + 89d773e commit f20be14
Show file tree
Hide file tree
Showing 3 changed files with 227 additions and 2 deletions.
216 changes: 216 additions & 0 deletions salt/comparison.py
@@ -0,0 +1,216 @@
import re
'''
Comparison Engine for -X
'''
class Comparitor(object):
    '''
    Comparison engine for -X expressions: pairs regex rules with
    actions, picks the longest-prefix winner, and drives a small
    tokenizer on top of them.
    '''
    class Rule(object):
        '''
        Rules are used by the Matcher to determine if prefix matches.
        A rule pairs a regex with an action callback invoked with the
        matched text.
        '''
        def __init__(self, regex, action):
            # Wrap the whole pattern in a group so the matched text is
            # always available as group(1).
            self.regex = re.compile('(' + regex + ')')
            self.action = action

        def prefix(self, inpt):
            '''
            Return the text this rule matches at the start of inpt, or
            False when it does not match there.

            match() is used rather than search(): callers slice the
            matched text off the front of the input, so a hit found
            past position 0 would remove the wrong characters.
            '''
            match = self.regex.match(inpt)
            if match:
                return match.group(1)
            else:
                return False

    class Matcher(object):
        '''
        Holds a list of rules and selects the one whose prefix match
        on a given input is longest.
        '''
        def __init__(self, rules=None):
            # A mutable default ([]) would be shared by every Matcher
            # instance; create a fresh list per instance instead.
            self.rules = [] if rules is None else rules

        def add_rule(self, regex, action):
            '''
            Add a rule to the matcher
            '''
            self.rules.append(Comparitor.Rule(regex, action))

        def prefixes(self, inpt):
            '''
            Get the matching prefix for each rule.  The list is built
            newest-rule-first (insert at the head) so that winner()'s
            >= comparison keeps the established tie-breaking order.
            '''
            prefixen = []
            for rule in self.rules:
                prefixen.insert(0, {'rule': rule, 'prefix': rule.prefix(inpt)})
            return prefixen

        def winner(self, inpt):
            '''
            Find winning rule for the provided input: the rule with
            the longest matching prefix, or None when nothing matches.
            '''
            prefixen = self.prefixes(inpt)

            maxP = 0
            winner = None

            for prefix in prefixen:
                # >= means on equal lengths the last entry examined
                # (the earliest-added rule) wins.
                if prefix['prefix'] and len(prefix['prefix']) >= maxP:
                    maxP = len(prefix['prefix'])
                    winner = prefix

            return winner

    class Tokenizer(object):
        '''
        This is a Tokenizer without need for Tokens, just provides the basic
        lexical needs of skip, peek, next and eat.
        '''
        def __init__(self, inpt=''):
            self.inpt = inpt
            self.matcher = Comparitor.Matcher()
            # Pending token and its text; None means "not yet lexed".
            self.next_token = None
            self.next_text = None

        def skip(self, matched):
            '''
            Rule action: consume the matched text without staging a
            token (used for whitespace).
            '''
            print("Skipping")
            self.inpt = self.inpt[len(matched):]
            self.next_token = None
            self.next_text = None

        def token(self, matched):
            '''
            Rule action: consume the matched text and stage it as the
            next token.
            '''
            self.inpt = self.inpt[len(matched):]
            self.next_token = matched
            self.next_text = matched

        def peek(self):
            '''
            Returns next token without consuming it.  Returns None at
            end of input, or an error string when no rule matches.
            '''
            if self.next_token:
                return self.next_token

            # Run winning rule actions (which may just skip input)
            # until one stages a token or the input is exhausted.
            while (self.next_token is None) and (self.inpt != ""):
                winner = self.matcher.winner(self.inpt)
                if not winner:
                    return "No rule matching {0}".format(self.inpt)
                winner['rule'].action(winner['prefix'])

            return self.next_token

        def next(self):
            '''
            Returns and consumes next token.
            '''
            if not self.next_token:
                self.peek()
            t = self.next_token
            self.next_token = None
            return t

        def eat(self, text):
            '''
            Consumes a token without returning it; returns an error
            string (rather than raising) when the consumed token's
            text is not ``text``.
            '''
            self.next()
            if self.next_text != text:
                return "Parse error: expected {0}, got {1}".format(text, self.next_text)

        def add_rule(self, regex, action):
            '''
            Register a lexing rule on the underlying matcher.
            '''
            self.matcher.add_rule(regex, action)



class Parser(object):
    '''
    Parser will parse expressions passed in -X with the following grammar:
    <Exp> ::= <Term> <Symbol> <Exp> | <Term>
    <Term> ::= ( <Exp> )
           | <Factor>
    <Factor> ::= <Grain>
           | <Primitive>
           | <Module>
           | <Pillar>
    <Symbol> ::= '>'
           | '<'
           | '>='
           | '<='
           | '&&'
           | '||'
    <Grain> ::= grains[<String>]
    <Module> ::= salt[<String>.<String>]
    <Pillar> ::= pillar[<String>]
    <Primitive> ::= Int
           | String
    '''
    # Operators accepted by parse_exp; any other token ends the
    # current expression (end of input, or a ')' eaten by parse_term).
    SYMBOLS = ('>', '<', '>=', '<=', '&&', '||')

    def __init__(self, inpt):
        t = Comparitor.Tokenizer(inpt)
        t.add_rule(r'[A-Za-z0-9]+', t.token)
        t.add_rule(r'\b(?:grains|salt|pillar)\[[^\]]+\]', t.token)
        t.add_rule(r'[()]', t.token)
        t.add_rule(r'>=?|<=?|&&|\|\|', t.token)
        t.add_rule(r'[ \n\t\r]', t.skip)
        self.t = t

    def parse_exp(self):
        '''
        <Exp> ::= <Term> <Symbol> <Exp> | <Term>
        '''
        term = self.parse_term()
        symbol = self.parse_symbol()

        # <Term> -- no operator follows (end of input, or a token such
        # as ')' that the enclosing parse_term() will eat).
        if symbol not in self.SYMBOLS:
            return term

        # <Term> <Symbol> <Exp>
        # parse_symbol() only peeks, so eat() here both consumes the
        # operator and verifies it.  (Consuming in parse_symbol() and
        # then eating would swallow the first right-hand-side token.)
        self.t.eat(symbol)
        if symbol == '>':
            return term > self.parse_exp()
        elif symbol == '<':
            return term < self.parse_exp()
        elif symbol == '>=':
            return term >= self.parse_exp()
        elif symbol == '<=':
            return term <= self.parse_exp()
        elif symbol == '&&':
            # Short-circuits like Python's `and`; a falsy term leaves
            # the right-hand side unparsed.
            return term and self.parse_exp()
        else:  # '||'
            return term or self.parse_exp()

    def parse_term(self):
        '''
        <Term> ::= ( <Exp> ) | <Factor>
        '''
        if self.t.peek() == '(':
            self.t.eat('(')
            exp = self.parse_exp()
            self.t.eat(')')
            return exp
        return self.parse_factor()

    def parse_symbol(self):
        '''
        Peek at (do not consume) the next token so parse_exp() can
        decide whether it is an operator; None at end of input.
        '''
        return self.t.peek()

    def parse_factor(self):
        '''
        <Factor> -- returned as the raw token text.  The old
        grains/salt/pillar dispatch after the unconditional return was
        dead code and has been removed; the per-kind parsers below are
        kept for callers that dispatch themselves.
        '''
        return self.t.next()

    def parse_grain(self):
        '''<Grain> ::= grains[<String>] -- raw token text.'''
        return self.t.next()

    def parse_module(self):
        '''<Module> ::= salt[<String>.<String>] -- raw token text.'''
        return self.t.next()

    def parse_pillar(self):
        '''<Pillar> ::= pillar[<String>] -- raw token text.'''
        return self.t.next()

    def parse_primitive(self):
        '''<Primitive> ::= Int | String -- raw token text.'''
        return self.t.next()

6 changes: 6 additions & 0 deletions salt/exceptions.py
Expand Up @@ -64,3 +64,9 @@ class PkgParseError(SaltException):
the CLI tool (pacman, yum, apt, aptitude, etc)
'''
pass

class SSHInvalidKey(SaltException):
    '''
    Raised when an invalid ssh key is passed to the ssh module
    (e.g. a key whose body contains whitespace).
    '''
    pass
7 changes: 5 additions & 2 deletions salt/modules/ssh.py
@@ -1,9 +1,10 @@
'''
'''
Manage client ssh components
'''

import os
import re
from salt.exceptions import SSHInvalidKey


def _refine_enc(enc):
Expand Down Expand Up @@ -39,6 +40,8 @@ def _format_auth_line(
'''
Properly format user input.
'''
if len(key.split()) > 1:
raise SSHInvalidKey('SSH key contains spaces')
line = ''
if options:
line += '{0} '.format(','.join(options))
Expand Down Expand Up @@ -143,7 +146,7 @@ def _validate_keys(key_file):
continue

# get "{options} key"
ln = re.search(linere, line)
ln = linere.search(line)
if not ln:
# not an auth ssh key, perhaps a blank line
continue
Expand Down

0 comments on commit f20be14

Please sign in to comment.