Large diffs are not rendered by default.

@@ -110,7 +110,7 @@ def _connect(self):
raise AccessError()
else:
self._pop3.user(self.user)
self._pop3.pass_(self.password)
self._pop3.pass_(self.password.encode('utf-8'))
except poplib.error_proto:
raise AccessError()

@@ -200,7 +200,11 @@ def unquote(self, string, charset_order=None):
def uq(m):
cs, how, data = m.group(1), m.group(2), m.group(3)
if how in ('b', 'B'):
return base64.b64decode(data).decode(cs)
try:
return base64.b64decode(''.join(data.split())+'===').decode(cs)
except TypeError:
print 'FAILED TO B64DECODE: %s' % data
return data
else:
return quopri.decodestring(data, header=True).decode(cs)

@@ -383,7 +383,7 @@ def encoded_hdr(self, msg, hdr, value=None):
def Create(cls, idx, mbox_id, mbx,
msg_to=None, msg_cc=None, msg_bcc=None, msg_from=None,
msg_subject=None, msg_text='', msg_references=None,
msg_id=None, msg_atts=None,
msg_id=None, msg_atts=None, msg_headers=None,
save=True, ephemeral_mid='not-saved', append_sig=True,
use_default_from=True):
msg = MIMEMultipart(boundary=MakeBoundary())
@@ -444,6 +444,9 @@ def Create(cls, idx, mbox_id, mbx,
msg.attach(tp)
del tp['MIME-Version']

for k, v in (msg_headers or []):
msg[k] = v

if msg_atts:
for att in msg_atts:
att = copy.deepcopy(att)
@@ -927,7 +930,7 @@ def get_msg(self, pgpmime='default', crypto_state_feedback=True):
self.msg_parsed = self._get_parsed_msg(pgpmime)
result = self.msg_parsed
if not result:
raise IndexError(_('Message not found?'))
raise IndexError(_('Message not found'))
return result

def is_thread(self):
@@ -1105,7 +1108,7 @@ def get_message_tags(self):
def get_message_tree(self, want=None, tree=None, pgpmime='default'):
msg = self.get_msg(pgpmime=pgpmime)
want = list(want) if (want is not None) else None
tree = tree or {}
tree = tree or {'_cleaned': []}
tree['id'] = self.get_msg_info(self.index.MSG_ID)

if want is not None:
@@ -1137,6 +1140,9 @@ def get_message_tree(self, want=None, tree=None, pgpmime='default'):
convs.append(Email(self.index, int(rid, 36)
).get_msg_summary())

if (want is None or 'headerprints' in want):
tree['headerprints'] = self.get_headerprints()

if (want is None or 'headers' in want) and 'headers' not in tree:
tree['headers'] = {}
for hdr in msg.keys():
@@ -1209,17 +1215,24 @@ def get_message_tree(self, want=None, tree=None, pgpmime='default'):
tree['text_parts'].extend(text_parts)

elif want is None or 'attachments' in want:
filename_org = safe_decode_hdr(hdr=part.get_filename() or '')
filename = CleanText(filename_org,
banned=(CleanText.HTML +
CleanText.CRLF + '\\/'),
replace='_').clean
att = {
'mimetype': mimetype,
'count': count,
'part': part,
'length': len(part.get_payload(None, True) or ''),
'content-id': part.get('content-id', ''),
'filename': safe_decode_hdr(hdr=part.get_filename() or ''),
'filename': filename,
'crypto': crypto
}
att['aid'] = self._attachment_aid(att)
tree['attachments'].append(att)
if filename_org != filename:
tree['_cleaned'].append('att: %s' % att['aid'])

if want is None or 'text_parts' in want:
if tree.get('html_parts') and not tree.get('text_parts'):
@@ -3,38 +3,73 @@
Mailpile. If you find yourself checking which platform the app runs on, adding
a function here instead is probably The Right Thing.
"""
import copy
import os
import subprocess
import sys


# This is a cache of discovered binaries and their paths.
BINARIES = {}


# These are the binaries we want, and the test we use to detect whether
# they are available/working.
BINARIES_WANTED = {
'GnuPG': ['gpg', '--version'],
'OpenSSL': ['openssl', 'version'],
'Tor': ['tor', '--version']}


def _assert_file_exists(path):
if not os.path.exists(path):
raise OSError('Not found: %s' % path)
return path


def DetectBinaries(which=None, use_cache=True, preferred={}, _raise=None):
global BINARIES
if which and use_cache and which in BINARIES:
return BINARIES[which]

def GetDefaultGnuPGCommand():
# FIXME: Detect if we are running from a package, use bundled binaries.
if sys.platform.startswith('win'):
return _assert_file_exists('GnuPG\\gpg.exe')
else:
return 'gpg'
for binary, binary_test in BINARIES_WANTED.iteritems():
if (which is None) or (binary == which):
if preferred.get(binary):
binary_test = copy.copy(binary_test)
binary_test[0] = preferred[binary]
try:
p = subprocess.check_call(binary_test,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE)
BINARIES[binary] = binary_test[0]
except (subprocess.CalledProcessError, OSError):
if binary in BINARIES:
del BINARIES[binary]

if which:
if _raise not in (None, False):
if not BINARIES.get(which):
raise _raise('%s not found' % which)
return BINARIES.get(which)

def GetDefaultOpenSSLCommand():
# FIXME: Detect if we are running from a package, use bundled binaries.
if sys.platform.startswith('win'):
# FIXME: This should maybe be a bit smarter?
return _assert_file_exists('OpenSSL\\bin\\openssl.exe')
else:
# Rely on the PATH to find the way
return 'openssl'
elif _raise not in (None, False):
for binary, binary_test in BINARIES_WANTED.iteritems():
if not BINARIES.get(binary):
raise _raise('%s not found' % binary)

return BINARIES


def GetDefaultTorPath():
# FIXME: Detect if we are running from a package, use bundled binaries.
return 'tor'
def GetDefaultGnuPGCommand(_raise=OSError):
return DetectBinaries(which='GnuPG', _raise=_raise)


def GetDefaultOpenSSLCommand(_raise=OSError):
return DetectBinaries(which='OpenSSL', _raise=_raise)


def GetDefaultTorPath(_raise=OSError):
return DetectBinaries(which='Tor', _raise=_raise)


def InDesktopEnvironment():
@@ -84,3 +119,33 @@ def GetAppDataDirectory():
# Assume other platforms are Unixy
return os.getenv('XDG_DATA_HOME', os.path.expanduser('~/.local/share'))


def RestrictReadAccess(path):
"""
Restrict access to a file or directory so only the user can read it.
"""
# FIXME: Windows code goes here!
if os.path.isdir(path):
os.chmod(path, 0700)
else:
os.chmod(path, 0600)


def RandomListeningPort(count=1, host='127.0.0.1'):
socks = []
ports = []
try:
import socket
for port in range(0, count):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, 0))
socks.append(sock)
ports.append(sock.getsockname()[1])
if count == 1:
return ports[0]
else:
return ports
finally:
for sock in socks:
sock.close()
@@ -27,7 +27,7 @@
'setup_magic', 'oauth', 'exporters', 'plugins', 'motd', 'backups',
'vcard_carddav', 'vcard_gnupg', 'vcard_gravatar', 'vcard_libravatar',
'vcard_mork', 'html_magic', 'migrate', 'smtp_server', 'crypto_policy',
'keylookup', 'webterminal'
'keylookup', 'webterminal', 'crypto_autocrypt'
]
PLUGINS = __all__

@@ -157,6 +157,10 @@ def available(self):
def loadable(self):
return self.BUILTIN[:] + self.RENAMED.keys() + self.DISCOVERED.keys()

def loadable_early(self):
return [k for k, (n, m) in self.DISCOVERED.iteritems()
if not m.get('require_login', True)]

def _import(self, full_name, full_path):
# create parents as necessary
parents = full_name.split('.')[2:] # skip mailpile.plugins
@@ -326,6 +330,10 @@ def _process_manifest_pass_one(self, full_name,

self.register_commands(cls)

# Register worker threads
for thr in manifest_path('threads'):
self.register_worker(self._get_class(full_name, thr))

# Register mailboxes
package = str(full_name)
for mailbox in manifest_path('mailboxes'):
@@ -762,7 +770,10 @@ def get_web_asset(self, path, default=None):

# These are the elements that exist at the moment
UI_ELEMENTS = {
'settings': [],
'activities': [],
'email_activities': [], # Activities on e-mails
'thread_activities': [], # Activities on e-mails in a thread
'display_modes': [],
'display_refiners': [],
'selection_actions': []
@@ -42,7 +42,7 @@ def _gunzip(data):

def _decrypt(data, config):
with DecryptingStreamer(cStringIO.StringIO(data),
mep_key=config.master_key) as fd:
mep_key=config.get_master_key()) as fd:
data = fd.read()
fd.verify(_raise=IOError)
return data
@@ -121,7 +121,7 @@ def _add_file(realfile, zipname):
# The .ZIP is unencrypted, so generated contents needs protecting
def _encrypt_and_add_data(filename, data):
tempfile = os.path.join(config.tempfile_dir(), filename)
with EncryptingStreamer(config.master_key,
with EncryptingStreamer(config.get_master_key(),
dir=config.tempfile_dir()) as fd:
fd.write(data)
fd.save(tempfile)
@@ -488,7 +488,8 @@ def CreateReply(cls, idx, session, refs, msgid,
del headers['cc']

ref_ids = [t['headers_lc'].get('message-id') for t in trees]
ref_subjs = [t['headers_lc'].get('subject') for t in trees]
ref_subjs = [(t['summary'][4] or t['headers_lc'].get('subject'))
for t in trees]
msg_bodies = []
for t in trees:
# FIXME: Templates/settings for how we quote replies?
@@ -517,24 +518,37 @@ def CreateReply(cls, idx, session, refs, msgid,
fmt = _('Composing a reply from %(from)s to %(to)s')
session.ui.debug(fmt % headers)

extra_headers = []
for tree in trees:
try:
if 'decrypted' in tree['crypto']['encryption']['status']:
extra_headers.append(('x-mp-internal-should-encrypt', 'Y'))
extra_headers.append(('Encryption', 'openpgp-sign-encrypt'))
break
except KeyError:
pass

if cid:
# FIXME: Instead, we should use placeholders in the template
# and insert the quoted bits in the right place (or
# nowhere if the template doesn't want them).
msg_bodies[:0] = [cls._get_canned(idx, cid)]

return (Email.Create(idx, local_id, lmbox,
email = Email.Create(idx, local_id, lmbox,
msg_text='\n\n'.join(msg_bodies),
msg_subject=cls.prefix_subject(
ref_subjs[-1], 'Re:', cls._RE_REGEXP),
msg_from=headers.get('from', None),
msg_to=headers.get('to', []),
msg_cc=headers.get('cc', []),
msg_references=[i for i in ref_ids if i],
msg_headers=extra_headers,
msg_id=msgid,
save=(not ephemeral),
ephemeral_mid=ephemeral and ephemeral[0]),
ephemeral)
ephemeral_mid=ephemeral and ephemeral[0])


return (email, ephemeral)

def command(self):
session, config, idx = self.session, self.session.config, self._idx()
@@ -996,6 +1010,9 @@ def command(self, create=True, outbox=False):
return self._error(_('Failed to attach files'))

for email, update_string in email_updates:
if not email:
return self._error(_('Cannot find message'))
break
email.update_from_string(session, update_string, final=outbox)

emails = [e for e, u in email_updates]
@@ -1038,25 +1055,39 @@ def command(self, create=True, outbox=True):
class UnThread(CompositionCommand):
"""Remove a message from a thread."""
SYNOPSIS = (None, 'unthread', 'message/unthread', None)
HTTP_CALLABLE = ('POST', 'UPDATE')
HTTP_POST_VARS = {'mid': 'message-id'}
HTTP_CALLABLE = ('GET', 'POST')
HTTP_QUERY_VARS = {
'mid': 'message-id'}
HTTP_POST_VARS = {
'subject': 'Update the metadata subject as well'}

def command(self):
session, config, idx = self.session, self.session.config, self._idx()
args = list(self.args)

# On the CLI, anything after -- is the new metadata subject.
if '--' in args:
subject = ' '.join(args[(args.index('--')+1):])
args = args[:args.index('--')]
else:
subject = self.data.get('subject', [None])[0]

# Message IDs can come from post data
args = list(self.args)
for mid in self.data.get('mid', []):
args.append('=%s' % mid)
emails = [self._actualize_ephemeral(i) for i in
self._choose_messages(args, allow_ephemeral=True)]

if emails:
for email in emails:
idx.unthread_message(email.msg_mid())
self._background_save(index=True)
return self._return_search_results(
_('Unthreaded %d messages') % len(emails), emails)
if self.data.get('_method', 'POST') == 'POST':
for email in emails:
idx.unthread_message(email.msg_mid(), new_subject=subject)
self._background_save(index=True)
return self._return_search_results(
_('Unthreaded %d messages') % len(emails), emails)
else:
return self._return_search_results(
_('Unthread %d messages') % len(emails), emails)
else:
return self._error(_('Nothing to do!'))

@@ -7,6 +7,7 @@
import mailpile.security as security
from mailpile.crypto.gpgi import GnuPG
from mailpile.crypto.gpgi import GnuPGBaseKeyGenerator, GnuPGKeyGenerator
from mailpile.crypto.autocrypt_utils import generate_autocrypt_setup_code
from mailpile.plugins import EmailTransform, PluginManager
from mailpile.commands import Command, Action
from mailpile.eventlog import Event
@@ -961,7 +962,7 @@ def make_new_source():
else:
disco.policy = 'move'
disco.local_copy = True
disco.paths = ['']
disco.paths = ['/']
else:
disco.policy = 'ignore'
disco.local_copy = False
@@ -1020,10 +1021,7 @@ def _new_key_created(self, event, vcard_rid, passphrase):
config.event_log.log_event(event)

def _create_new_key(self, vcard, keytype):
passphrase = okay_random(20, self.session.config.master_key
).lower()
passphrase = '-'.join([passphrase[i:i+4] for i in
range(0, len(passphrase), 4)])
passphrase = generate_autocrypt_setup_code()
random_uid = vcard.random_uid
bits = int(keytype.replace('RSA', ''))
key_args = {
@@ -1144,11 +1142,10 @@ def _form_defaults(self):
'source-NEW-copy-local': True,
'source-NEW-delete-source': False,
'security-best-effort-crypto': True,
'security-use-autocrypt': False,
'security-always-sign': False,
'security-always-encrypt': False,
'security-always-encrypt': False,
'security-attach-keys': True, # FIXME: Autocrypt changes this
'security-use-autocrypt': True,
'security-attach-keys': False,
'security-prefer-inline': False,
'security-prefer-pgpmime': False,
'security-obscure-metadata': False,
@@ -1212,8 +1209,7 @@ class EditProfile(AddProfile):
"""Edit a profile"""
SYNOPSIS = (None, None, 'profiles/edit', None)
HTTP_QUERY_VARS = dict_merge(AddProfile.HTTP_QUERY_VARS, {
'rid': 'update by x-mailpile-rid',
})
'rid': 'update by x-mailpile-rid'})

def _vcard_to_post_vars(self, vcard):
cp = vcard.crypto_policy or ''
@@ -358,21 +358,49 @@ def command(self, slowly=False):
args.remove('--keep')
keep += 1

deleted, failed, mailboxes = [], [], []
for msg_idx in self._choose_messages(args):
e = Email(idx, msg_idx)
del_ok, mboxes = e.delete_message(self.session,
flush=False, keep=keep)
mailboxes.extend(mboxes)
if del_ok:
deleted.append(msg_idx)
else:
failed.append(msg_idx)
# We group messages by mailbox and delete in batches. This should
# avoid loading all the mailboxes into RAM at once, which is a big
# deal on larger setups.
targets = [Email(idx, mi) for mi in self._choose_messages(args)]
msg_ptr_pairs = [
(e, e.index.unique_mbox_ids(e.get_msg_info())) for e in targets]

if 'deletion' in self.session.config.sys.debug:
self.session.ui.debug('Targets: %s' % msg_ptr_pairs)

# Message are sorted so the ones present in the most mailboxes
# are listed first.
msg_ptr_pairs.sort(key=lambda mpp: (-len(mpp[1]), mpp[0]))

deleted, failed = [], []
while msg_ptr_pairs:
# Pick the largest set of mailboxes we have yet to delete from
mid_set = msg_ptr_pairs[0][1]

# Pick all the messages contained in this set of mailboxes
messages = [e for e, mids in msg_ptr_pairs
if ((mid_set | mids) == mid_set)]

# Go delete them!
mailboxes = []
for e in messages:
msg_idx = e.msg_idx_pos
del_ok, mboxes = e.delete_message(self.session,
flush=False, keep=keep)
mailboxes.extend(mboxes)
if del_ok:
deleted.append(msg_idx)
else:
failed.append(msg_idx)

# This will actually delete from mboxes, etc.
for m in set(mailboxes):
with m:
m.flush()
# This will actually delete from mboxes, etc.
for m in set(mailboxes):
with m:
m.flush()

# OK, these are done, reduce our target list
msg_ptr_pairs = [(e, mids) for e, mids in msg_ptr_pairs
if ((mid_set | mids) != mid_set)]

# FIXME: Trigger a background rescan of affected mailboxes, as
# the flush() above may have broken our pointers.
@@ -755,6 +783,20 @@ def _create_event(self):
self.event.data['healthy'] = True
HealthCheck.health_event = self.event

# Cancel any obsolete HealthCheck events we find
if self.session.config.event_log:
for ev in self.session.config.event_log.events():
if (ev.source == self.event.source and
ev.event_id != self.event.event_id):
ev.flags = ev.COMPLETE
self.session.config.event_log.log_event(ev)

@classmethod
def _mem_check(cls, session, config):
if config.detected_memory_corruption:
return _('Memory corruption detected') + '!'
return False

@classmethod
def _disk_check(cls, session, config):
if config.need_more_disk_space():
@@ -783,6 +825,7 @@ def check(cls, session, config):

now_healthy = True
for crit, name, check in ((True, 'disk', cls._disk_check),
(True, 'memcheck', cls._mem_check),
(True, 'readonly', cls._readonly_check)):
message = check(session, config)
if message:
@@ -975,7 +1018,8 @@ class ChangeDir(ListDir):
def command(self, args=None):
try:
args = list((args is None) and self.args or args or [])
os.chdir(os.path.expanduser(args.pop(0).encode('utf-8')))
os.chdir(FilePath.unalias(
os.path.expanduser(args.pop(0).encode('utf-8'))))
return ListDir.command(self, args=['.'])
except (OSError, IOError, UnicodeEncodeError), e:
return self._error(_('Failed to change directories: %s') % e)
@@ -1123,7 +1167,7 @@ def command(self):
fb = security.forbid_config_change(config, path)
if fb:
return self._error(fb)
elif path == 'master_key' and config.master_key:
elif path == 'master_key' and config.get_master_key():
return self._error(_('I refuse to change the master key!'))

# We don't have transactions really, but making sure the HTTPD
@@ -1132,7 +1176,7 @@ def command(self):
updated = {}
for path, value in ops:
if not force:
if path == 'master_key' and config.master_key:
if path == 'master_key' and config.get_master_key():
raise ValueError('Need --force to change master key.')
if path == 'sys.http_no_auth':
raise ValueError('Need --force to change auth policy.')
@@ -1208,7 +1252,7 @@ def command(self):
fb = security.forbid_config_change(config, path)
if fb:
return self._error(fb)
elif path == 'master_key' and config.master_key:
elif path == 'master_key' and config.get_master_key():
return self._error(_('I refuse to change the master key!'))

# We don't have transactions really, but making sure the HTTPD
@@ -1264,7 +1308,7 @@ def unset(cfg, key):
fb = security.forbid_config_change(config, v)
if fb:
return self._error(fb)
elif v == 'master_key' and config.master_key:
elif v == 'master_key' and config.get_master_key():
return self._error(_('I refuse to change the master key!'))

# We don't have transactions really, but making sure the HTTPD
@@ -0,0 +1,360 @@
import base64
import datetime
import re
import time
import urllib2
from email import encoders
from email.mime.base import MIMEBase

import mailpile.security as security
from mailpile.conn_brokers import Master as ConnBroker
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
from mailpile.commands import Command
from mailpile.crypto.autocrypt_utils import *
from mailpile.crypto.gpgi import GnuPG
from mailpile.crypto.gpgi import OpenPGPMimeSigningWrapper
from mailpile.crypto.gpgi import OpenPGPMimeEncryptingWrapper
from mailpile.crypto.gpgi import OpenPGPMimeSignEncryptWrapper
from mailpile.crypto.mime import UnwrapMimeCrypto, MessageAsString
from mailpile.crypto.state import EncryptionInfo, SignatureInfo
from mailpile.eventlog import GetThreadEvent
from mailpile.mailutils.emails import Email, ExtractEmails, ClearParseCache
from mailpile.mailutils.emails import MakeContentID
from mailpile.plugins import PluginManager, EmailTransform
from mailpile.plugins.vcard_gnupg import PGPKeysImportAsVCards
from mailpile.plugins.search import Search
from mailpile.plugins.keylookup.email_keylookup import get_pgp_key_keywords
from mailpile.util import sha1b64

_plugins = PluginManager(builtin=__file__)


##[ Misc. AutoCrypt-related API commands ]####################################


# FIXME: This really should be a record store, not an in-memory dict
def save_AutoCrypt_DB(config):
if config.autocrypt_db:
config.save_pickle(config.autocrypt_db, 'autocrypt_db')


def get_AutoCrypt_DB(config):
if not config.real_hasattr('autocrypt_db'):
try:
db = config.load_pickle('autocrypt_db')
except (IOError, EOFError):
db = {'state': {}}
config.real_setattr('autocrypt_db', db)
return config.autocrypt_db


class AutoCryptRecord(dict):
INIT_ORDER = ('key', 'ts-message-date', 'prefer-encrypt',
'count', 'mid', 'ts-last-seen')

def __init__(self, to,
key=None, ts_message_date=None, prefer_encrypt=None,
count=1, mid=None, ts_last_seen=None):
self['to'] = to
self['ts-message-date'] = ts_message_date or int(time.time())
self['ts-last-seen'] = ts_last_seen or self['ts-message-date']
self['key'] = key # Signature of key data (not key itself)
self['mid'] = mid # MID of most recent message with this key.
self['count'] = count # How many times have we seen this key?
self['prefer-encrypt'] = prefer_encrypt

def should_encrypt(self):
return (self['prefer-encrypt'] == 'mutual')

def save_to(self, db):
db[self['to']] = [self[k] for k in self.INIT_ORDER]
return self

@classmethod
def Load(cls, db, to):
return cls(to, *db[to])


def AutoCrypt_process_email(config, msg, msg_mid, msg_ts, sender_email,
autocrypt_header=None):
autocrypt_header = (
autocrypt_header or
extract_autocrypt_header(msg, to=sender_email))
gossip_headers = extract_autocrypt_gossip_headers(msg, to=sender_email)

db = get_AutoCrypt_DB(config)['state']
if autocrypt_header:
ts = msg_ts
to = autocrypt_header['addr']
mid = msg_mid
key_data = autocrypt_header['keydata']

# Trying to save RAM: we don't store full keys, just hashes of
# them. When or if we actually decide to use the key it must
# either be findable in e-mail (not deleted) or in a keychain.
# Since AutoCrypt is opportunistic, missing some chances to encrypt
# is by definition acceptable! We also deliberately do not use
# the key fingerprint here, as we would still like to detect and
# capture updates when subkeys change.
key = sha1b64(key_data).strip()
pe = autocrypt_header.get('prefer-encrypt')

try:
existing = AutoCryptRecord.Load(db, to)
if existing['key'] == key and existing['mid'] != mid:
# This is the same key! Count it.
existing['count'] += 1

# If and only if this header is newer than what we have on
# file: update some of our attributes.
if existing['ts-last-seen'] < ts:
existing['ts-last-seen'] = ts
existing['mid'] = mid
existing['prefer-encrypt'] = pe

# If it's old and provides us with an earlier date for
# the "origin" of this key, make note of that as well.
elif existing['ts-message-date'] > ts:
existing['ts-message-date'] = ts

# Add the raw key data (for use downstream), save, return.
return existing.save_to(db)

elif existing['ts-last-seen'] >= ts:
if existing['ts-message-date'] < ts:
# FIXME: This is evidence sender has multiple clients
# doing AutoCrypt at once. That's a problem! We might
# want to make a note of this and do something about it.
# This is a point to discuss with the AutoCrypt group.
pass

# Header is older than what we already have on file, ignore!
# But... return the parsed record, even if this is a no-op.
# This allows the keyword extractor to use the data, at
# the expense of things seeming more exciting than they
# really are when run manually.
return AutoCryptRecord(
to, key=key, ts=ts, prefer_encrypt=pe, mid=mid)

except (TypeError, KeyError):
pass

# Create a new record, yay!
record = AutoCryptRecord(
to, key=key, ts_message_date=ts, prefer_encrypt=pe, mid=mid)

return record.save_to(db)

# If we get this far, we have no valid AutoCrypt header (new or old).
# Remove address from our database to save resources. We don't care
# about the null states at the moment.
if sender_email in db:
del db[sender_email]
return False

return None


##[ AutoCrypt debugging and API commands ]#####################################

class AutoCryptSearch(Command):
"""Search for the AutoCrypt database."""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/autocrypt/search', 'crypto/autocrypt/search', '<emails>')
HTTP_CALLABLE = ('GET', )
HTTP_QUERY_VARS = {'q': 'emails'}

class CommandResult(Command.CommandResult):
def as_text(self):
if self.result:
r = self.result
return '\n'.join(["%s: %s (%s)" % (
to, r[to], r[to].should_encrypt())
for to in sorted(r.keys())])
else:
return _("No results")

def command(self):
args = list(self.args)
for q in self.data.get('q', []):
args.extend(q.split())

db = get_AutoCrypt_DB(self.session.config)['state']
results = dict((e, AutoCryptRecord.Load(db, e))
for e in args if e in db)

if results:
return self._success(_("Found %d results") % len(results.keys()),
results)
else:
return self._error(_("Not found"), results)


class AutoCryptForget(Command):
"""Forget all AutoCrypt state for a list of e-mail address."""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/autocrypt/forget', 'crypto/autocrypt/forget', '<emails>')
HTTP_CALLABLE = ('POST', )
HTTP_QUERY_VARS = {'email': 'emails'}

def command(self):
args = list(self.args)
args.extend(self.data.get('email', []))

forgot = []
changes = 0
db = get_AutoCrypt_DB(self.session.config)['state']
for e in args:
if e in db:
changes += 1
del db[e]
forgot.append(e)

if changes:
save_AutoCrypt_DB(self.session.config)
return self._success(_("Forgot %d recipients") % changes, forgot)
else:
return self._error(_("Not found"))


class AutoCryptParse(Command):
"""Parse the AutoCrypt header from a message (or messages)."""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/autocrypt/parse', 'crypto/autocrypt/parse', '<emails>')
HTTP_CALLABLE = ('POST', )

def command(self):
session, config, idx = self.session, self.session.config, self._idx()
args = list(self.args)

emails = [Email(idx, i) for i in self._choose_messages(args)]
db = get_AutoCrypt_DB(config)['state']
updated = []

for e in emails:
msg = e.get_msg()
if 'autocrypt' in msg:
sender = e.get_sender()
update = AutoCrypt_process_email(
config, e.get_msg(), e.msg_mid(),
int(e.get_msg_info(e.index.MSG_DATE), 36), sender)
if update is not None:
# Note: update==False means an entry was removed, which
# is an interesting event!
updated.append(sender)

if updated:
save_AutoCrypt_DB(config)

return self._success("Updated %d records" % len(updated), updated)


class AutoCryptPeers(Command):
"""List known AutoCrypt Peers and their state."""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/autocrypt/peers', 'crypto/autocrypt/peers', None)
HTTP_CALLABLE = ('POST', )

def command(self):
session, config, idx = self.session, self.session.config, self._idx()
args = list(self.args)

db = get_AutoCrypt_DB(config)['state']

return self._success(_("Found %d peers") % len(db), db)


def autocrypt_meta_kwe(index, msg_mid, msg, msg_size, msg_ts, body_info=None):
keywords = set([])
config = index.config

if 'autocrypt' in msg:
sender = ExtractEmails(msg['from'])[0]
autocrypt_header = extract_autocrypt_header(msg, to=sender)

if autocrypt_header:
keywords.add('pgp:has')
keywords.add('autocrypt:has')
key_data = autocrypt_header.get('keydata')
if key_data:
keywords |= set(get_pgp_key_keywords(key_data))

AutoCrypt_process_email(config, msg, msg_mid, msg_ts, sender,
autocrypt_header=autocrypt_header)

save_AutoCrypt_DB(config)

return keywords


class AutoCryptTxf(EmailTransform):
"""
This is an outgoing email content transform for adding autocrypt headers.
Note: This transform relies on Memory Hole code elsewhere to correctly obscure
the Gossip headers. Priorities must be set accordingly.
"""
def TransformOutgoing(self, sender, rcpts, msg, **kwargs):
matched = False
keydata = mutual = sender_keyid = key_binary = None

gnupg = GnuPG(self.config, event=GetThreadEvent())
profile = self._get_sender_profile(sender, kwargs)
vcard = profile['vcard']
if vcard is not None:
crypto_format = vcard.crypto_format
sender_keyid = vcard.pgp_key
if sender_keyid and 'autocrypt' in crypto_format:
key_binary = gnupg.get_minimal_key(key_id=sender_keyid,
user_id=sender)

if key_binary:
mutual = 'E' in crypto_format.split('+')[0].split(':')[-1]
msg["Autocrypt"] = make_autocrypt_header(
sender, key_binary, prefer_encrypt_mutual=mutual)

if 'encrypt' in msg.get('Encryption', '').lower():
gossip_list = []
for rcpt in rcpts:
# FIXME: Check if any of the recipients are in the BCC
# header; omit their keys if so?
try:
# This *should* always succeed: if we are encrypting,
# then the key we encrypt to should already be in
# the keychain.
if '#' in rcpt:
rcpt, rcpt_keyid = rcpt.split('#')
else:
# This happens when composing in the CLI.
rcpt_keyid = rcpt
if (rcpt != sender) and rcpt_keyid:
kb = gnupg.get_minimal_key(key_id=rcpt_keyid,
user_id=rcpt)
if kb:
gossip_list.append(make_autocrypt_header(
rcpt, kb, prefix='Autocrypt-Gossip'))
except (ValueError, IndexError):
pass
if len(gossip_list) > 1:
# No point gossiping peoples keys back to them alone.
for hdr in gossip_list:
msg.add_header('Autocrypt-Gossip', hdr)

matched = True

return sender, rcpts, msg, matched, True


_plugins.register_meta_kw_extractor('autocrypt', autocrypt_meta_kwe)
_plugins.register_commands(
AutoCryptSearch,
AutoCryptForget,
AutoCryptParse,
AutoCryptPeers)

# Note: we perform our transformations BEFORE the GnuPG transformations
# (prio 500), so the memory hole transformation can take care of hiding
# the Autocrypt-Gossip headers.
_plugins.register_outgoing_email_content_transform(
'400_autocrypt', AutoCryptTxf)
@@ -1,3 +1,4 @@
import copy
import datetime
import re
import time
@@ -16,7 +17,7 @@
from mailpile.crypto.gpgi import OpenPGPMimeSignEncryptWrapper
from mailpile.crypto.mime import UnwrapMimeCrypto, MessageAsString
from mailpile.crypto.mime import OBSCURE_HEADERS_MILD, OBSCURE_HEADERS_EXTREME
from mailpile.crypto.mime import ObscureSubject
from mailpile.crypto.mime import OBSCURE_HEADERS_REQUIRED, ObscureSubject
from mailpile.crypto.state import EncryptionInfo, SignatureInfo
from mailpile.eventlog import GetThreadEvent
from mailpile.mailutils.addresses import AddressHeaderParser
@@ -52,6 +53,7 @@ def _wrap_key_in_html_vars(self, title, keydata):
"key": keydata}

def TransformOutgoing(self, sender, rcpts, msg, **kwargs):
# *** msg is email.mime.multipart.MIMEMultipart
matched = False
gnupg = None
sender_keyid = None
@@ -158,9 +160,10 @@ def TransformOutgoing(self, sender, rcpts, msg,
elif 'obscure_meta' in crypto_format:
obscured = OBSCURE_HEADERS_MILD
elif self.config.prefs.encrypt_subject:
obscured = {'subject': ObscureSubject}
obscured = copy.copy(OBSCURE_HEADERS_REQUIRED)
obscured['subject'] = ObscureSubject
else:
obscured = {}
obscured = OBSCURE_HEADERS_REQUIRED

if 'sign' in crypto_policy and 'encrypt' in crypto_policy:
wrapper = OpenPGPMimeSignEncryptWrapper
@@ -384,14 +387,21 @@ def command(self):


class GPGKeyListSecret(Command):
"""List Secret GPG Keys"""
"""List secret GPG Keys, --usable omits disabled, revoked, expired."""
ORDER = ('', 0)
SYNOPSIS = (None, 'crypto/gpg/keylist/secret',
'crypto/gpg/keylist/secret', '<address>')
'crypto/gpg/keylist/secret', '[--usable]')
HTTP_CALLABLE = ('GET', )

def command(self):
res = self._gnupg().list_secret_keys()

all = self._gnupg().list_secret_keys()
if '--usable' in self.args:
res = {fprint : all[fprint] for fprint in all if not (
all[fprint]['disabled'] or all[fprint]['revoked'] or
all[fprint]['expired'])}
else:
res = all
return self._success("Searched for secret keys", res)


@@ -69,7 +69,9 @@ class CryptoPolicy(CryptoPolicyBaseAction):
SYNOPSIS = (None, 'crypto_policy', 'crypto_policy', '[<emailaddresses>]')
ORDER = ('Internals', 9)
HTTP_CALLABLE = ('GET',)
HTTP_QUERY_VARS = {'email': 'e-mail addresses'}
HTTP_QUERY_VARS = {
'email': 'e-mail addresses',
'should-encrypt': 'Assume a base-line policy of wanting encryption'}

@classmethod
def ShouldAttachKey(cls, config, vcards=None, emails=None, ttl=90):
@@ -142,7 +144,7 @@ def _encryption_ratio(self, session, idx, email, minimum=5):
return float(len(crypto)) / len(recent)

@classmethod
def crypto_policy(cls, session, idx, emails):
def crypto_policy(cls, session, idx, emails, should_encrypt=False):
config = session.config
for i in range(0, len(emails)):
if '<' in emails[i]:
@@ -156,6 +158,11 @@ def crypto_policy(cls, session, idx, emails):
'best-effort', 'send_keys')
cpolicy = default[-2]
cformat = default[-1]
if should_encrypt and ('encrypt' not in cpolicy):
if 'sign' in cpolicy or 'best-effort' == cpolicy:
cpolicy = 'sign-encrypt'
else:
cpolicy = 'encrypt'

# Try and merge all the user policies into one. This may lead
# to conflicts which cannot be resolved.
@@ -249,10 +256,12 @@ def crypto_policy(cls, session, idx, emails):

def command(self):
emails = list(self.args) + self.data.get('email', [])
should_encrypt = self.data.get('should-encrypt', False)
if len(emails) < 1:
return self._error('Please provide at least one email address!')

result = self.crypto_policy(self.session, self._idx(), emails)
result = self.crypto_policy(self.session, self._idx(), emails,
should_encrypt=should_encrypt)
return self._success(result['reason'], result=result)


@@ -11,6 +11,7 @@
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
from mailpile.plugins import PluginManager
from mailpile.security import GetUserSecret
from mailpile.ui import Session
from mailpile.util import *

@@ -24,11 +25,6 @@ def UpdateGUIState():
gui.change_state()


def GetUserSecret(config):
"""Return a secret that only this Unix user could know."""
return 'FIXME12345'


class GuiOMaticConnection(threading.Thread):
def __init__(self, config, sock, main=False):
threading.Thread.__init__(self)
@@ -279,7 +275,7 @@ class ConnectToGuiOMatic(Command):
def command(self):
if self.data.get('_method'):
secret, style, port = self.args
if secret != GetUserSecret(self.session.config):
if secret != GetUserSecret(self.session.config.workdir):
raise AccessError('Invalid User Secret')
elif len(self.args) == 2:
style, port = self.args
@@ -112,6 +112,18 @@ def command(self, save=True, auto=False):
return self._success(_('Generated Javascript API'), result=res)


class ProgressiveWebApp(RenderPage):
"""Output PWA Manifest"""
SYNOPSIS = (None, None, 'jsapi/pwa', None)
ORDER = ('Internals', 0)
HTTP_CALLABLE = ('GET', )
HTTP_AUTH_REQUIRED = False
HTTP_QUERY_VARS = {'ts': 'Cache busting timestamp'}

def command(self):
return self._success(_('Rendered Progressive Web App Data'), result={})


class HttpProxyGetRequest(Command):
"""HTTP GET content from the public web"""
SYNOPSIS = (None, None, 'http_proxy', None)
@@ -166,4 +178,4 @@ def command(self):
raise SuppressHtmlOutput()


_plugins.register_commands(JsApi, HttpProxyGetRequest)
_plugins.register_commands(JsApi, ProgressiveWebApp, HttpProxyGetRequest)
@@ -11,7 +11,7 @@
from mailpile.vcard import AddressInfo


__all__ = ['email_keylookup', 'nicknym', 'dnspka']
__all__ = ['email_keylookup', 'nicknym', 'wkd'] # Disabled: dnspka

KEY_LOOKUP_HANDLERS = []

@@ -21,7 +21,8 @@
def register_crypto_key_lookup_handler(handler):
if handler not in KEY_LOOKUP_HANDLERS:
KEY_LOOKUP_HANDLERS.append(handler)
KEY_LOOKUP_HANDLERS.sort(key=lambda h: (h.LOCAL and 0 or 1, h.PRIORITY))
KEY_LOOKUP_HANDLERS.sort(
key=lambda h: (0 if h.LOCAL else 1, h.PRIORITY, -h.SCORE))


def _score_validity(validity, local=False):
@@ -69,13 +70,13 @@ def _update_scores(session, key_id, key_info, known_keys_list):
bits = int(key_info["keysize"])
score = bits // 1024

if bits >= 4096:
if bits >= 4096:
key_strength = _('Encryption key is very strong')
elif bits >= 3072:
elif bits >= 3072:
key_strength = _('Encryption key is strong')
elif bits >= 2048:
key_strength = _('Encryption key is good')
else:
else:
key_strength = _('Encryption key is weak')

key_info['scores']['Encryption key strength'] = [score, key_strength]
@@ -131,6 +132,7 @@ def lookup_crypto_keys(session, address,
handlers = KEY_LOOKUP_HANDLERS

ungotten = get and get[:] or []
progress = [ ]

for handler in handlers:
if get and not ungotten:
@@ -142,11 +144,19 @@ def lookup_crypto_keys(session, address,
if not allowremote and not h.LOCAL:
continue

if found_keys and (not h.PRIVACY_FRIENDLY) and (not origins):
# We only try the privacy-hostile methods if we haven't
# found any keys (unless origins were specified).
if not ungotten:
continue

progress.append(h.NAME)
if event:
ordered_keys.sort(key=lambda k: -k["score"])
event.message = _('Searching for encryption keys in: %s'
) % _(h.NAME)
event.private_data = {"result": ordered_keys,
"progress": progress,
"runningsearch": h.NAME}
session.config.event_log.log_event(event)

@@ -162,8 +172,9 @@ def lookup_crypto_keys(session, address,
strict_email_match=strict_email_match,
get=(wanted if (get is not None) else None))
ungotten[:] = wanted
except (TimedOut, IOError, KeyError, ValueError, TypeError,
AttributeError):
except KeyboardInterrupt:
raise
except:
if session.config.sys.debug:
traceback.print_exc()
results = {}
@@ -214,23 +225,30 @@ class KeyLookup(Command):
HTTP_QUERY_VARS = {
'email': 'The address to find a encryption key for (strict)',
'address': 'The nick or address to find a encryption key for (fuzzy)',
'allowremote': 'Whether to permit remote key lookups (defaults to true)'
}
'allowremote': 'Whether to permit remote key lookups (default=Yes)',
'origins': 'Specify which origins to check (or * for all)'}

def command(self):
if len(self.args) > 1:
allowremote = self.args.pop()
else:
allowremote = self.data.get('allowremote', True)
allowremote = self.data.get('allowremote', ['Y'])[0]
if allowremote.lower()[:1] in ('n', 'f'):
allowremote = False

origins = self.data.get('origins')
if '*' in (origins or []):
origins = [h.NAME for h in KEY_LOOKUP_HANDLERS]

email = " ".join(self.data.get('email', []))
address = " ".join(self.data.get('address', self.args))
result = lookup_crypto_keys(self.session, email or address,
strict_email_match=email,
event=self.event,
allowremote=allowremote)
allowremote=allowremote,
origins=origins)
return self._success(_n('Found %d encryption key',
'Found %d encryption keys',
'Found %d encryption keys',
len(result)) % len(result),
result=result)

@@ -308,7 +326,8 @@ def _seen_enough_signatures(self, idx, email, keyinfo):

def command(self):
emails = set(list(self.args)) | set(self.data.get('email', []))
safe_assert(emails)
if not emails:
return self._success('Nothing Happened')

idx = self._idx()
gnupg = self._gnupg(dry_run=True)
@@ -375,8 +394,8 @@ def command(self):
ClearParseCache(pgpmime=True)

# i18n note: Not translating things here, since messages are not
# generally use-facing and we want to reduce load on our
# translators.
# generally user-facing and we want to reduce load on
# our translators.
return self._success('Evaluated key TOFU', result={
'missing_keys': missing,
'imported_keys': imported,
@@ -394,7 +413,9 @@ class LookupHandler:
NAME = "NONE"
TIMEOUT = 2
PRIORITY = 10000
PRIVACY_FRIENDLY = False
LOCAL = False
SCORE = 0

def __init__(self, session, known_keys_list):
self.session = session
@@ -419,10 +440,11 @@ def _lookup(self, address, strict_email_match=False):
def lookup(self, address, strict_email_match=False, get=None):
all_keys = self._lookup(address, strict_email_match=strict_email_match)
keys = {}
if get is not None:
get = [unicode(g).upper() for g in get]
for key_id, key_info in all_keys.iteritems():
fprint = key_info.get('fingerprint', '')
fprint = unicode(key_info.get('fingerprint', '')).upper()
if (get is None) or (fprint and fprint in get):

score, reason = self._score(key_info)
if 'validity' in key_info:
vscore, vreason = _score_validity(key_info['validity'])
@@ -450,10 +472,12 @@ def key_import(self, address):
class KeychainLookupHandler(LookupHandler):
NAME = "GnuPG keychain"
LOCAL = True
PRIVACY_FRIENDLY = True
PRIORITY = 0
SCORE = 8

def _score(self, key):
return (1, _('Found encryption key in keychain'))
return (self.SCORE, _('Found encryption key in keychain'))

def _getkey(self, key):
return False # Already on keychain
@@ -476,20 +500,36 @@ def _lookup(self, address, strict_email_match):
results[key_id][k] = key_info[k]
return results

def _getkey(self, key):
pass


class KeyserverLookupHandler(LookupHandler):
NAME = "PGP Keyservers"
LOCAL = False
TIMEOUT = 20 # We know these are slow...
PRIVACY_FRIENDLY = False
PRIORITY = 200
SCORE = 1

def _score(self, key):
return (1, _('Found encryption key in keyserver'))
return (self.SCORE, _('Found encryption key in keyserver'))

def _allowed_by_config(self):
# FIXME: When direct keyserver contact works, change this.
config = self.session.config
return (
config.sys.proxy.protocol in ('none', 'unknown', 'system')
or config.sys.proxy.fallback)

def _lookup(self, address, strict_email_match=False):
# FIXME: We should probably just contact the keyservers directly.
#
# Queries look like this:
#
# https://hkps.pool.sks-keyservers.net/pks/lookup?
# search=EMAIL&op=index&fingerprint=on&options=mr
#
if not self._allowed_by_config():
return {}

results = self._gnupg().search_key(address)
if strict_email_match:
for key in results.keys():
@@ -500,6 +540,15 @@ def _lookup(self, address, strict_email_match=False):
return results

def _getkey(self, key):
# FIXME: We should probably just contact the keyservers directly.
#
# Key downloads look like this:
#
# https://hkps.pool.sks-keyservers.net/pks/lookup?
# search=0xFINGERPRINT&op=get&options=mr
#
if not self._allowed_by_config():
return {}
return self._gnupg().recv_key(key['fingerprint'])


@@ -509,4 +558,5 @@ def _getkey(self, key):
# We do this down here, as that seems to make the Python module loader
# things happy enough with the circular dependencies...
from mailpile.plugins.keylookup.email_keylookup import EmailKeyLookupHandler
from mailpile.plugins.keylookup.dnspka import DNSPKALookupHandler
from mailpile.plugins.keylookup.wkd import WKDLookupHandler
# Disabled: from mailpile.plugins.keylookup.dnspka import DNSPKALookupHandler
@@ -1,3 +1,13 @@
# Note, this code is not used by default, because:
#
# 1. DNS is almost entirely insecure and not private
# 2. Our current code-base cannot route DNS lookups over Tor
# 3. Nobody uses this. Even Werner himself has an obsolete key in DNS.
# 4. Web Key Directory solves the same problem, properly.
#
# The code is left here in the repo as a historic oddity; a resource for
# people to explore and learn.

try:
import DNS
except:
@@ -21,6 +31,8 @@ class DNSPKALookupHandler(LookupHandler):
NAME = _("DNS PKA records")
TIMEOUT = 10
PRIORITY = 100
PRIVACY_FRIENDLY = False # Bypasses Tor, currently.
SCORE = 3

def __init__(self, *args, **kwargs):
LookupHandler.__init__(self, *args, **kwargs)
@@ -30,7 +42,7 @@ def __init__(self, *args, **kwargs):
self.req = DNS.Request(qtype="TXT")

def _score(self, key):
return (9, _('Found key in DNS PKA'))
return (self.SCORE, _('Found key in DNS PKA'))

def _lookup(self, address, strict_email_match=True):
"""
@@ -74,9 +86,7 @@ def _keyinfo(self, entry):
"pkaver": pkaver}}

def _getkey(self, key):
if key["fingerprint"] and not key["url"]:
res = self._gnupg().recv_key(key["fingerprint"])
elif key["url"]:
if key["url"] and key["url"][:6].lower() in ('http:/', 'https:'):
with ConnBroker.context(need=[ConnBroker.OUTGOING_HTTP]):
r = urllib2.urlopen(key["url"])
result = r.readlines()
@@ -91,8 +101,10 @@ def _getkey(self, key):
result = "".join(result[start:end])
res = self._gnupg().import_keys(result)
return res
elif key["fingerprint"]:
res = self._gnupg().recv_key(key["fingerprint"])
else:
raise ValueError("Need a fingerprint or a URL")
raise ValueError("Need a fingerprint or a URL. key=%s" % key)


_ = gettext
@@ -1,17 +1,16 @@
import datetime
import time
import copy
from pgpdump.utils import PgpdumpException

from mailpile.crypto.autocrypt_utils import *
from mailpile.crypto.keydata import get_keydata
from mailpile.i18n import gettext
from mailpile.plugins import PluginManager
from mailpile.plugins.keylookup import LookupHandler
from mailpile.plugins.keylookup import register_crypto_key_lookup_handler
from mailpile.plugins.search import Search
from mailpile.mailutils.emails import Email

import pgpdump


_ = lambda t: t
_plugins = PluginManager(builtin=__file__)
@@ -36,63 +35,13 @@ def _might_be_pgp_key(filename, mimetype):
'signature' not in filename))


def _get_creation_time(m):
"""Compatibility shim, for differing versions of pgpdump"""
try:
return m.creation_time
except AttributeError:
try:
return m.datetime
except AttributeError:
return datetime.datetime(1970, 1, 1, 00, 00, 00)


def _get_keydata(data):
results = []
try:
if "-----BEGIN" in data:
ak = pgpdump.AsciiData(data)
else:
ak = pgpdump.BinaryData(data)
packets = list(ak.packets())
except (TypeError, IndexError, PgpdumpException):
return []

now = time.time()
for m in packets:
try:
if isinstance(m, pgpdump.packet.PublicKeyPacket):
size = str(int(1.024 *
round(len('%x' % (m.modulus or 0)) / 0.256)))
validity = ('e'
if (0 < (int(m.expiration_time or 0)) < now)
else '')
results.append({
"fingerprint": m.fingerprint,
"created": _get_creation_time(m),
"validity": validity,
"keytype_name": (m.pub_algorithm or '').split()[0],
"keysize": size,
"uids": [],
})
if isinstance(m, pgpdump.packet.UserIDPacket) and results:
# FIXME: This used to happen with results=[], does that imply
# UIDs sometimes come before the PublicKeyPacket?
results[-1]["uids"].append({"name": m.user_name,
"email": m.user_email})
except (TypeError, AttributeError, KeyError, IndexError, NameError):
import traceback
traceback.print_exc()

# This will only return keys that have UIDs
return [k for k in results if k['uids']]


class EmailKeyLookupHandler(LookupHandler, Search):
NAME = _("E-mail keys")
PRIORITY = 5
TIMEOUT = 25 # 5 seconds per message we are willing to parse
LOCAL = True
PRIVACY_FRIENDLY = True
SCORE = 1

def __init__(self, session, *args, **kwargs):
LookupHandler.__init__(self, session, *args, **kwargs)
@@ -103,7 +52,7 @@ def __init__(self, session, *args, **kwargs):
_PRUNE_GLOBAL_KEY_CACHE()

def _score(self, key):
return (1, _('Found key in local e-mail'))
return (self.SCORE, _('Found key in local e-mail'))

def _lookup(self, address, strict_email_match=False):
results = {}
@@ -136,36 +85,55 @@ def _get_message_keys(self, messageid):
keys = self.key_cache.get(messageid, [])
if not keys:
email = Email(self._idx(), messageid)

# First we check the Autocrypt headers
msg = email.get_msg(pgpmime='all')
for ach in ([extract_autocrypt_header(msg)] +
extract_autocrypt_gossip_headers(msg)):
if 'keydata' in ach:
for keydata in get_keydata(ach['keydata'],
autocrypt_header=ach,
include_subkeys=False):
keys.append((keydata, ach['keydata']))

# Then go looking at the attachments
attachments = email.get_message_tree(want=["attachments"]
)["attachments"]
for part in attachments:
if len(keys) > 100: # Just to set some limit...
break
if _might_be_pgp_key(part["filename"], part["mimetype"]):
key = part["part"].get_payload(None, True)
for keydata in _get_keydata(key):
for keydata in get_keydata(key, include_subkeys=False):
keys.append((keydata, key))
if len(keys) > 5: # Just to set some limit...
break
self.key_cache[messageid] = keys
return keys


def has_pgpkey_data_kw_extractor(index, msg, mimetype, filename, part, loader,
body_info=None, **kwargs):
def get_pgp_key_keywords(data):
kws = []
if _might_be_pgp_key(filename, mimetype):
data = _get_keydata(part.get_payload(None, True))
if data:
data = get_keydata(data, include_subkeys=True)
if data:
for keydata in data:
for uid in keydata.get('uids', []):
if uid.get('email'):
kws.append('%s:pgpkey' % uid['email'].lower())
if data:
body_info['pgp_key'] = filename
kws += ['pgpkey:has']
fingerprint = keydata["fingerprint"].lower()
kws.append('pgpkey:has')
kws.append('%s:pgpkey' % fingerprint)
kws.append('%s:pgpkey' % fingerprint[-16:])
return kws

# FIXME: If this is a PGP key, make all the key IDs searchable so
# we can find this file again later! Searching by e-mail is lame.
# This is issue #655 ?

def has_pgpkey_data_kw_extractor(index, msg, mimetype, filename, part, loader,
body_info=None, **kwargs):
kws = []
if _might_be_pgp_key(filename, mimetype):
new_kws = get_pgp_key_keywords(part.get_payload(None, True))
if new_kws:
body_info['pgp_key'] = filename
kws += new_kws
return kws


@@ -0,0 +1,94 @@
import hashlib
import urllib2

from mailpile.conn_brokers import Master as ConnBroker
from mailpile.crypto.keydata import get_keydata
from mailpile.i18n import gettext
from mailpile.plugins.keylookup import LookupHandler
from mailpile.plugins.keylookup import register_crypto_key_lookup_handler

ALPHABET = "ybndrfg8ejkmcpqxot1uwisza345h769"
SHIFT = 5
MASK = 31

#
# Encodes data using ZBase32 encoding
# See: https://tools.ietf.org/html/rfc6189#section-5.1.6
#
def _zbase_encode(data):
if len(data) == 0:
return ""
buffer = ord(data[0])
index = 1
bitsLeft = 8
result = ""
while bitsLeft > 0 or index < len(data):
if bitsLeft < SHIFT:
if index < len(data):
buffer = buffer << 8
buffer = buffer | (ord(data[index]) & 0xFF)
bitsLeft = bitsLeft + 8
index = index + 1
else:
pad = SHIFT - bitsLeft
buffer = buffer << pad
bitsLeft = bitsLeft + pad
bitsLeft = bitsLeft - SHIFT
result = result + ALPHABET[MASK & (buffer >> bitsLeft)]
return result

_ = lambda t: t

#
# Support for Web Key Directory (WKD) lookup for keys.
# See: https://wiki.gnupg.org/WKD and https://datatracker.ietf.org/doc/draft-koch-openpgp-webkey-service/
#
class WKDLookupHandler(LookupHandler):
NAME = _("Web Key Directory")
TIMEOUT = 10
PRIORITY = 50 # WKD is better than keyservers and better than DNS
PRIVACY_FRIENDLY = True # These lookups can go over Tor
SCORE = 5

def __init__(self, *args, **kwargs):
LookupHandler.__init__(self, *args, **kwargs)
self.key_cache = { }

def _score(self, key):
return (self.SCORE, _('Found key in Web Key Directory'))

def _lookup(self, address, strict_email_match=True):
local, _, domain = address.partition("@")
local_part_encoded = _zbase_encode(
hashlib.sha1(local.lower().encode('utf-8')).digest())

url = ("https://%s/.well-known/openpgpkey/hu/%s"
% (domain, local_part_encoded))

# This fails A LOT, so just swallow the most common errors.
try:
with ConnBroker.context(need=[ConnBroker.OUTGOING_HTTPS]):
r = urllib2.urlopen(url)
except urllib2.URLError:
# This gets thrown on TLS key mismatch
return {}
except urllib2.HTTPError as e:
if e.code == 404:
return {}
raise

result = r.read()
keydata = get_keydata(result)[0]
self.key_cache[keydata["fingerprint"]] = result
return {keydata["fingerprint"]: keydata}

def _getkey(self, keydata):
data = self.key_cache.pop(keydata["fingerprint"])
if data:
return self._gnupg().import_keys(data)
else:
raise ValueError("Key not found")


_ = gettext
register_crypto_key_lookup_handler(WKDLookupHandler)
@@ -51,6 +51,7 @@ class OAuth2(TestableWebbable):
'username': 'User name',
'code': 'Authorization code',
'error': 'Error code',
'scope': 'OAuth2 scope (ignored)',
'state': 'State token'
}
HARD_CODED_OAUTH2 = {
@@ -15,6 +15,7 @@ class Plugins(mailpile.commands.Command):
SYNOPSIS = (None, 'plugins', None, '[<plugins>]')
ORDER = ('Config', 9)
HTTP_CALLABLE = ('GET',)
CONFIG_REQUIRED = False

def command(self):
pm = self.session.config.plugins
@@ -55,7 +56,12 @@ def command(self):
for plugin in args:
try:
# FIXME: This fails to update the ConfigManger
# FIXME: This fails to start workers
discovered = plugins.DISCOVERED
if plugins.load(plugin, process_manifest=True, config=config):
if (plugin in discovered and not
discovered[plugin][1].get('require_login', True)):
config.sys.plugins_early.append(plugin)
config.sys.plugins.append(plugin)
else:
raise ValueError('Loading failed')
@@ -93,6 +99,8 @@ def command(self):
for plugin in args:
while plugin in config.sys.plugins:
config.sys.plugins.remove(plugin)
while plugin in config.sys.plugins_early:
config.sys.plugins_early.remove(plugin)

config.save()
return self._success(_('Disabled plugins: %s (restart required)'
@@ -14,7 +14,7 @@
from mailpile.mailutils.emails import Email, ExtractEmails, ExtractEmailAndName
from mailpile.plugins import PluginManager
from mailpile.search import MailIndex
from mailpile.security import evaluate_signature_key_trust
from mailpile.security import evaluate_sender_trust
from mailpile.urlmap import UrlMap
from mailpile.util import *
from mailpile.ui import SuppressHtmlOutput
@@ -149,6 +149,9 @@ def _metadata(self, msg_info):
if sender_vcard:
if sender_vcard.kind == 'profile':
expl['flags']['from_me'] = True
else:
expl['flags']['from_contact'] = True

tag_types = [self.idx.config.get_tag(t).type for t in expl['tag_tids']]
for t in self.TAG_TYPE_FLAG_MAP:
if t in tag_types:
@@ -300,7 +303,8 @@ def render(prefix, mid, first=False):
return thread

WANT_MSG_TREE = ('attachments', 'html_parts', 'text_parts', 'header_list',
'editing_strings', 'crypto')
'headerprints', 'editing_strings', 'crypto', '_cleaned',
'trust')
PRUNE_MSG_TREE = ('headers', ) # Added by editing_strings

def _prune_msg_tree(self, tree):
@@ -368,8 +372,8 @@ def _message(self, email):
problem = _('Failed process message crypto (decrypt, etc).')
email.evaluate_pgp(tree, decrypt=True)

problem = _("Failed to evalute key trust")
evaluate_signature_key_trust(self.session.config, email, tree)
problem = _("Failed to evalute sender trust")
evaluate_sender_trust(self.session.config, email, tree)

editing_strings = tree.get('editing_strings')
if editing_strings:
@@ -441,6 +445,7 @@ def __init__(self, session, idx,
'end': start + min(num, len(results)-start),
'total': len(results),
},
'search_order': session.order,
'search_terms': session.searched,
'index_capabilities': dict((c, True) for c in idx.CAPABILITIES),
'tag_capabilities': {},
@@ -4,6 +4,7 @@
import random
import socket
import sys
import time
from urllib import urlencode
from urllib2 import urlopen
from lxml import objectify
@@ -119,12 +120,6 @@ class SetupMagic(Command):
'auto_action': '-spam +trash',
'auto_tag': 'fancy'
},
'MaybeSpam': {
'display': 'invisible',
'icon': 'icon-spam',
'label_color': '10-orange',
'name': _('MaybeSpam'),
},
'Ham': {
'type': 'ham',
'display': 'invisible',
@@ -309,7 +304,6 @@ def basic_app_config(self, session,
len(session.config.prefs.autotag) == 0):
session.config.prefs.autotag.append({
'match_tag': 'spam',
'unsure_tag': 'maybespam',
'tagger': 'spambayes',
'trainer': 'spambayes'
})
@@ -322,12 +316,6 @@ def basic_app_config(self, session,
session.config.save()
session.config.prepare_workers(session, daemons=want_daemons)

# Scan GnuPG keychain in background
from mailpile.plugins.vcard_gnupg import PGPKeysImportAsVCards
session.config.slow_worker.add_unique_task(
session, 'initialpgpkeyimport',
lambda: PGPKeysImportAsVCards(session).run())

# Enable Tor in the background, if we have it...
session.config.slow_worker.add_unique_task(
session, 'tor-autoconfig', lambda: SetupTor.autoconfig(session))
@@ -338,7 +326,7 @@ def basic_app_config(self, session,
def make_master_key(self):
session = self.session
if (session.config.prefs.gpg_recipient not in (None, '', '!CREATE')
and not session.config.master_key
and not session.config.get_master_key()
and not session.config.prefs.obfuscate_index):
#
# This secret is arguably the most critical bit of data in the
@@ -357,10 +345,10 @@ def make_master_key(self):
# import math
# math.log((25 + 25 + 8) ** (12 * 4), 2) == 281.183...
#
session.config.master_key = okay_random(12 * 4,
'%s' % session.config,
'%s' % self.session,
'%s' % self.data)
session.config.set_master_key(okay_random(12 * 4,
'%s' % session.config,
'%s' % self.session,
'%s' % self.data))
if self._idx() and self._idx().INDEX:
session.ui.warning(_('Unable to obfuscate search index '
'without losing data. Not indexing '
@@ -1212,28 +1200,56 @@ def setup_command(self, session):

class SetupTor(TestableWebbable):
"""Check for Tor and auto-configure if possible."""
SYNOPSIS = (None, 'setup/tor', 'setup/tor', "[--auto]")
SYNOPSIS = (None, 'setup/tor', 'setup/tor', "[--auto] [--shared]")
HTTP_CALLABLE = ('POST',)
HTTP_POST_VARS = {
'prefer_shared': 'If set, prefer a shared Tor instance'}

@classmethod
def autoconfig(cls, session):
cls(session, arg=['--auto']).run()

def auto_configure_tor(self, session, hostport=None):
need_raw = [ConnBroker.OUTGOING_RAW]
hostport = hostport or ('127.0.0.1', 9050)
def auto_configure_tor(self, session):
if session.config.tor_worker is not None:
if session.config.tor_worker.isReady(wait=True):
time.sleep(0.1)
hostport = ('127.0.0.1', session.config.tor_worker.socks_port)
success, message = self._configure_tor(session, hostport,
port_zero=True)
if success:
return message

if session.config.sys.tor.systemwide:
hostport = ('127.0.0.1', 9050)
success, message = self._configure_tor(session, hostport)
if success:
return message

if session.config.tor_worker is None:
if session.config.start_tor_worker().isReady(wait=True):
time.sleep(0.1)
hostport = ('127.0.0.1', session.config.tor_worker.socks_port)
success, message = self._configure_tor(session, hostport,
port_zero=True)
if success:
session.config.sys.tor.systemwide = False

return message

def _configure_tor(self, session, hostport, port_zero=False):
try:
with ConnBroker.context(need=need_raw) as context:
with ConnBroker.context(need=[ConnBroker.OUTGOING_RAW]) as ctx:
tor = socket.create_connection(hostport, timeout=10)
except IOError:
return _('Failed to connect to Tor on %s:%s. Is it installed?'
) % hostport
return (False,
_('Failed to connect to Tor on %s:%s. Is it installed?')
% hostport)

# If that succeeded, we might have Tor!
old_proto = session.config.sys.proxy.protocol
session.config.sys.proxy.protocol = 'tor'
session.config.sys.proxy.host = hostport[0]
session.config.sys.proxy.port = hostport[1]
session.config.sys.proxy.port = 0 if port_zero else hostport[1]
session.config.sys.proxy.fallback = True

# Configure connection broker, revert settings while we test
@@ -1248,12 +1264,12 @@ def auto_configure_tor(self, session, hostport=None):
data=None, timeout=10).read()
safe_assert(motd.strip().endswith('}'))
session.config.sys.proxy.protocol = 'tor'
message = _('Successfully configured and enabled Tor!')
return (True, _('Successfully configured and enabled Tor!'))
except (IOError, AssertionError):
ConnBroker.configure()
message = _('Failed to configure Tor on %s:%s. Is the network down?'
) % hostport
return message
return (False,
_('Failed to configure Tor on %s:%s. Is the network down?')
% hostport)

def setup_command(self, session):
if ("--auto" not in self.args
@@ -1290,7 +1306,7 @@ def _CHECKPOINTS(self, config):
('language', lambda: config.prefs.language, SetupWelcome),

# Stage 1: Basic security - a password
('security', lambda: config.master_key, SetupPassword)
('security', lambda: config.get_master_key(), SetupPassword)
]

@classmethod
@@ -135,7 +135,7 @@ def save(self, split=True):
return

t = [time.time()]
encryption_key = self.config.master_key
encryption_key = self.config.get_master_key()
outfile = self._SaveFile(self.config, self.sig)
with self.lock:
# Optimizing for fast loads, so deletion only happens on save.
@@ -158,7 +158,7 @@ def save(self, split=True):
dir=self.config.tempfile_dir(),
header_data={'subject': subj},
name='PLC/%s' % self.sig) as fd:
fd.write(output)
fd.write(output.encode('utf-8'))
fd.save(outfile)
else:
with open(outfile, 'wb') as fd:
@@ -318,7 +318,7 @@ def _WordSig(cls, word, config):
return strhash(word, cls.HASH_LEN,
obfuscate=((config.prefs.obfuscate_index or
config.prefs.encrypt_index) and
config.master_key))
config.get_master_key()))


##############################################################################
@@ -442,7 +442,7 @@ def WordSig(cls, word, config):
return strhash(word, cls.HASH_LEN,
obfuscate=((config.prefs.obfuscate_index or
config.prefs.encrypt_index) and
config.master_key))
config.get_master_key()))

@classmethod
def SaveFile(cls, session, prefix):
@@ -544,12 +544,12 @@ def save(self, prefix=None, compact=True, mode='wb'):
outfile))
if output:
if self.config.prefs.encrypt_index:
encryption_key = self.config.master_key
encryption_key = self.config.get_master_key()
with EncryptingStreamer(encryption_key,
delimited=True,
dir=self.config.tempfile_dir(),
name='PostingList') as efd:
efd.write(output)
efd.write(output.encode('utf-8'))
efd.save(outfile, mode=mode)
else:
with open(outfile, mode) as fd:
@@ -37,6 +37,20 @@
SERIALIZE_POPEN_ALWAYS = False
SERIALIZE_POPEN_LOCK = threading.Lock()

THREAD_LOCAL = threading.local()


def PresetSafePopenArgs(**kwargs):
"""
Make it possible to preset Popen arguments, for injecting tweaks into
third-party code. We do this using thread-local data, so as to avoid
the need for yet another lock.
"""
if hasattr(THREAD_LOCAL, 'preset_args'):
THREAD_LOCAL.preset_args.append(kwargs)
else:
THREAD_LOCAL.preset_args = [kwargs]


class Safe_Pipe(object):
"""
@@ -63,6 +77,12 @@ def close(self):


class Safe_Popen(Unsafe_Popen):
def _preset_args(self):
if hasattr(THREAD_LOCAL, 'preset_args') and THREAD_LOCAL.preset_args:
return THREAD_LOCAL.preset_args.pop(-1)
else:
return {}

def __init__(self, args, bufsize=0,
executable=None,
stdin=None,
@@ -79,6 +99,39 @@ def __init__(self, args, bufsize=0,
keep_open=None,
long_running=False):

self._internal_fds = []

# Windows-work around: Console Handles can't be inherited, so if no
# source is passed, simulate stdin as a closed pipe. Not ideal, but
# stops pythonw crashing.
#
# See: https://bugs.python.org/issue3905
#
if stdin is None:
stdin = open(os.devnull, 'r')
self._internal_fds.append(stdin)

if stdout is None:
stdout = open(os.devnull, 'w')
self._internal_fds.append(stdout)

if stderr is None:
stderr = open(os.devnull, 'w')
self._internal_fds.append(stderr)

# This lets us inject Popen args into libraries
preset = self._preset_args()
if preset: print 'PRESET[%s]: %s' % (args, preset)
cwd = preset.get('cwd', cwd)
env = preset.get('env', env)
stdin = preset.get('stdin', stdin)
stdour = preset.get('stdout', stdout)
stderr = preset.get('stderr', stderr)
bufsize = preset.get('bufsize', bufsize)
close_fds = preset.get('close_fds', close_fds)
executable = preset.get('executable', executable)
long_running = preset.get('long_running', long_running)

# Set our default locking strategy
self._SAFE_POPEN_hold_lock = SERIALIZE_POPEN_ALWAYS

@@ -99,6 +152,8 @@ def __init__(self, args, bufsize=0,
# 2. Prevent signals from propagating
#
if mailpile.platforms.WindowsPopenSemantics():
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
creationflags = subprocess.CREATE_NEW_PROCESS_GROUP # 2.
if (stdin is not None or
stdout is not None or
@@ -180,6 +235,8 @@ def wait(self, *args, **kwargs):
return rv

def __del__(self):
for handle in self._internal_fds:
handle.close()
if Unsafe_Popen is not None:
Unsafe_Popen.__del__(self)
self._SAFE_POPEN_unlock()
@@ -192,6 +249,7 @@ def MakePopenUnsafe():


def MakePopenSafe():
THREAD_LOCAL.preset_args = []
subprocess.Popen = Safe_Popen
return Safe_Popen

Large diffs are not rendered by default.

@@ -17,14 +17,34 @@
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
from mailpile.util import *
import mailpile.platforms

DISABLE_LOCKDOWN = False

##[ These are the sys.lockdown restrictions ]#################################

##[ This is a secret only user owning Mailpile could know ]###################

def GetUserSecret(workdir):
"""Return a secret that only this Unix user could know."""
secret_file = os.path.join(workdir, 'mailpile.sec')
try:
return open(secret_file).read().strip()
except (OSError, IOError):
pass

# FIXME: Does this work reasonably on Windows? Does chmod do anything?
random_secret = okay_random(64, __file__)
with open(secret_file, 'w') as fd:
fd.write(random_secret)
mailpile.platforms.RestrictReadAccess(secret_file)
return random_secret


##[ These are the sys.lockdown restrictions ]#################################

def _lockdown(config):
if DISABLE_LOCKDOWN: return False
if config.detected_memory_corruption: return 99 # FIXME: Breaks demos?
lockdown = config.sys.lockdown or 0
try:
return int(lockdown)
@@ -66,6 +86,14 @@ def _lockdown_config(config):
return False


def _lockdown_quit(config):
if DISABLE_LOCKDOWN: return False
# The user is always allowed to quit, except in demo mode.
if _lockdown(config) < 0:
return _('In lockdown, doing nothing.')
return False


def _lockdown_basic(config):
if DISABLE_LOCKDOWN: return False
return _lockdown_config(config) or in_disk_lockdown(config)
@@ -90,7 +118,7 @@ def _lockdown_strict(config):
CC_CPU_INTENSIVE = [_lockdown_basic]
CC_LIST_PRIVATE_DATA = [_lockdown_minimal]
CC_TAG_EMAIL = [_lockdown_strict]
CC_QUIT = [_lockdown_minimal]
CC_QUIT = [_lockdown_quit]
CC_WEB_TERMINAL = [_lockdown_config]

CC_CONFIG_MAP = {
@@ -548,10 +576,10 @@ def tls_wrap_socket(org_wrap, sock, *args, **kwargs):

##[ Key Trust ]#############################################################

def evaluate_signature_key_trust(config, email, tree):
def evaluate_sender_trust(config, email, tree):
"""
This uses historic data from the search engine to refine and expand
upon the states we get back from GnuPG.
upon the states we get back from GnuPG and attempt to detect forgeries.
The new potential signature states are:
@@ -576,28 +604,55 @@ def evaluate_signature_key_trust(config, email, tree):
if not sender:
return

tree['trust'] = {}
trust = tree["trust"]

# If this mail didn't come from outside, skip all this.
# We don't vet ourselves for forgeries.
# FIXME: THIS IS INSECURE. We need to fix this mechanism globally.
message = email.get_msg()
if 'x-mp-internal-sender' in message:
trust["status"] = _("We trust ourselves")
return tree

# Calculate the default window we search for information. Don't include
# the same day as the message was received, to not be fooled by other
# junk that arrived the same day.
days = config.prefs.key_trust.window_days
msgts = long(email.get_msg_info(config.index.MSG_DATE), 36)
scope = ['dates:%d..%d' % (msgts - (days * 24 * 3600), msgts),
'from:%s' % sender]
end = msgts - (24 * 3600)
begin = end - (days * 24 * 3600)
scope = ['dates:%d..%d' % (begin, end), 'from:%s' % sender]

messages_per_key = {}
trust['counts'] = messages_per_key
def count(name, terms):
if name not in messages_per_key:
# Note: using .as_set() will exclude spam and trash, which
            # is almost certainly a good thing.
msgs = config.index.search(config.background, scope + terms)
messages_per_key[name] = len(msgs)
messages_per_key[name] = len(msgs.as_set())
return messages_per_key[name]

signed = lambda: count('signed', ['has:signature'])
if signed() < config.prefs.key_trust.threshold:
return

total = lambda: count('total', [])
if total() < min(5, config.prefs.key_trust.threshold):
# If we have too few messages within our desired window, try
# expanding the window...
scope[1] = 'dates:1970..%d' % end
del messages_per_key['total']

# Still too few? Abort.
if total() < min(5, config.prefs.key_trust.threshold):
trust["trust_unknown"] = True
trust["warning"] = _("This sender's reputation is unknown")
return tree

signed = lambda: count('signed', ['has:signature'])
swr = config.prefs.key_trust.sig_warn_pct / 100.0
ktr = config.prefs.key_trust.key_trust_pct / 100.0
knr = config.prefs.key_trust.key_new_pct / 100.0

def update_siginfo(si):
def update_siginfo(si, trust):
stat = si["status"]
keyid = si.get('keyinfo', '')[-16:].lower()

@@ -606,6 +661,15 @@ def update_siginfo(si):
# and warn the user if they're not present.
if (stat == 'none') and (signed() > swr * total()):
si["status"] = 'unsigned'
trust["missing_signature"] = True

# Compare email timestamp with the signature timestamp.
# If they differ by a great deal, treat the signature as
# invalid. This makes it much harder to copy old signed
# content (undetected) into new messages.
elif abs(msgts - si.get("timestamp", msgts)) > 7 * 24 * 3600:
si["status"] = 'invalid'
trust["invalid_signature"] = True

# Signed by unverified key: Signal that we trust this key if
# this is the key we've seen most of the time for this user.
@@ -614,6 +678,7 @@ def update_siginfo(si):
('unverified' in stat) and
(count(keyid, ['sig:%s' % keyid]) > ktr * signed())):
si["status"] = stat.replace('unverified', 'signed')
trust["signed"] = True

# Signed by a key we have seen very rarely for this user. Gently
        # warn the user that something unusual is going on.
@@ -622,19 +687,43 @@ def update_siginfo(si):
(count(keyid, ['sig:%s' % keyid]) < knr * signed())):
changed = "mixed-changed" if ("mixed" in stat) else "changed"
si["status"] = changed
trust["key_changed"] = True

# FIXME: Compare email timestamp with the signature timestamp.
# If they differ by a great deal, treat the signature as
# invalid? This would make it much harder to copy old signed
# content (undetected) into new messages.

if 'crypto' in tree:
update_siginfo(tree['crypto']['signature'])

for skey in ('text_parts', 'html_parts', 'attachments'):
for i, part in enumerate(tree[skey]):
if 'crypto' in part:
update_siginfo(part['crypto']['signature'])
else:
trust["%s_signature" % si["status"].replace("mixed-", "")] = True

if signed() >= config.prefs.key_trust.threshold:
if 'crypto' in tree:
update_siginfo(tree['crypto']['signature'], tree["trust"])

for skey in ('text_parts', 'html_parts', 'attachments'):
for i, part in enumerate(tree[skey]):
if 'crypto' in part:
update_siginfo(part['crypto']['signature'], {})

if 'received' in message:
headerprints = email.get_headerprints()
term = 'hps:%s' % headerprints['sender']
hps = count(term, [term])
if hps < 2:
trust["mua_or_mta_changed"] = True

# Translate accumulated state into a "problem" if applicable
problem = "problem" if (total() > 20) else "warning"
if trust.get("invalid_signature") or trust.get("revoked_signature"):
trust[problem] = _("The digital signature is invalid")
elif trust.get("missing_signature"):
trust[problem] = _("This person usually signs their mail")
elif trust.get("key_changed"):
trust[problem] = _("This was signed by an unexpected key")
elif trust.get("expired_signature"):
trust[problem] = _("This was signed by an expired key")
elif trust.get("verified_signature") or trust.get("signed"):
trust["status"] = _("Good signature, we are happy")
elif trust.get("mua_or_mta_changed"):
trust["warning"] = _("This came from an unexpected source")
else:
trust["status"] = _("No problems detected.")

return tree

@@ -26,6 +26,7 @@
from jinja2 import TemplatesNotFound, TemplateAssertionError, UndefinedError

import mailpile.commands
import mailpile.platforms
import mailpile.util
from mailpile.i18n import gettext as _
from mailpile.i18n import ngettext as _n
@@ -486,6 +487,8 @@ def _render_error(self, cfg, error_info):
emsg += "<h3>DATA:</h3><pre>%(data)s</pre>"
if 'config' in error_info.get('data'):
del error_info['data']['config']
if 'platforms' in error_info.get('data'):
del error_info['data']['platforms']
ei = {}
for kw in ('error', 'details', 'traceback', 'source', 'data'):
value = error_info.get(kw, '')
@@ -499,6 +502,7 @@ def render_web(self, cfg, tpl_names, data):
"""Render data as HTML"""
alldata = default_dict(self.html_variables)
alldata['config'] = cfg
alldata['platforms'] = mailpile.platforms
alldata.update(data)
try:
template = self._web_template(cfg, tpl_names)
@@ -55,6 +55,8 @@
MAIN_PID = os.getpid()
DEFAULT_PORT = 33411

# Warning: this is duplicated in the javascript, grep for WORD_REGEXP
# to keep any changes in sync.
WORD_REGEXP = re.compile('[^\s!@#$%^&*\(\)_+=\{\}\[\]'
':\"|;`\'\\\<\>\?,\.\/\-]{2,}')

@@ -71,10 +73,12 @@
'that', 'the', 'this', 'td', 'to', 'tr',
'was', 'we', 'were', 'you'])

BORING_HEADERS = ('received', 'received-spf', 'date',
BORING_HEADERS = ('received', 'received-spf', 'date', 'autocrypt',
'content-type', 'content-disposition', 'mime-version',
'list-archive', 'list-help', 'list-unsubscribe',
'dkim-signature', 'domainkey-signature')
'dkim-signature', 'domainkey-signature',
'arc-message-signature', 'arc-seal',
'arc-authentication-results', 'authentication-results')

# For the spam classifier, if these headers are missing a special
# note is made of that in the message keywords.
@@ -727,7 +731,7 @@ def decrypt_and_parse_lines(fd, parser, config,
passphrase=None, gpgi=None,
_raise=IOError, error_cb=None):
import mailpile.crypto.streamer as cstrm
symmetric_key = config and config.master_key or 'missing'
symmetric_key = config and config.get_master_key() or 'missing'
passphrase_reader = (passphrase.get_reader()
if (passphrase is not None) else
(config.passphrases['DEFAULT'].get_reader()
@@ -1044,6 +1048,7 @@ class CleanText:
"""
FS = ':/.\'\"\\'
CRLF = '\r\n'
HTML = '<>&"\''
WHITESPACE = '\r\n\t '
NONALNUM = ''.join([chr(c) for c in (set(range(32, 127)) -
set(range(ord('0'), ord('9') + 1)) -
@@ -811,9 +811,9 @@ def __init__(self, *lines, **kwargs):

def configure_encryption(self, config):
if config:
dec = lambda: config.master_key
dec = lambda: config.get_master_key()
enc = lambda: (config.prefs.encrypt_vcards and
config.master_key)
config.get_master_key())
self.config = config
else:
enc = dec = lambda: None
@@ -1266,7 +1266,7 @@ def load_vcards(self, session=None):

try:
prfs = self.config.prefs
key_func = lambda: self.config.master_key
key_func = lambda: self.config.get_master_key()
paths = [(fn, os.path.join(self.vcard_dir, fn))
for fn in os.listdir(self.vcard_dir)
if fn.endswith('.vcf')]
@@ -100,6 +100,7 @@ def __init__(self, environment):
e.filters['thread_upside_down'] = s._thread_upside_down
e.globals['fix_urls'] = s._fix_urls
e.filters['fix_urls'] = s._fix_urls
e.globals['stoplist'] = STOPLIST

# See utils.py for these functions:
e.globals['elapsed_datetime'] = elapsed_datetime
@@ -741,7 +742,7 @@ def _url_path_fix(self, *urlparts):
url = ''.join([unicode(p) for p in urlparts])
if url[:1] in ('/', ):
http_path = self.env.session.config.sys.http_path or ''
if not url.startswith(http_path):
if not url.startswith(http_path+'/'):
url = http_path + url
return self._safe(url)

@@ -80,8 +80,7 @@ Depends: ${misc:Depends},
python,
screen,
net-tools,
sudo,
expect
sudo
Description: multi-user mailpile web server
Mailpile is a modern, fast web-mail client with user-friendly encryption and
privacy features. Mailpile places great emphasis on providing a clean, elegant
@@ -90,6 +89,8 @@ Description: multi-user mailpile web server
.
This package configures Apache with "Multipile", a thin wrapper that allows
system users to launch their Mailpile by logging in on the web interface.
.
Details: https://github.com/mailpile/Mailpile/tree/master/shared-data/multipile

Package: mailpile-desktop
Architecture: all
@@ -1,3 +1,4 @@
mailpile-apache etc/sudoers.d
mailpile-apache.conf etc/mailpile
shared-data/multipile/multipile.rc.sample etc/mailpile
shared-data/multipile usr/share/mailpile
@@ -7,7 +7,7 @@ apache_install() {
mkdir -p /etc/apache2/conf-available

mkdir -p /var/lib/mailpile/apache
/usr/share/mailpile/multipile/mailpile-admin.py --generate-apache-rewritemap > /var/lib/mailpile/apache/usermap.txt
/usr/share/mailpile/multipile/mailpile-admin.py --configure-apache-usermap
chown -R root:www-data /var/lib/mailpile/apache
chmod -R 770 /var/lib/mailpile/apache

@@ -27,6 +27,9 @@ override_dh_auto_build:
--mailpile-theme /usr/share/mailpile/default-theme \
--multipile-www /usr/share/mailpile/multipile/www \
> mailpile-apache.conf
python shared-data/multipile/mailpile-admin.py --generate-apache-sudoers \
--mailpile-share /usr/share/mailpile \
> mailpile-apache

dh_auto_build

@@ -17,7 +17,6 @@ The following lists the files contained within this directory. The packaging scr
| **build.sh** | A script which builds Mailpile.app. |
| configurator.sh | A script which is used by the built Mailpile.app, at runtime. It configures Mailpile.app's GUI. (Used by build.sh.)|
| mailpile | A script which is used by the built Mailpile.app, at runtime. It sets environment variables and launches Mailpile. |
| **package.sh** | A script which packages Mailpile.app (Mailpile.app is built by build.sh) into a signed .dmg file.|
| README.md | This file. |

## Usage
@@ -30,9 +29,16 @@ The following software must be installed prior to running the packaging scripts.
- Xcode 9.3 (or later) - Available in the App Store.
- Command Line Tools for Xcode - Install them by executing `xcode-select --install` in Terminal.app.
- JDK 10 (or later) - Available on [Oracle's website](http://www.oracle.com/technetwork/java/javase/downloads/index.html).

And either:

- Node.js - Available on [nodejs.org](https://nodejs.org/en/). (Provides the following dependency, namely appdmg.)
- appdmg - Install it by executing `npm install -g appdmg` in Terminal.app. (Make sure to add its install target to *PATH*.)

Or:

- dmgbuild - Available from PyPI (pip install dmgbuild)

### Requirements
An internet connection is required as the packaging scripts use [Homebrew](https://brew.sh) and git to fetch dependencies.

@@ -49,9 +55,8 @@ Before executing the package scripts, ensure that the following statements are t
### Packaging Mailpile
Packaging Mailpile is a three step process.

1. Execute `./build.sh` in the directory which contains build.sh. This outputs ~/build/Mailpile.app
2. Execute `export DMG_SIGNING_IDENTITY=4P78A94863` after replacing 4P78A94863 with your Developer Certificate's ID.
3. Execute `./package.sh` in the directory which contains package.sh. This outputs ~/build/Mailpile.dmg.
1. Execute `export DMG_SIGNING_IDENTITY=4P78A94863` after replacing 4P78A94863 with your Developer Certificate's ID.
2. Execute `./build.sh` in the directory which contains build.sh. This outputs ~/build/Mailpile.app and ~/build/Mailpile.dmg.

You might want to run ~/build/Mailpile.app to test the build before shipping ~/build/Mailpile.dmg.

@@ -1,7 +1,11 @@
{
"title": "Mailpile",
"background": "BACKGROUND",
"window": { "width": 400, "height": 313 },
"icon-size": "112pt",
"window": {
"position": {"x": 250, "y": 250},
"size": {"width": 440, "height": 320}
},
"contents": [
{ "x": 300, "y": 160, "type": "link", "path": "/Applications" },
{ "x": 80, "y": 160, "type": "file", "path": "APP", "name": "Mailpile.app" }
@@ -0,0 +1,23 @@
# Homebrew formula for "symlinks" 1.4, a utility to scan and change
# symbolic links.  Patches the upstream Makefile so it installs into the
# Homebrew prefix instead of /usr/local and does not try to chown to root.
class Symlinks < Formula
  desc "scan/change symbolic links"
  homepage "http://www.ibiblio.org/pub/Linux/utils/file/symlinks.lsm"
  url "http://www.ibiblio.org/pub/Linux/utils/file/symlinks-1.4.tar.gz"
  sha256 "b0bb689dd0a2c46d9a7dd111b053707aba7b9cf29c4f0bad32984b14bdbe0399"

  def install
    # Point the hard-coded install paths at this formula's prefix.
    inreplace "Makefile", "/usr/local/bin", "#{bin}/"
    inreplace "Makefile", "/usr/local/man/man8", "#{man8}/"
    # Installing as root:root fails under Homebrew; drop the ownership flags.
    inreplace "Makefile", "-o root -g root", ""

    mkdir_p "#{bin}"
    mkdir_p "#{man8}"

    # NOTE(review): presumably works around malloc.h living under
    # /usr/include/malloc on macOS -- confirm.
    ENV["CFLAGS"]="-I/usr/include/malloc"
    system "make", "CFLAGS=#{ENV.cflags}"
    system "make", "install"
  end

  test do
    system "#{bin}/symlinks", "."
  end
end
@@ -0,0 +1,18 @@
#!/bin/bash
# Preflight checks for the macOS packaging build: fail fast unless a
# Java compiler and a DMG signing identity are both available.
set -e
set -x

# Some of the homebrew build stuff depends on a java compiler.
javac -version &>/dev/null || {
    echo "This script depends on javac"
    echo "Please install version 10, or later, of the Java Developer Kit."
    exit 1
}

# The GUI-o-MacTic build fails without this
[ -n "$DMG_SIGNING_IDENTITY" ] || {
    echo "Please set the DMG_SIGNING_IDENTITY environment variable."
    exit 1
}


@@ -0,0 +1,20 @@
#!/bin/bash
# Populate $ICONSET_DIR with downscaled copies of the single 1024x1024
# master icon, unless the iconset already exists.
set -e
set -x

# Nothing to do when the GUI-o-Mac-tic build is being skipped.
[ "$SKIP_GUI_O_MAC_TIC" = "" ] || exit 0

mkdir -p $ICONSET_DIR
if [ ! -e "$ICONSET_DIR/Icon-1024.png" ]; then
    export Icon1024="../icons/1024x1024.png"
    # Each entry is "name:pixels"; the odd names (33, 257, 513) are
    # duplicates of the 32/256/512 pixel sizes.
    for spec in 16:16 32:32 33:32 64:64 128:128 256:256 257:256 512:512 513:512; do
        name="${spec%%:*}"
        px="${spec##*:}"
        sips -z $px $px $Icon1024 --out $ICONSET_DIR/Icon-$name.png
    done
    cp $Icon1024 $ICONSET_DIR/Icon-1024.png
fi