Autocrlf in settings. Closes biicode/biicode#16
lasote committed Apr 22, 2015
1 parent 3c56b92 commit 6cbaa33
Showing 5 changed files with 174 additions and 98 deletions.
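The change threads a new "autocrlf" user setting through the checkin pipeline: UserSettings now defaults it to True, CheckinManager reads it, and obtain_types_blobs passes it down to every Blob, so CRLF normalization can be switched off. A minimal sketch of the resulting call chain (not part of the commit; hive_holder, settings, biiout and files are assumed to come from the usual biicode edition flow, and the only new requirement is that settings.user behaves like a dict):

    # Sketch only: illustrates the wiring this commit introduces.
    from biicode.common.edition.checkin import CheckinManager

    settings.user["autocrlf"] = False            # opt out of line-ending normalization
    manager = CheckinManager(hive_holder, settings, biiout)
    processor_changes = manager.checkin_files(files)   # files: {BlockCellName: raw text from disk}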
173 changes: 94 additions & 79 deletions edition/checkin.py
@@ -8,91 +8,105 @@
 from biicode.common.edition import changevalidator
 from biicode.common.edition.processors.processor_changes import ProcessorChanges
 from biicode.common.model.brl.block_name import BlockName
+from biicode.common.settings.settings import Settings


-def checkin_block_files(hive_holder, block_name, files, processor_changes, biiout):
-    '''
-    Params:
-        hive_holder: HiveHolder
-        block_name: BlockName
-        files: {cell_name: content}
-        processor_changes: ProcessorChanges
-        biiout: biiout
-    '''
-    block_name = BlockName(block_name)
-    types_blobs = obtain_types_blobs(files) # {cell_name: (TYPE, Content/CellType/None)}
-    # FIXME: What happens if merge result is larger than individual files, reject???
-    changevalidator.remove_large_cells(types_blobs, biiout)
-    try:
-        block_holder = hive_holder[block_name]
-    except KeyError:
-        block_holder = BlockHolder(block_name, [])
-        hive_holder.add_holder(block_holder)
-
-    for cell_name, (biitype, blob) in types_blobs.iteritems():
-        block_cell_name = block_name + cell_name
-        cell = SimpleCell(block_cell_name, biitype)
-        try:
-            resource = block_holder[cell_name]
-        except KeyError:
-            content = Content(block_cell_name, load=blob)
-            processor_changes.upsert(block_cell_name, content)
-        else:
-            content = resource.content
-            if content is None or blob != content.load:
-                content = Content(block_cell_name, load=blob)
-                processor_changes.upsert(block_cell_name, content)
-
-        resource = Resource(cell, content)
-        block_holder.add_resource(resource)
-
-    for cell_name, resource in block_holder.resources.items():
-        if cell_name not in types_blobs:
-            if resource.content is not None:
-                processor_changes.delete(resource.name)
-                block_holder.delete_resource(cell_name)
-    hive_holder.hive.update(processor_changes)
-
-
-def checkin_files(hive_holder, settings, files, biiout):
-    '''
-    Params:
-        hive_holder: HiveHolder
-        files: dict{BlockCellName: Item (str or bytes loaded from file)}
-        biiout: biiout
-    Returns: ProcessorChanges
-    '''
-    logger.debug("----------- checkin ---------------")
-    hive = hive_holder.hive
-    hive.settings = settings
-
-    processor_changes = ProcessorChanges()
-    if files is None:
-        return processor_changes
-
-    block_files = {}
-    for block_cell_name, filecontent in files.iteritems():
-        block_files.setdefault(block_cell_name.block_name,
-                               {})[block_cell_name.cell_name] = filecontent
-
-    for block_name, files in block_files.iteritems():
-        checkin_block_files(hive_holder, block_name, files, processor_changes, biiout)
-
-    for block_holder in hive_holder.block_holders:
-        if block_holder.block_name not in block_files:
-            processor_changes.deleted.update(block_holder.block_cell_names)
-            hive_holder.add_holder(BlockHolder(block_holder.block_name, []))
-
-    hive_holder.delete_empty_blocks()
-    hive.update(processor_changes)
-
-    # Raises if max is overtaken
-    changevalidator.check_hive_num_cells(hive)
-    return processor_changes
-
-
-def obtain_types_blobs(files):
+class CheckinManager(object):
+
+    def __init__(self, hive_holder, settings, biiout):
+        '''
+        Params:
+            hive_holder: HiveHolder
+            settings: Settings
+            biiout: biiout
+        '''
+        self.hive_holder = hive_holder
+        self.settings = settings or Settings()
+        self.biiout = biiout
+
+    def checkin_files(self, files):
+        '''
+        Params:
+            files: dict{BlockCellName: Item (str or bytes loaded from file)}
+        Returns: ProcessorChanges
+        '''
+        logger.debug("----------- checkin ---------------")
+        hive = self.hive_holder.hive
+        hive.settings = self.settings
+
+        processor_changes = ProcessorChanges()
+        if files is None:
+            return processor_changes
+
+        block_files = {}
+        for block_cell_name, filecontent in files.iteritems():
+            block_files.setdefault(block_cell_name.block_name,
+                                   {})[block_cell_name.cell_name] = filecontent
+
+        for block_name, files in block_files.iteritems():
+            self.checkin_block_files(block_name, files, processor_changes)
+
+        for block_holder in self.hive_holder.block_holders:
+            if block_holder.block_name not in block_files:
+                processor_changes.deleted.update(block_holder.block_cell_names)
+                self.hive_holder.add_holder(BlockHolder(block_holder.block_name, []))
+
+        self.hive_holder.delete_empty_blocks()
+        hive.update(processor_changes)
+
+        # Raises if max is overtaken
+        changevalidator.check_hive_num_cells(hive)
+        return processor_changes
+
+    def checkin_block_files(self, block_name, files, processor_changes):
+        '''
+        Params:
+            block_name: BlockName
+            files: {cell_name: content}
+            processor_changes: ProcessorChanges
+        '''
+        block_name = BlockName(block_name)
+        normalize = self.settings.user.get("autocrlf", True)
+        if not normalize:
+            self.biiout.info("Skipping file normalizing")
+
+        types_blobs = obtain_types_blobs(files, normalize) # {cell_name: (TYPE, Content/CellType/None)}
+        # FIXME: What happens if merge result is larger than individual files, reject???
+        changevalidator.remove_large_cells(types_blobs, self.biiout)
+        try:
+            block_holder = self.hive_holder[block_name]
+        except KeyError:
+            block_holder = BlockHolder(block_name, [])
+            self.hive_holder.add_holder(block_holder)
+
+        for cell_name, (biitype, blob) in types_blobs.iteritems():
+            block_cell_name = block_name + cell_name
+            cell = SimpleCell(block_cell_name, biitype)
+            try:
+                resource = block_holder[cell_name]
+            except KeyError:
+                content = Content(block_cell_name, load=blob)
+                processor_changes.upsert(block_cell_name, content)
+            else:
+                content = resource.content
+                if content is None or blob != content.load:
+                    content = Content(block_cell_name, load=blob)
+                    processor_changes.upsert(block_cell_name, content)
+
+            resource = Resource(cell, content)
+            block_holder.add_resource(resource)
+
+        for cell_name, resource in block_holder.resources.items():
+            if cell_name not in types_blobs:
+                if resource.content is not None:
+                    processor_changes.delete(resource.name)
+                    block_holder.delete_resource(cell_name)
+        self.hive_holder.hive.update(processor_changes)
+
+
+def obtain_types_blobs(files, normalize):
     """files: dict{BlockCellName: Item (str or bytes loaded from file)}
+    normalize: Boolean, normalize Blobs (crlf) or not
    return: {BlockCellName: (BiiType, Blob)}
    """
    tree = {}
@@ -105,15 +119,16 @@ def obtain_types_blobs(files):
         base_tree[tokens[-1]] = block_cell_name, filecontent

     result = {}
-    apply_types(tree, TypeFilter(), result)
+    apply_types(tree, TypeFilter(), result, normalize)
     return result


-def apply_types(base_tree, current_types, result):
+def apply_types(base_tree, current_types, result, normalize):
     '''
     Params:
         base_tree: {block_name: (CellName, content_str)}
         current_types: TypeFilter
+        normalize: Boolean, normalize Blobs (crlf) or not
         result: dict
     '''
     types_resource = base_tree.get('types.bii')
@@ -123,11 +138,11 @@ def apply_types(base_tree, current_types, result):
     for value in base_tree.itervalues():
         if isinstance(value, dict):
             base_tree = value
-            apply_types(base_tree, current_types, result)
+            apply_types(base_tree, current_types, result, normalize)
         else:
             block_cell_name, filecontent = value
             bii_type = _get_type_cell(filecontent, current_types, block_cell_name)
-            blob = Blob(filecontent, is_binary=bii_type.is_binary())
+            blob = Blob(filecontent, is_binary=bii_type.is_binary(), normalize=normalize)
             result[block_cell_name] = bii_type, blob

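The net effect of the normalize flag inside checkin_block_files shows up at the Blob level: with autocrlf on, text blobs are stored normalized and later re-expanded to the platform line ending; with it off, the bytes read from disk are kept verbatim. A small sketch mirroring the updated test at the bottom of this page (illustrative values only):

    # Sketch: two checkin modes for the same on-disk content.
    import os
    from biicode.common.edition.checkin import obtain_types_blobs

    files = {'afile1.c': 'Hello\r\n'}

    _, blob = obtain_types_blobs(files, True)['afile1.c']
    # blob compares equal to Blob('Hello%s' % os.linesep): line endings normalized

    _, blob = obtain_types_blobs(files, False)['afile1.c']
    # blob.binary == 'Hello\r\n': contents kept exactly as read from disk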
19 changes: 11 additions & 8 deletions edition/hive_manager.py
@@ -2,7 +2,7 @@
 from biicode.common.edition.hiveprocessor import (blocks_process, deps_process,
                                                   compute_src_graph, compute_common_table)
 from biicode.common.edition.processors.processor_changes import ProcessorChanges
-from biicode.common.edition.checkin import checkin_files, checkin_block_files
+from biicode.common.edition.checkin import CheckinManager
 from biicode.common.exception import BiiException, UpToDatePublishException,\
     PublishException
 from biicode.common.publish.publish_manager import block_changed
@@ -118,7 +118,7 @@ def closure(self):
         hive = self.hive
         return hive.hive_dependencies.closure

-    def update(self, block_name=None, time=None):
+    def update(self, settings, block_name=None, time=None):
         """ a block is outdated, because someone has published from another location,
         and parent is not the last one in the block anymore.
         update is able to merge with the given time
@@ -146,7 +146,10 @@ def update(self, block_name=None, time=None):

         # Extra "process" after the update
         proc_changes = ProcessorChanges()
-        checkin_block_files(hive_holder, block_name, files, proc_changes, self._biiout)
+
+        checkin_manager = CheckinManager(hive_holder, settings, self._biiout)
+        checkin_manager.checkin_block_files(block_name, files, proc_changes)
+
         blocks_process(hive_holder, proc_changes, self._biiout)
         deps_process(self._biiapi, hive_holder, proc_changes, self._biiout)
         block_holder = hive_holder[block_name]
@@ -158,12 +161,12 @@ def update(self, block_name=None, time=None):
         return block_name

     def process(self, settings, files):
-        hive_holder = self.hive_holder
         delete_migration = migrate_bii_config(files, self._biiout)
-        processor_changes = checkin_files(hive_holder, settings, files, self._biiout)
-        blocks_process(hive_holder, processor_changes, self._biiout)
-        deps_process(self._biiapi, hive_holder, processor_changes, self._biiout, settings)
-        self._edition.save_hive_changes(hive_holder.hive, processor_changes)
+        checkin_manager = CheckinManager(self.hive_holder, settings, self._biiout)
+        processor_changes = checkin_manager.checkin_files(files)
+        blocks_process(self.hive_holder, processor_changes, self._biiout)
+        deps_process(self._biiapi, self.hive_holder, processor_changes, self._biiout, settings)
+        self._edition.save_hive_changes(self.hive_holder.hive, processor_changes)
         return delete_migration

     def find(self, policy=None, **find_args):
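Note the signature change above: HiveManager.update() now takes settings as its first argument so it can build a CheckinManager, so callers that used the old signature need to pass settings through. A brief sketch of the new call (hive_manager and settings are assumed to exist exactly as in the surrounding code):

    # Before this commit:  hive_manager.update(block_name, time)
    # After this commit:   settings is threaded through to the CheckinManager
    block_name = hive_manager.update(settings, block_name=block_name, time=time)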
10 changes: 6 additions & 4 deletions model/blob.py
@@ -55,10 +55,11 @@ class Blob(object):
     SERIAL_COMPRESSED_BIN_KEY = "c"
     SERIAL_SIZE_KEY = "sz"

-    def __init__(self, blob=None, is_binary=False, path=None):
+    def __init__(self, blob=None, is_binary=False, path=None, normalize=True):
         ''' public constructor with a string as parameter, there is no-guarantee that the input is
         really Sys-compliant, so better normalize it, so call setText(), no setNormalizedText()!!!
         '''
+        self.normalize = normalize # Just configuration for this instance. Not serialized or stored
         self._binary = None # The real load of the blob
         self._compressed_bin = None # The load, but compressed
         self._sys_text = None # Transient, the load text as system CRLF requires
@@ -67,10 +68,11 @@ def __init__(self, blob=None, is_binary=False, path=None):
         if blob is not None:
             assert path is None
             assert isinstance(blob, basestring)
-            if is_binary:
+            if is_binary or not normalize:
                 self._binary = blob
             else:
                 self._binary = normalize_text(blob)
+
         if path is not None:
             assert blob is None
             if is_binary:
@@ -160,7 +162,7 @@ def _system_text(self):
         @return a string with the CRLF combination specific for the current OS
         """
         if self._sys_text is None:
-            self._sys_text = systemize_text(self.text)
+            self._sys_text = systemize_text(self.text) if self.normalize else self.text
         return self._sys_text

     @property
@@ -211,6 +213,6 @@ def deserialize(data):
         c = Blob(is_binary=is_binary)
         c._compressed_bin = data[Blob.SERIAL_COMPRESSED_BIN_KEY]
         c._size = data.get(Blob.SERIAL_SIZE_KEY)
-        #c.is_binary = data[Blob.SERIAL_IS_BINARY_KEY]
+        # c.is_binary = data[Blob.SERIAL_IS_BINARY_KEY]
         c._sha = SHA.deserialize(data[Blob.SERIAL_SHA_KEY])
         return c
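In Blob the flag acts in both directions: on construction, text is only passed through normalize_text() when normalize is set (binaries never are), and _system_text only re-applies the platform line ending via systemize_text() when the flag is set. A standalone sketch of the intended round trip, not biicode code (it assumes normalize_text collapses line endings to '\n' and systemize_text expands them to os.linesep):

    import os

    def checkin_text(raw, autocrlf=True):
        """Roughly the way in: normalize only when autocrlf is on."""
        return raw.replace('\r\n', '\n') if autocrlf else raw

    def checkout_text(stored, autocrlf=True):
        """Roughly what Blob._system_text does on the way out."""
        return stored.replace('\n', os.linesep) if autocrlf else stored

    print(checkout_text(checkin_text('Hello\r\n')))                # platform line ending
    print(checkout_text(checkin_text('Hello\r\n', False), False))  # 'Hello\r\n', untouched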
13 changes: 12 additions & 1 deletion settings/settings.py
@@ -12,13 +12,24 @@


 class UserSettings(dict):
+
+    def __init__(self, *args, **kwargs):
+        self["autocrlf"] = True # Default, overrided if exists in args dict
+        dict.__init__(self, *args, **kwargs)
+
     def serialize(self):
         return dict(self)

     @staticmethod
     def deserialize(data):
         deserializer = DictDeserializer(str, str)
-        return UserSettings(deserializer.deserialize(data))
+        tmp = deserializer.deserialize(data)
+        for key, value in tmp.iteritems():
+            convert = {"false": False, "False": False,
+                       "True": True, "true": True}
+            value = convert.get(value, value)
+            tmp[key] = value
+        return UserSettings(tmp)


 class Settings(object):
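The extra loop in UserSettings.deserialize is needed because the stored dict comes back through DictDeserializer(str, str), so a boolean the user saved returns as the string "false" or "true"; a non-empty string is truthy, so without the conversion settings.user.get("autocrlf") could never actually disable normalization. A standalone sketch of the per-value mapping (not the biicode serializer itself):

    convert = {"false": False, "False": False, "true": True, "True": True}

    raw = {"autocrlf": "false"}                       # as returned by DictDeserializer(str, str)
    user = {k: convert.get(v, v) for k, v in raw.items()}
    assert user["autocrlf"] is False                  # real boolean, so `if not normalize:` works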
57 changes: 51 additions & 6 deletions test/edition/checkin_test.py
@@ -1,16 +1,61 @@
 import unittest
-from biicode.common.edition.checkin import obtain_types_blobs
+from biicode.common.edition.checkin import obtain_types_blobs, CheckinManager
 from biicode.common.model.bii_type import CPP
 from biicode.common.model.blob import Blob
+import os
+from biicode.common.settings.settings import Settings
+from mock import Mock
+from biicode.common.model.brl.block_cell_name import BlockCellName
+from biicode.common.edition.hive_holder import HiveHolder
+from biicode.common.edition.hive import Hive


 class ChekinTest(unittest.TestCase):

     def obtain_types_blobs_test(self):
-        files = {'afile1.c': 'Hello',
-                 'bii/file2.c': 'Bye'}
-        result = obtain_types_blobs(files)
-        self.assertEqual({'afile1.c': (CPP, Blob('Hello')),
-                          'bii/file2.c': (CPP, Blob('Bye'))},
+        files = {'afile1.c': 'Hello\r\n',
+                 'bii/file2.c': 'Bye\n'}
+        normalize = True
+        result = obtain_types_blobs(files, normalize)
+        self.assertEqual({'afile1.c': (CPP, Blob('Hello%s' % os.linesep, normalize=normalize)),
+                          'bii/file2.c': (CPP, Blob('Bye%s' % os.linesep, normalize=normalize))},
                          result)

+        # Now not normalize
+        normalize = False
+        result = obtain_types_blobs(files, normalize)
+        self.assertEqual(result["afile1.c"][1].binary, 'Hello\r\n')
+        self.assertEqual(result["bii/file2.c"][1].binary, 'Bye\n')
+
+    def checkin_manager_test(self):
+        '''
+        Tests CheckinManager and normalizing.
+        Instance a ChekinManager and checkin files, first normalizing and
+        then without normalizing
+        '''
+        settings = Mock(Settings())
+        settings.user = {"autocrlf": True}
+        hive_holder = HiveHolder(Hive(), {}, {})
+        biiout = Mock()
+        manager = CheckinManager(hive_holder, settings, biiout)
+
+        files = {BlockCellName('user/block/file1.c'): 'Hello\r\n',
+                 BlockCellName('user/block2/file2.c'): 'Bye\n'}
+
+        manager.checkin_files(files)
+
+        file1_content = hive_holder.resources["user/block/file1.c"].content.load.binary
+        self.assertEqual(file1_content, 'Hello%s' % os.linesep)
+
+        file2_content = hive_holder.resources["user/block2/file2.c"].content.load.binary
+        self.assertEqual(file2_content, 'Bye%s' % os.linesep)
+
+        # Now not normalize
+        settings.user = {"autocrlf": False}
+        manager.checkin_files(files)
+
+        file1_content = hive_holder.resources["user/block/file1.c"].content.load.binary
+        self.assertEqual(file1_content, 'Hello\r\n')
+
+        file2_content = hive_holder.resources["user/block2/file2.c"].content.load.binary
+        self.assertEqual(file2_content, 'Bye\n')
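The tests cover both explicit values of autocrlf; when the key is missing entirely, checkin_block_files falls back to True through settings.user.get("autocrlf", True), so normalization stays the default. A possible extra assertion in the same style as the tests above (hypothetical, not part of the commit):

    # Hypothetical addition to ChekinTest.checkin_manager_test: a missing key behaves like True.
    settings.user = {}                      # no "autocrlf" entry at all
    manager.checkin_files(files)
    file1_content = hive_holder.resources["user/block/file1.c"].content.load.binary
    self.assertEqual(file1_content, 'Hello%s' % os.linesep)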