Merge PR #102 into 12.0
Signed-off-by: guewen
OCA-git-bot committed Nov 17, 2019
2 parents 91ee4a6 + b6de817 commit 1ad2d56
Showing 12 changed files with 46 additions and 26 deletions.
5 changes: 3 additions & 2 deletions base_import_async/__manifest__.py
@@ -7,7 +7,7 @@
{
'name': 'Asynchronous Import',
'summary': 'Import CSV files in the background',
- 'version': '11.0.1.0.0',
+ 'version': '12.0.1.0.0',
'author': 'Akretion, ACSONE SA/NV, Odoo Community Association (OCA)',
'license': 'AGPL-3',
'website': 'https://github.com/OCA/queue',
@@ -22,5 +22,6 @@
'qweb': [
'static/src/xml/import.xml',
],
- 'installable': False,
+ 'installable': True,
+ 'development_status': 'Stable',
}
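By OCA convention the first two components of a module version track the Odoo series, so the migration bumps 11.0.1.0.0 to 12.0.1.0.0 and flips the addon back to installable. A condensed view of the affected manifest keys after the change (other keys unchanged and omitted here):

```python
# base_import_async/__manifest__.py -- excerpt of the migrated keys only
{
    'name': 'Asynchronous Import',
    'version': '12.0.1.0.0',          # 12.0 series
    'installable': True,              # re-enabled after the migration
    'development_status': 'Stable',
}
```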
12 changes: 6 additions & 6 deletions base_import_async/models/base_import_import.py
@@ -34,11 +34,11 @@ class BaseImportImport(models.TransientModel):
_inherit = 'base_import.import'

@api.multi
- def do(self, fields, options, dryrun=False):
+ def do(self, fields, columns, options, dryrun=False):
if dryrun or not options.get(OPT_USE_QUEUE):
# normal import
return super(BaseImportImport, self).do(
- fields, options, dryrun=dryrun)
+ fields, columns, options, dryrun=dryrun)

# asynchronous import
try:
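The new signature follows Odoo 12's `base_import.import.do()`, which takes a `columns` argument between `fields` and `options`; the override simply forwards it when falling back to the standard import. A minimal sketch of the delegation pattern, assembled from the hunk above and assuming `OPT_USE_QUEUE` is the `'use_queue'` option key mentioned in the HISTORY entry:

```python
from odoo import api, models

OPT_USE_QUEUE = 'use_queue'  # assumed value; the module defines the constant elsewhere


class BaseImportImport(models.TransientModel):
    _inherit = 'base_import.import'

    @api.multi
    def do(self, fields, columns, options, dryrun=False):
        # Dry runs and imports without the queue option go through the
        # standard, synchronous import.
        if dryrun or not options.get(OPT_USE_QUEUE):
            return super(BaseImportImport, self).do(
                fields, columns, options, dryrun=dryrun)
        # Otherwise the CSV is stored as an attachment and split into
        # delayed jobs (not shown in this sketch).
```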
@@ -89,9 +89,9 @@ def _create_csv_attachment(self, fields, data, options, file_name):
# write csv
f = StringIO()
writer = csv.writer(f,
- delimiter=str(options.get(OPT_SEPARATOR)),
+ delimiter=str(options.get(OPT_SEPARATOR)) or ',',
quotechar=str(options.get(OPT_QUOTING)))
- encoding = options.get(OPT_ENCODING, 'utf-8')
+ encoding = options.get(OPT_ENCODING) or 'utf-8'
writer.writerow(fields)
for row in data:
writer.writerow(row)
@@ -107,10 +107,10 @@ def _create_csv_attachment(self, fields, data, options, file_name):
@api.model
def _read_csv_attachment(self, attachment, options):
decoded_datas = base64.decodebytes(attachment.datas)
- encoding = options.get(OPT_ENCODING, 'utf-8')
+ encoding = options.get(OPT_ENCODING) or 'utf-8'
f = TextIOWrapper(BytesIO(decoded_datas), encoding=encoding)
reader = csv.reader(f,
- delimiter=str(options.get(OPT_SEPARATOR)),
+ delimiter=str(options.get(OPT_SEPARATOR)) or ',',
quotechar=str(options.get(OPT_QUOTING)))

fields = next(reader)
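Both CSV helpers also switch from `dict.get(key, default)` to `options.get(key) or default` for the delimiter and encoding. The difference matters because the import wizard can pass these keys explicitly set to a falsy value (for example an empty string), in which case `.get()` with a default still returns the falsy value. A small illustration, using plain dict keys as stand-ins for the `OPT_SEPARATOR` and `OPT_ENCODING` constants:

```python
# Hypothetical options dict as the import wizard might pass it.
options = {'separator': '', 'encoding': False}

# dict.get() only falls back when the key is missing entirely:
assert options.get('encoding', 'utf-8') is False   # present but falsy
assert options.get('separator', ',') == ''         # unusable as a delimiter

# `or` also replaces values that are present but falsy:
assert (options.get('encoding') or 'utf-8') == 'utf-8'
assert (options.get('separator') or ',') == ','
```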
4 changes: 2 additions & 2 deletions base_import_async/readme/HISTORY.rst
@@ -1,4 +1,4 @@
- 11.0.1.0.0 (2018-06-26)
+ 12.0.1.0.0 (2018-10-20)
~~~~~~~~~~~~~~~~~~~~~~~

- * [BREAKING] In the `do` method the `use_connector` option has changed to `use_queue`.
+ * [MIGRATION] from 11.0 branched at rev. b0945be
4 changes: 3 additions & 1 deletion base_import_async/static/src/js/import.js
@@ -19,8 +19,10 @@ odoo.define('base_import_async.import', function (require) {
_t("Your request is being processed"),
_t("You can check the status of this job in menu 'Queue / Jobs'.")
);
+ this.exit();
+ } else {
+ this._super.apply(this, arguments);
}
- this._super.apply(this, arguments);
},

});
1 change: 1 addition & 0 deletions setup/base_import_async/odoo/addons/base_import_async
6 changes: 6 additions & 0 deletions setup/base_import_async/setup.py
@@ -0,0 +1,6 @@
import setuptools

setuptools.setup(
setup_requires=['setuptools-odoo'],
odoo_addon=True,
)
6 changes: 6 additions & 0 deletions setup/test_base_import_async/setup.py
@@ -0,0 +1,6 @@
import setuptools

setuptools.setup(
setup_requires=['setuptools-odoo'],
odoo_addon=True,
)
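These are the standard OCA packaging stubs: setuptools-odoo reads the addon's __manifest__.py to generate the package metadata, so the migrated addons can typically be installed directly from the repository checkout with pip (for example `pip install -e setup/base_import_async`).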
5 changes: 3 additions & 2 deletions test_base_import_async/__manifest__.py
@@ -4,7 +4,7 @@

{
'name': 'Test suite for base_import_async',
- 'version': '11.0.1.0.0',
+ 'version': '12.0.1.0.0',
'author': 'ACSONE SA/NV, Odoo Community Association (OCA)',
'license': 'AGPL-3',
'website': 'https://github.com/OCA/queue',
@@ -20,5 +20,6 @@
'data': [
'tests/data.xml',
],
- 'installable': False,
+ 'installable': True,
+ 'development_status': 'Stable',
}
1 change: 1 addition & 0 deletions test_base_import_async/readme/CONTRIBUTORS.rst
@@ -1,2 +1,3 @@
* Stéphane Bidoul (ACSONE)
* Dennis Sluijk (Onestein)
+ * Guewen Baconnier (Camptocamp)
2 changes: 1 addition & 1 deletion test_base_import_async/tests/data.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<odoo>
- <!-- hack to set an xmlid on some demo data that have non in v9 -->
+ <!-- hack to set an xmlid on some demo data that have non -->
<record id="testjournal" model="ir.model.data">
<field name="res_id" search="[('code','=','MISC')]" model="account.journal"/>
<field name="model">ir.model.fields</field>
25 changes: 13 additions & 12 deletions test_base_import_async/tests/test_base_import_async.py
@@ -29,6 +29,7 @@ class TestBaseImportAsync(common.TransactionCase):
OPT_SEPARATOR: ',',
OPT_QUOTING: '"',
OPT_HAS_HEADER: True,
+ 'date_format': '%Y-%m-%d',
}

def setUp(self):
@@ -51,7 +52,7 @@ def _do_import(self, file_name, use_queue, chunk_size=None):
options = dict(self.OPTIONS)
options[OPT_USE_QUEUE] = use_queue
options[OPT_CHUNK_SIZE] = chunk_size
- return importer.do(self.FIELDS, options)
+ return importer.do(self.FIELDS, self.FIELDS, options)

def _check_import_result(self):
move_count = self.move_obj.search_count(
@@ -61,7 +62,7 @@ def _check_import_result(self):
def test_normal_import(self):
""" Test the standard import still works. """
res = self._do_import('account.move.csv', use_queue=False)
- self.assertFalse(res, repr(res))
+ self.assertFalse(res['messages'], repr(res))
self._check_import_result()
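The stricter assertion reflects the new return value: in Odoo 12, `do()` returns a dict of results rather than a bare list of error messages, so a clean synchronous import is detected by an empty 'messages' list. A sketch of the assumed result shape (illustrative only, keys and values hypothetical):

```python
# Assumed shape of the Odoo 12 `do()` result (illustrative, not exhaustive):
res = {'ids': [1, 2, 3], 'messages': []}
# An empty 'messages' list means the rows were imported without errors.
assert not res['messages']
```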

def test_async_import(self):
@@ -77,14 +78,14 @@ def test_async_import(self):
self.assertEqual(len(split_job), 1)
# job names are important
self.assertEqual(split_job.name,
"Import Account Entry from file account.move.csv")
"Import Journal Entries from file account.move.csv")
# perform job
Job.load(self.env, split_job.uuid).perform()
# check one job has been generated to load the file (one chunk)
load_job = self.job_obj.search([('id', '!=', split_job.id)])
self.assertEqual(len(load_job), 1)
self.assertEqual(load_job.name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#0 - lines 2 to 10")
# perform job
Job.load(self.env, load_job.uuid).perform()
@@ -105,10 +106,10 @@ def test_async_import_small_misaligned_chunks(self):
[('id', '!=', split_job.id)], order='name')
self.assertEqual(len(load_jobs), 2)
self.assertEqual(load_jobs[0].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#0 - lines 2 to 7")
self.assertEqual(load_jobs[1].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#1 - lines 8 to 10")
# perform job
Job.load(self.env, load_jobs[0].uuid).perform()
@@ -130,13 +131,13 @@ def test_async_import_smaller_misaligned_chunks(self):
[('id', '!=', split_job.id)], order='name')
self.assertEqual(len(load_jobs), 3)
self.assertEqual(load_jobs[0].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#0 - lines 2 to 4")
self.assertEqual(load_jobs[1].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#1 - lines 5 to 7")
self.assertEqual(load_jobs[2].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#2 - lines 8 to 10")
# perform job
Job.load(self.env, load_jobs[0].uuid).perform()
@@ -160,13 +161,13 @@ def test_async_import_smaller_aligned_chunks(self):
[('id', '!=', split_job.id)], order='name')
self.assertEqual(len(load_jobs), 3)
self.assertEqual(load_jobs[0].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#0 - lines 2 to 4")
self.assertEqual(load_jobs[1].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#1 - lines 5 to 7")
self.assertEqual(load_jobs[2].name,
"Import Account Entry from file account.move.csv - "
"Import Journal Entries from file account.move.csv - "
"#2 - lines 8 to 10")
# perform job
Job.load(self.env, load_jobs[0].uuid).perform()
