Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

HRM Team Membership prepop, move BulkImporter to s3import as more logical/easier to find. Re-enable Responsive in Bootstrap.
  • Loading branch information...
commit 87a9d53f0cf821b1e6011de40383049c659a0133 1 parent e433400
Fran Boon authored
Showing with 460 additions and 445 deletions.
  1. +1 −1  VERSION
  2. +6 −8 controllers/pr.py
  3. +2 −1  models/zzz_1st_run.py
  4. +12 −3 modules/eden/hrm.py
  5. +1 −1  modules/eden/pr.py
  6. +1 −1  modules/s3/codecs/xls.py
  7. +396 −7 modules/s3/s3import.py
  8. +0 −384 modules/s3/s3utils.py
  9. +1 −1  private/templates/ADAT/tasks.cfg
  10. +1 −1  private/templates/ARC/tasks.cfg
  11. +1 −1  private/templates/CERT/tasks.cfg
  12. +1 −1  private/templates/CSN/tasks.cfg
  13. +1 −1  private/templates/Crisis/tasks.cfg
  14. +5 −5 private/templates/DRMP/event.csv
  15. +1 −1  private/templates/DRMP/tasks.cfg
  16. +1 −1  private/templates/DRRPP/tasks.cfg
  17. +1 −1  private/templates/Delphi/tasks.cfg
  18. +1 −1  private/templates/EUROSHA/tasks.cfg
  19. +1 −1  private/templates/Georgia/tasks.cfg
  20. +1 −1  private/templates/IFRC/config.py
  21. +1 −1  private/templates/IFRC/tasks.cfg
  22. +1 −1  private/templates/IFRC_Demo/tasks.cfg
  23. +4 −0 private/templates/IFRC_Train/hrm_group_membership.csv
  24. +2 −1  private/templates/IFRC_Train/tasks.cfg
  25. +1 −1  private/templates/NYC/tasks.cfg
  26. +1 −1  private/templates/OCHA/tasks.cfg
  27. +1 −1  private/templates/Oxfam/tasks.cfg
  28. +1 −1  private/templates/RGIMS/tasks.cfg
  29. +1 −1  private/templates/SSF/tasks.cfg
  30. +1 −1  private/templates/SSF_Test/tasks.cfg
  31. +1 −1  private/templates/Sandy/tasks.cfg
  32. +1 −1  private/templates/SandyRelief/tasks.cfg
  33. +1 −1  private/templates/Shipment/tasks.cfg
  34. +1 −1  private/templates/Standard/tasks.cfg
  35. +1 −1  private/templates/default/tasks.cfg
  36. +1 −1  private/templates/regression/tasks.cfg
  37. +1 −1  private/templates/roles/tasks.cfg
  38. +1 −1  static/scripts/tools/build.sahana.py
  39. +3 −6 views/bootstrap_css.html
2  VERSION
View
@@ -1 +1 @@
-4d95f2d (2013-03-19 16:27:28)
+e433400 (2013-03-20 10:57:23)
14 controllers/pr.py
View
@@ -132,10 +132,9 @@ def prep(r):
elif r.interactive:
if r.representation == "popup":
# Hide "pe_label" and "missing" fields in person popups
- r.table.pe_label.readable = False
- r.table.pe_label.writable = False
- r.table.missing.readable = False
- r.table.missing.writable = False
+ table = r.table
+ table.pe_label.readable = table.pe_label.writable = False
+ table.missing.readable = table.missing.writable = False
# S3SQLCustomForm breaks popup return, so disable
s3db.clear_config("pr_person", "crud_form")
@@ -166,8 +165,7 @@ def prep(r):
raise HTTP(404)
elif r.id:
- r.table.volunteer.readable = True
- r.table.volunteer.writable = True
+ r.table.volunteer.readable = r.table.volunteer.writable = True
return True
s3.prep = prep
@@ -187,7 +185,7 @@ def postp(r, output):
list_fields=["id",
"group_id",
"group_head",
- "description"
+ "description",
])
# Basic tabs
@@ -203,7 +201,7 @@ def postp(r, output):
(T("Skills"), "competency"),
(T("Training"), "training"),
(T("Saved Searches"), "saved_search"),
- ]
+ ]
# Configuration tabs
tabs.append((T("Map Settings"), "config"))
3  models/zzz_1st_run.py
View
@@ -265,7 +265,8 @@
grandTotalStart = datetime.datetime.now()
for pop_setting in pop_list:
start = datetime.datetime.now()
- bi.clear_tasks()
+ # Clear Tasklist
+ bi.tasks = []
# Import data specific to the prepopulate setting
if isinstance(pop_setting, str):
path = os.path.join(request.folder,
15 modules/eden/hrm.py
View
@@ -4652,7 +4652,9 @@ def hrm_configure_pr_group_membership():
msg_record_deleted = T("Membership deleted"),
msg_list_empty = T("No Members currently registered"))
- phone_label = current.deployment_settings.get_ui_label_mobile_phone()
+ settings = current.deployment_settings
+ phone_label = settings.get_ui_label_mobile_phone()
+ site_label = settings.get_org_site_label()
list_fields = ["id",
"group_id$description",
"group_head",
@@ -4661,6 +4663,9 @@ def hrm_configure_pr_group_membership():
"person_id$last_name",
(T("Email"), "person_id$email.value"),
(phone_label, "person_id$phone.value"),
+ (current.messages.ORGANISATION,
+ "person_id$hrm_human_resource:organisation_id$name"),
+ (site_label, "person_id$hrm_human_resource:site_id$name"),
]
if current.request.function == "group_membership":
list_fields.insert(1, "group_id")
@@ -4722,7 +4727,10 @@ def prep(r):
if r.interactive or r.representation == "xls":
if r.component_name == "group_membership":
hrm_configure_pr_group_membership()
-
+ if r.representation == "xls":
+ # Modify Title of Report to show Team Name
+ s3.crud_strings.pr_group_membership.title_list = r.record.name
+
return True
s3.prep = prep
@@ -4734,7 +4742,8 @@ def postp(r, output):
update_url = URL(args=["[id]", "group_membership"])
S3CRUD.action_buttons(r, update_url=update_url)
if current.deployment_settings.has_module("msg") and \
- current.auth.permission.has_permission("update", c="hrm", f="compose"):
+ current.auth.permission.has_permission("update", c="hrm",
+ f="compose"):
s3.actions.append({
"url": URL(f="compose",
vars = {"group_id": "[id]"}),
2  modules/eden/pr.py
View
@@ -731,7 +731,7 @@ def model(self):
#"picture",
"gender",
"age_group",
- (messages.ORGANISATION, "hrm_human_resource:organisation_id$name")
+ (messages.ORGANISATION, "hrm_human_resource:organisation_id$name"),
],
crud_form = crud_form,
onvalidation=self.pr_person_onvalidation,
2  modules/s3/codecs/xls.py
View
@@ -43,8 +43,8 @@
raise
from gluon import *
-from gluon.storage import Storage
from gluon.contenttype import contenttype
+from gluon.storage import Storage
from ..s3codec import S3Codec
from ..s3utils import s3_unicode, s3_strip_markup
403 modules/s3/s3import.py
View
@@ -31,12 +31,14 @@
__all__ = ["S3Importer",
"S3ImportJob",
"S3ImportItem",
+ "S3BulkImporter",
]
import cPickle
import os
import sys
import tempfile
+import uuid
from copy import deepcopy
from datetime import datetime
try:
@@ -65,7 +67,7 @@
from s3crud import S3CRUD
from s3resource import S3Resource
-from s3utils import s3_mark_required, s3_has_foreign_key, s3_get_foreign_key, s3_unicode
+from s3utils import s3_debug, s3_mark_required, s3_has_foreign_key, s3_get_foreign_key, s3_unicode
from s3xml import S3XML
DEBUG = False
@@ -1727,7 +1729,6 @@ def __init__(self, job):
self.error = None
# Identification
- import uuid
self.item_id = uuid.uuid4() # unique ID for this item
self.id = None
self.uid = None
@@ -2372,7 +2373,7 @@ def update_policy(f):
field = u.get("field", None)
if isinstance(field, (list, tuple)):
pkey, fkey = field
- query = table.id == self.id
+ query = (table.id == self.id)
row = db(query).select(table[pkey],
limitby=(0, 1)).first()
if row:
@@ -2443,6 +2444,7 @@ def _resolve_references(self):
if not self.table:
return
+ db = current.db
items = self.job.items
for reference in self.references:
@@ -2480,8 +2482,8 @@ def _resolve_references(self):
if item:
fk = item.id
if fk and pkey != "id":
- row = current.db(ktable._id == fk).select(ktable[pkey],
- limitby=(0, 1)).first()
+ row = db(ktable._id == fk).select(ktable[pkey],
+ limitby=(0, 1)).first()
if not row:
fk = None
continue
@@ -2517,8 +2519,8 @@ def _update_reference(self, field, value):
if not value or not self.table:
return
- db = current.db
if self.id and self.permitted:
+ db = current.db
fieldtype = str(self.table[field].type)
if fieldtype.startswith("list:reference"):
query = (self.table.id == self.id)
@@ -2763,7 +2765,6 @@ def __init__(self, manager, table,
except:
pass
else:
- import uuid
self.job_id = uuid.uuid4() # unique ID for this job
# -------------------------------------------------------------------------
@@ -3354,4 +3355,392 @@ def restore_references(self):
item.parent = self.items[item.load_parent]
item.load_parent = None
+# =============================================================================
+class S3BulkImporter(object):
+ """
+ Import CSV files of data to pre-populate the database.
+ Suitable for use in Testing, Demos & Simulations
+
+ http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/PrePopulate
+ """
+
+ def __init__(self):
+ """ Constructor """
+
+ import csv
+ from xml.sax.saxutils import unescape
+
+ self.csv = csv
+ self.unescape = unescape
+ self.importTasks = []
+ self.specialTasks = []
+ self.tasks = []
+ self.alternateTables = {
+ "hrm_group_membership": {"tablename": "pr_group_membership",
+ "prefix": "pr",
+ "name": "group_membership"},
+ "hrm_person": {"tablename": "pr_person",
+ "prefix": "pr",
+ "name": "person"},
+ "member_person": {"tablename": "pr_person",
+ "prefix": "pr",
+ "name": "person"},
+ }
+ self.errorList = []
+ self.resultList = []
+
+ # -------------------------------------------------------------------------
+ def load_descriptor(self, path):
+ """
+ Load the descriptor file and then all the import tasks in that file
+ into the importTasks property.
+ The descriptor file is the file called tasks.cfg in path.
+ The file consists of a comma separated list of:
+ application, resource name, csv filename, xsl filename.
+ """
+
+ source = open(os.path.join(path, "tasks.cfg"), "r")
+ values = self.csv.reader(source)
+ for details in values:
+ if details == []:
+ continue
+ prefix = details[0][0].strip('" ')
+ if prefix == "#": # comment
+ continue
+ if prefix == "*": # specialist function
+ self.extractSpecialistLine(path, details)
+ else: # standard importer
+ self.extractImporterLine(path, details)
+
+ # -------------------------------------------------------------------------
+ def extractImporterLine(self, path, details):
+ """
+ Extract the details for an import Task
+ """
+
+ argCnt = len(details)
+ if argCnt == 4 or argCnt == 5:
+ # remove any spaces and enclosing double quote
+ app = details[0].strip('" ')
+ res = details[1].strip('" ')
+ request = current.request
+
+ csvFileName = details[2].strip('" ')
+ if csvFileName[:7] == "http://":
+ csv = csvFileName
+ else:
+ (csvPath, csvFile) = os.path.split(csvFileName)
+ if csvPath != "":
+ path = os.path.join(request.folder,
+ "private",
+ "templates",
+ csvPath)
+ csv = os.path.join(path, csvFile)
+
+ xslFileName = details[3].strip('" ')
+ templateDir = os.path.join(request.folder,
+ "static",
+ "formats",
+ "s3csv")
+ # Try the app directory in the templates directory first
+ xsl = os.path.join(templateDir, app, xslFileName)
+ _debug("%s %s" % (xslFileName, xsl))
+ if os.path.exists(xsl) == False:
+ # Now try the templates directory
+ xsl = os.path.join(templateDir, xslFileName)
+ _debug ("%s %s" % (xslFileName, xsl))
+ if os.path.exists(xsl) == False:
+ # Use the same directory as the csv file
+ xsl = os.path.join(path, xslFileName)
+ _debug ("%s %s" % (xslFileName, xsl))
+ if os.path.exists(xsl) == False:
+ self.errorList.append(
+ "Failed to find a transform file %s, Giving up." % xslFileName)
+ return
+ vars = None
+ if argCnt == 5:
+ vars = details[4]
+ self.tasks.append([1, app, res, csv, xsl, vars])
+ self.importTasks.append([app, res, csv, xsl, vars])
+ else:
+ self.errorList.append(
+ "prepopulate error: job not of length 4. %s job ignored" % task)
+
+ # -------------------------------------------------------------------------
+ def extractSpecialistLine(self, path, details):
+ """
+ Store a single import job into the importTasks property
+ """
+
+ function = details[1].strip('" ')
+ csv = None
+ if len(details) == 3:
+ fileName = details[2].strip('" ')
+ (csvPath, csvFile) = os.path.split(fileName)
+ if csvPath != "":
+ path = os.path.join(current.request.folder,
+ "private",
+ "templates",
+ csvPath)
+ csv = os.path.join(path, csvFile)
+ extraArgs = None
+ if len(details) == 4:
+ extraArgs = details[3].strip('" ')
+ self.tasks.append([2, function, csv, extraArgs])
+ self.specialTasks.append([function, csv, extraArgs])
+
+ # -------------------------------------------------------------------------
+ def execute_import_task(self, task):
+ """
+ Execute each import job, in order
+ """
+
+ start = datetime.now()
+ if task[0] == 1:
+ db = current.db
+ s3db = current.s3db
+ request = current.request
+ response = current.response
+ errorString = "prepopulate error: file %s missing"
+ # Store the view
+ view = response.view
+
+ _debug ("Running job %s %s (filename=%s transform=%s)" % (task[1], task[2], task[3], task[4]))
+ prefix = task[1]
+ name = task[2]
+ tablename = "%s_%s" % (prefix, name)
+ if tablename in self.alternateTables:
+ details = self.alternateTables[tablename]
+ if "tablename" in details:
+ tablename = details["tablename"]
+ s3db.table(tablename)
+ if "loader" in details:
+ loader = details["loader"]
+ if loader is not None:
+ loader()
+ if "prefix" in details:
+ prefix = details["prefix"]
+ if "name" in details:
+ name = details["name"]
+
+ try:
+ resource = s3db.resource(tablename)
+ except AttributeError:
+ # Table cannot be loaded
+ self.errorList.append("WARNING: Unable to find table %s import job skipped" % tablename)
+ return
+
+ # Check if the source file is accessible
+ filename = task[3]
+ if filename[:7] == "http://":
+ import urllib2
+ req = urllib2.Request(url=filename)
+ try:
+ f = urllib2.urlopen(req)
+ except urllib2.HTTPError, e:
+ self.errorList.append("Could not access %s: %s" % (filename, e.read()))
+
+ return
+ except:
+ self.errorList.append(errorString % filename)
+ return
+ else:
+ csv = f
+ else:
+ try:
+ csv = open(filename, "r")
+ except IOError:
+ self.errorList.append(errorString % filename)
+ return
+
+ # Check if the stylesheet is accessible
+ try:
+ open(task[4], "r")
+ except IOError:
+ self.errorList.append(errorString % task[4])
+ return
+
+ extra_data = None
+ if task[5]:
+ try:
+ extradata = self.unescape(task[5], {"'": '"'})
+ extradata = json.loads(extradata)
+ extra_data = extradata
+ except:
+ self.errorList.append("WARNING:5th parameter invalid, parameter %s ignored" % task[5])
+ try:
+ # @todo: add extra_data and file attachments
+ result = resource.import_xml(csv,
+ format="csv",
+ stylesheet=task[4],
+ extra_data=extra_data)
+ except SyntaxError, e:
+ self.errorList.append("WARNING: import error - %s (file: %s, stylesheet: %s)" %
+ (e, filename, task[4]))
+ return
+
+ if not resource.error:
+ db.commit()
+ else:
+ # Must roll back if there was an error!
+ error = resource.error
+ self.errorList.append("%s - %s: %s" % (
+ task[3], resource.tablename, error))
+ errors = current.xml.collect_errors(resource)
+ if errors:
+ self.errorList.extend(errors)
+ db.rollback()
+
+ # Restore the view
+ response.view = view
+ end = datetime.now()
+ duration = end - start
+ csvName = task[3][task[3].rfind("/") + 1:]
+ try:
+ # Python-2.7
+ duration = '{:.2f}'.format(duration.total_seconds()/60)
+ msg = "%s import job completed in %s mins" % (csvName, duration)
+ except AttributeError:
+ # older Python
+ msg = "%s import job completed in %s" % (csvName, duration)
+ self.resultList.append(msg)
+ if response.s3.debug:
+ s3_debug(msg)
+
+ # -------------------------------------------------------------------------
+ def execute_special_task(self, task):
+ """
+ Execute import tasks which require a custom function,
+ such as import_role
+ """
+
+ start = datetime.now()
+ s3 = current.response.s3
+ if task[0] == 2:
+ fun = task[1]
+ csv = task[2]
+ extraArgs = task[3]
+ if csv is None:
+ if extraArgs is None:
+ error = s3[fun]()
+ else:
+ error = s3[fun](extraArgs)
+ elif extraArgs is None:
+ error = s3[fun](csv)
+ else:
+ error = s3[fun](csv, extraArgs)
+ if error:
+ self.errorList.append(error)
+ end = datetime.now()
+ duration = end - start
+ try:
+ # Python-2.7
+ duration = '{:.2f}'.format(duration.total_seconds()/60)
+ msg = "%s import job completed in %s mins" % (fun, duration)
+ except AttributeError:
+ # older Python
+ msg = "%s import job completed in %s" % (fun, duration)
+ self.resultList.append(msg)
+ if s3.debug:
+ s3_debug(msg)
+
+ # -------------------------------------------------------------------------
+ def import_role(self, filename):
+ """ Import Roles from CSV """
+
+ # Check if the source file is accessible
+ try:
+ openFile = open(filename, "r")
+ except IOError:
+ return "Unable to open file %s" % filename
+
+ auth = current.auth
+ acl = auth.permission
+ create_role = auth.s3_create_role
+
+ def parseACL(_acl):
+ permissions = _acl.split("|")
+ aclValue = 0
+ for permission in permissions:
+ if permission == "READ":
+ aclValue = aclValue | acl.READ
+ if permission == "CREATE":
+ aclValue = aclValue | acl.CREATE
+ if permission == "UPDATE":
+ aclValue = aclValue | acl.UPDATE
+ if permission == "DELETE":
+ aclValue = aclValue | acl.DELETE
+ if permission == "ALL":
+ aclValue = aclValue | acl.ALL
+ return aclValue
+
+ reader = self.csv.DictReader(openFile)
+ roles = {}
+ acls = {}
+ args = {}
+ for row in reader:
+ if row != None:
+ role = row["role"]
+ if "description" in row:
+ desc = row["description"]
+ else:
+ desc = ""
+ rules = {}
+ extra_param = {}
+ if "controller" in row and row["controller"]:
+ rules["c"] = row["controller"]
+ if "function" in row and row["function"]:
+ rules["f"] = row["function"]
+ if "table" in row and row["table"]:
+ rules["t"] = row["table"]
+ if row["oacl"]:
+ rules["oacl"] = parseACL(row["oacl"])
+ if row["uacl"]:
+ rules["uacl"] = parseACL(row["uacl"])
+ #if "org" in row and row["org"]:
+ #rules["organisation"] = row["org"]
+ #if "facility" in row and row["facility"]:
+ #rules["facility"] = row["facility"]
+ if "entity" in row and row["entity"]:
+ rules["entity"] = row["entity"]
+ if "hidden" in row and row["hidden"]:
+ extra_param["hidden"] = row["hidden"]
+ if "system" in row and row["system"]:
+ extra_param["system"] = row["system"]
+ if "protected" in row and row["protected"]:
+ extra_param["protected"] = row["protected"]
+ if "uid" in row and row["uid"]:
+ extra_param["uid"] = row["uid"]
+ if role in roles:
+ acls[role].append(rules)
+ else:
+ roles[role] = [role,desc]
+ acls[role] = [rules]
+ if len(extra_param) > 0 and role not in args:
+ args[role] = extra_param
+ for rulelist in roles.values():
+ if rulelist[0] in args:
+ create_role(rulelist[0],
+ rulelist[1],
+ *acls[rulelist[0]],
+ **args[rulelist[0]])
+ else:
+ create_role(rulelist[0],
+ rulelist[1],
+ *acls[rulelist[0]])
+
+ # -------------------------------------------------------------------------
+ def perform_tasks(self, path):
+ """
+ Load and then execute the import jobs that are listed in the
+ descriptor file (tasks.cfg)
+ """
+
+ self.load_descriptor(path)
+ for task in self.tasks:
+ if task[0] == 1:
+ self.execute_import_task(task)
+ elif task[0] == 2:
+ self.execute_special_task(task)
+
# END =========================================================================
384 modules/s3/s3utils.py
View
@@ -1522,390 +1522,6 @@ def __init__(self, sqlrows,
components.append(TBODY(*tbody))
# =============================================================================
-class S3BulkImporter(object):
- """
- Import CSV files of data to pre-populate the database.
- Suitable for use in Testing, Demos & Simulations
- """
-
- def __init__(self):
- """ Constructor """
-
- import csv
- from xml.sax.saxutils import unescape
-
- self.csv = csv
- self.unescape = unescape
- self.importTasks = []
- self.specialTasks = []
- self.tasks = []
- self.alternateTables = {"hrm_person": {"tablename":"pr_person",
- "prefix":"pr",
- "name":"person"},
- "member_person": {"tablename":"pr_person",
- "prefix":"pr",
- "name":"person"},
- }
- self.errorList = []
- self.resultList = []
-
- # -------------------------------------------------------------------------
- def load_descriptor(self, path):
- """ Method that will load the descriptor file and then all the
- import tasks in that file into the importTasks property.
- The descriptor file is the file called tasks.txt in path.
- The file consists of a comma separated list of:
- application, resource name, csv filename, xsl filename.
- """
-
- source = open(os.path.join(path, "tasks.cfg"), "r")
- values = self.csv.reader(source)
- for details in values:
- if details == []:
- continue
- prefix = details[0][0].strip('" ')
- if prefix == "#": # comment
- continue
- if prefix == "*": # specialist function
- self.extractSpecialistLine(path, details)
- else: # standard importer
- self.extractImporterLine(path, details)
-
- # -------------------------------------------------------------------------
- def extractImporterLine(self, path, details):
- """
- Method that extract the details for an import Task
- """
- argCnt = len(details)
- if argCnt == 4 or argCnt == 5:
- # remove any spaces and enclosing double quote
- app = details[0].strip('" ')
- res = details[1].strip('" ')
- request = current.request
-
- csvFileName = details[2].strip('" ')
- if csvFileName[:7] == "http://":
- csv = csvFileName
- else:
- (csvPath, csvFile) = os.path.split(csvFileName)
- if csvPath != "":
- path = os.path.join(request.folder,
- "private",
- "templates",
- csvPath)
- csv = os.path.join(path, csvFile)
-
- xslFileName = details[3].strip('" ')
- templateDir = os.path.join(request.folder,
- "static",
- "formats",
- "s3csv",
- )
- # try the app directory in the templates directory first
- xsl = os.path.join(templateDir, app, xslFileName)
- _debug("%s %s" % (xslFileName, xsl))
- if os.path.exists(xsl) == False:
- # now try the templates directory
- xsl = os.path.join(templateDir, xslFileName)
- _debug ("%s %s" % (xslFileName, xsl))
- if os.path.exists(xsl) == False:
- # use the same directory as the csv file
- xsl = os.path.join(path, xslFileName)
- _debug ("%s %s" % (xslFileName, xsl))
- if os.path.exists(xsl) == False:
- self.errorList.append(
- "Failed to find a transform file %s, Giving up." % xslFileName)
- return
- vars = None
- if argCnt == 5:
- vars = details[4]
- self.tasks.append([1, app, res, csv, xsl, vars])
- self.importTasks.append([app, res, csv, xsl, vars])
- else:
- self.errorList.append(
- "prepopulate error: job not of length 4. %s job ignored" % task)
-
- # -------------------------------------------------------------------------
- def extractSpecialistLine(self, path, details):
- """ Method that will store a single import job into
- the importTasks property.
- """
- function = details[1].strip('" ')
- csv = None
- if len(details) == 3:
- fileName = details[2].strip('" ')
- (csvPath, csvFile) = os.path.split(fileName)
- if csvPath != "":
- path = os.path.join(current.request.folder,
- "private",
- "templates",
- csvPath)
- csv = os.path.join(path, csvFile)
- extraArgs = None
- if len(details) == 4:
- extraArgs = details[3].strip('" ')
- self.tasks.append([2, function, csv, extraArgs])
- self.specialTasks.append([function, csv, extraArgs])
-
- # -------------------------------------------------------------------------
- def load_import(self, controller, csv, xsl):
- """ Method that will store a single import job into
- the importTasks property.
- """
- self.importTasks.append([controller, csv, xsl])
-
- # -------------------------------------------------------------------------
- def execute_import_task(self, task):
- """ Method that will execute each import job, in order """
- start = datetime.datetime.now()
- if task[0] == 1:
- db = current.db
- request = current.request
- response = current.response
- errorString = "prepopulate error: file %s missing"
- # Store the view
- view = response.view
-
- _debug ("Running job %s %s (filename=%s transform=%s)" % (task[1], task[2], task[3], task[4]))
- prefix = task[1]
- name = task[2]
- tablename = "%s_%s" % (prefix, name)
- if tablename in self.alternateTables:
- details = self.alternateTables[tablename]
- if "tablename" in details:
- tablename = details["tablename"]
- current.s3db.table(tablename)
- if "loader" in details:
- loader = details["loader"]
- if loader is not None:
- loader()
- if "prefix" in details:
- prefix = details["prefix"]
- if "name" in details:
- name = details["name"]
-
- try:
- resource = current.s3db.resource(tablename)
- except AttributeError:
- # Table cannot be loaded
- self.errorList.append("WARNING: Unable to find table %s import job skipped" % tablename)
- return
-
- # Check if the source file is accessible
- filename = task[3]
- if filename[:7] == "http://":
- import urllib2
- req = urllib2.Request(url=filename)
- try:
- f = urllib2.urlopen(req)
- except urllib2.HTTPError, e:
- self.errorList.append("Could not access %s: %s" % (filename, e.read()))
-
- return
- except:
- self.errorList.append(errorString % filename)
- return
- else:
- csv = f
- else:
- try:
- csv = open(filename, "r")
- except IOError:
- self.errorList.append(errorString % filename)
- return
-
- # Check if the stylesheet is accessible
- try:
- open(task[4], "r")
- except IOError:
- self.errorList.append(errorString % task[4])
- return
-
- extra_data = None
- if task[5]:
- try:
- extradata = self.unescape(task[5], {"'": '"'})
- extradata = json.loads(extradata)
- extra_data = extradata
- except:
- self.errorList.append("WARNING:5th parameter invalid, parameter %s ignored" % task[5])
- try:
- # @todo: add extra_data and file attachments
- result = resource.import_xml(csv,
- format="csv",
- stylesheet=task[4],
- extra_data=extra_data)
- except SyntaxError, e:
- self.errorList.append("WARNING: import error - %s (file: %s, stylesheet: %s)" %
- (e, filename, task[4]))
- return
-
- if not resource.error:
- db.commit()
- else:
- # Must roll back if there was an error!
- error = resource.error
- self.errorList.append("%s - %s: %s" % (
- task[3], resource.tablename, error))
- errors = current.xml.collect_errors(resource)
- if errors:
- self.errorList.extend(errors)
- db.rollback()
-
- # Restore the view
- response.view = view
- end = datetime.datetime.now()
- duration = end - start
- csvName = task[3][task[3].rfind("/")+1:]
- try:
- # Python-2.7
- duration = '{:.2f}'.format(duration.total_seconds()/60)
- msg = "%s import job completed in %s mins" % (csvName, duration)
- except AttributeError:
- # older Python
- msg = "%s import job completed in %s" % (csvName, duration)
- self.resultList.append(msg)
- if response.s3.debug:
- s3_debug(msg)
-
- # -------------------------------------------------------------------------
- def execute_special_task(self, task):
- """
- """
-
- start = datetime.datetime.now()
- s3 = current.response.s3
- if task[0] == 2:
- fun = task[1]
- csv = task[2]
- extraArgs = task[3]
- if csv is None:
- if extraArgs is None:
- error = s3[fun]()
- else:
- error = s3[fun](extraArgs)
- elif extraArgs is None:
- error = s3[fun](csv)
- else:
- error = s3[fun](csv, extraArgs)
- if error:
- self.errorList.append(error)
- end = datetime.datetime.now()
- duration = end - start
- try:
- # Python-2.7
- duration = '{:.2f}'.format(duration.total_seconds()/60)
- msg = "%s import job completed in %s mins" % (fun, duration)
- except AttributeError:
- # older Python
- msg = "%s import job completed in %s" % (fun, duration)
- self.resultList.append(msg)
- if s3.debug:
- s3_debug(msg)
-
- # -------------------------------------------------------------------------
- def import_role(self, filename):
- """ Import Roles from CSV """
-
- # Check if the source file is accessible
- try:
- openFile = open(filename, "r")
- except IOError:
- return "Unable to open file %s" % filename
-
- auth = current.auth
- acl = auth.permission
- create_role = auth.s3_create_role
-
- def parseACL(_acl):
- permissions = _acl.split("|")
- aclValue = 0
- for permission in permissions:
- if permission == "READ":
- aclValue = aclValue | acl.READ
- if permission == "CREATE":
- aclValue = aclValue | acl.CREATE
- if permission == "UPDATE":
- aclValue = aclValue | acl.UPDATE
- if permission == "DELETE":
- aclValue = aclValue | acl.DELETE
- if permission == "ALL":
- aclValue = aclValue | acl.ALL
- return aclValue
-
- reader = self.csv.DictReader(openFile)
- roles = {}
- acls = {}
- args = {}
- for row in reader:
- if row != None:
- role = row["role"]
- if "description" in row:
- desc = row["description"]
- else:
- desc = ""
- rules = {}
- extra_param = {}
- if "controller" in row and row["controller"]:
- rules["c"] = row["controller"]
- if "function" in row and row["function"]:
- rules["f"] = row["function"]
- if "table" in row and row["table"]:
- rules["t"] = row["table"]
- if row["oacl"]:
- rules["oacl"] = parseACL(row["oacl"])
- if row["uacl"]:
- rules["uacl"] = parseACL(row["uacl"])
- #if "org" in row and row["org"]:
- #rules["organisation"] = row["org"]
- #if "facility" in row and row["facility"]:
- #rules["facility"] = row["facility"]
- if "entity" in row and row["entity"]:
- rules["entity"] = row["entity"]
- if "hidden" in row and row["hidden"]:
- extra_param["hidden"] = row["hidden"]
- if "system" in row and row["system"]:
- extra_param["system"] = row["system"]
- if "protected" in row and row["protected"]:
- extra_param["protected"] = row["protected"]
- if "uid" in row and row["uid"]:
- extra_param["uid"] = row["uid"]
- if role in roles:
- acls[role].append(rules)
- else:
- roles[role] = [role,desc]
- acls[role] = [rules]
- if len(extra_param) > 0 and role not in args:
- args[role] = extra_param
- for rulelist in roles.values():
- if rulelist[0] in args:
- create_role(rulelist[0],
- rulelist[1],
- *acls[rulelist[0]],
- **args[rulelist[0]])
- else:
- create_role(rulelist[0],
- rulelist[1],
- *acls[rulelist[0]])
-
- # -------------------------------------------------------------------------
- def clear_tasks(self):
- """ Clear the importTask list """
- self.tasks = []
-
- # -------------------------------------------------------------------------
- def perform_tasks(self, path):
- """ convenience method that will load and then execute the import jobs
- that are listed in the descriptor file
- """
- self.load_descriptor(path)
- for task in self.tasks:
- if task[0] == 1:
- self.execute_import_task(task)
- elif task[0] == 2:
- self.execute_special_task(task)
-
-# =============================================================================
class S3DateTime(object):
"""
Toolkit for date+time parsing/representation
2  private/templates/ADAT/tasks.cfg
View
@@ -10,7 +10,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
auth,user,regression/masterUsers.csv,user.xsl
# -----------------------------------------------------------------------------
2  private/templates/ARC/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
2  private/templates/CERT/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
#*,import_role,auth_roles.csv
2  private/templates/CSN/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/Crisis/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
10 private/templates/DRMP/event.csv
View
@@ -1,8 +1,8 @@
"Name","Description","Exercise","Zero Hour","Closed","Country","L1","L2","L3","KV:XX"
"Flood Manufahi",,,,,,,,,
-"Cyclone Simone",,,"2012-08","True",,,,,
+"Cyclone Simone",,,"2012-09","True",,,,,
"Drought Tibar",,,"2012-04","True",,,,,
-"Dili Tsunami",,,,,,,,,
-"Same Earthquake",,,,,,,,,
-"Aileu Floods",,,,,,,,,
-"Drought Zumalai",,,,,,,,,
+"Dili Tsunami",,,"2012-05","True",,,,,
+"Same Earthquake",,,"2012-06","True",,,,,
+"Aileu Floods",,,"2012-07","True",,,,,
+"Drought Zumalai",,,"2012-08","True",,,,,
2  private/templates/DRMP/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/DRRPP/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/Delphi/tasks.cfg
View
@@ -10,7 +10,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
*,import_role,delphi_roles.csv
"delphi","group","group.csv","group.xsl"
2  private/templates/EUROSHA/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/Georgia/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
2  private/templates/IFRC/config.py
View
@@ -5,8 +5,8 @@
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from s3 import IS_ONE_OF, s3forms
-settings = current.deployment_settings
T = current.T
+settings = current.deployment_settings
"""
Template settings for IFRC
2  private/templates/IFRC/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/IFRC_Demo/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Assumes that all of IFRC_Train has already been imported
# GIS
4 private/templates/IFRC_Train/hrm_group_membership.csv
View
@@ -0,0 +1,4 @@
+Team Name,First Name,Middle Name,Last Name,Email,Mobile Phone,HR Type,Organisation,Branch
+HQ,"Quito",,"Cromos",quito.cromos@redcross.tl,,staff,Timor-Leste Red Cross Society,
+HQ,"Herculano",,"Ximenes",herculano.ximenes@redcross.tl,,staff,Timor-Leste Red Cross Society,
+HQ,"Elly",,"Marques",elly.marques@redcross.tl,,staff,Timor-Leste Red Cross Society,
3  private/templates/IFRC_Train/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
@@ -85,6 +85,7 @@ hrm,person,staff.csv,person.xsl,"{'Type':'Staff'}"
hrm,person,volunteers.csv,person.xsl,"{'Type':'Volunteer'}"
hrm,training,training.csv,training.xsl
hrm,programme_hours,hrm_programme_hours.csv,programme_hours.xsl
+hrm,group_membership,hrm_group_membership.csv,group_membership.xsl
# -----------------------------------------------------------------------------
# Project Tool
org,facility,facility.csv,facility.xsl
2  private/templates/NYC/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/OCHA/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
#*,import_role,auth_roles.csv
2  private/templates/Oxfam/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
2  private/templates/RGIMS/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/SSF/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/SSF_Test/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,SSF/auth_roles.csv
2  private/templates/Sandy/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/SandyRelief/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,auth_roles.csv
2  private/templates/Shipment/tasks.cfg
View
@@ -11,7 +11,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# Roles
*,import_role,IFRC/auth_roles.csv
2  private/templates/Standard/tasks.cfg
View
@@ -12,7 +12,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
# http://eden.sahanafoundation.org/wiki/BluePrint/PrePopulate#OnFirstRun
#
#############################################################################
2  private/templates/default/tasks.cfg
View
@@ -10,7 +10,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# GIS
# Projections
2  private/templates/regression/tasks.cfg
View
@@ -10,7 +10,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
# GIS
# Projections
2  private/templates/roles/tasks.cfg
View
@@ -6,7 +6,7 @@
#
# For details on how to import data into the system see the following:
# zzz_1st_run
-# s3Tools::S3BulkImporter
+# s3import::S3BulkImporter
##########################################################################
*,import_role,dvi_roles.csv
*,import_role,hms_roles.csv
2  static/scripts/tools/build.sahana.py
View
@@ -477,7 +477,7 @@ def docss():
for file in ["bootstrap.css",
"bootstrap-responsive.css",
"font-awesome.css",
- "bootstrap-multiselect.css",
+ #"bootstrap-multiselect.css",
]:
listCSS.append("../../styles/bootstrap/%s" % file)
9 views/bootstrap_css.html
View
@@ -1,16 +1,13 @@
{{if s3.debug:}}
<link href="/{{=appname}}/static/styles/bootstrap/bootstrap.css" rel="stylesheet">
-<!--<link href="/{{=appname}}/static/styles/bootstrap/bootstrap-responsive.css" rel="stylesheet">-->
+<link href="/{{=appname}}/static/styles/bootstrap/bootstrap-responsive.css" rel="stylesheet">
<link href="/{{=appname}}/static/styles/bootstrap/font-awesome.css" rel="stylesheet">
{{else:}}
{{if s3.cdn:}}
<!-- For Sites Hosted on the Public Internet, using a CDN will provide better performance -->
-<!--<link href="//netdna.bootstrapcdn.com/twitter-bootstrap/2.3.1/css/bootstrap-combined.min.css" rel="stylesheet">-->
-<link href="/{{=appname}}/static/styles/bootstrap/bootstrap.css" rel="stylesheet">
+<link href="//netdna.bootstrapcdn.com/twitter-bootstrap/2.3.1/css/bootstrap-combined.min.css" rel="stylesheet">
<link href="//netdna.bootstrapcdn.com/font-awesome/3.0.2/css/font-awesome.css" rel="stylesheet">
{{else:}}
-<!--<link href="/{{=appname}}/static/styles/bootstrap/bootstrap-combined.min.css" rel="stylesheet">-->
-<link href="/{{=appname}}/static/styles/bootstrap/bootstrap.css" rel="stylesheet">
-<link href="/{{=appname}}/static/styles/bootstrap/font-awesome.css" rel="stylesheet">
+<link href="/{{=appname}}/static/styles/bootstrap/bootstrap-combined.min.css" rel="stylesheet">
{{pass}}
{{pass}}
Please sign in to comment.
Something went wrong with that request. Please try again.