Skip to content

Commit

Permalink
Version 1.1.0
Browse files Browse the repository at this point in the history
  • Loading branch information
root authored and root committed Apr 2, 2019
1 parent cd01a6a commit 493d942
Show file tree
Hide file tree
Showing 40 changed files with 3,697 additions and 40 deletions.
10 changes: 9 additions & 1 deletion pibiapp/config/desktop.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,13 @@ def get_data():
"type": "module",
"label": _("External Data"),
"hidden": 1
}
},
{
"module_name": "Redash",
"color": "grey",
"icon": "octicon octicon-pulse",
"type": "module",
"label": _("Redash"),
"hidden": 1
}
]
5 changes: 5 additions & 0 deletions pibiapp/config/external_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,11 @@ def get_data():
"name": "Ext Data Source",
"description": _("Load files as new doctype of Frappe"),
},
{
"type": "doctype",
"name": "Change DocType Empty",
"description": _("Change the fields of an external data DocType while it is empty"),
},
{
"type": "doctype",
"name": "Successive loads",
Expand Down
26 changes: 26 additions & 0 deletions pibiapp/config/redash.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from __future__ import unicode_literals
from frappe import _

def get_data():
	"""Return the desk-sidebar configuration for the Redash module.

	Two sections are exposed: the dashboards viewer and its settings,
	each containing a single doctype shortcut.
	"""
	def section(label, doctype_name, description):
		# Build one sidebar section holding a single doctype entry.
		return {
			"label": _(label),
			"items": [
				{
					"type": "doctype",
					"name": doctype_name,
					"description": _(description),
				}
			]
		}

	return [
		section("Dashboards", "Redash Business Intelligence",
			"It shows the dashboards designed in Redash and enabled in Frappe"),
		section("Settings", "Redash Dashboards",
			"List of dashboards and viewing permissions"),
	]
16 changes: 0 additions & 16 deletions pibiapp/config/xlsdata.py

This file was deleted.

201 changes: 191 additions & 10 deletions pibiapp/external_data/data_manage.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Dolores Juliana Fdez Martin
# License: GNU General Public License v3. See license.txt
#
Expand All @@ -18,11 +19,22 @@

from __future__ import unicode_literals
import frappe, ast
from frappe import _
from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
from frappe.utils.csvutils import read_csv_content
import six
from six import text_type, string_types
from datetime import date, datetime
from frappe.utils import nowdate
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_url_to_form
from xml.etree import ElementTree as ET
import copy

def force_to_unicode(text):
	"""Return *text* reduced to plain ASCII.

	``None`` becomes a single space; any character outside the ASCII
	range is silently dropped (``encode('ascii', 'ignore')``). Used to
	sanitise column labels before deriving fieldnames from them.
	"""
	if text is None:  # identity check, not equality (== None is un-idiomatic)
		text = " "
	return text.encode('ascii', 'ignore').decode('ascii')

def loaddata(self, method=None):
ext_rows = readfile(file_url=self.import_file, data_format=self.data_format)
Expand All @@ -36,7 +48,7 @@ def loaddata(self, method=None):
numrecords = addrecords(name=self.name, datarows=datarows)
message = str(numrecords) + ' records loaded in the DocType ' + self.name
frappe.publish_realtime('msgprint', message )

def reloaddata(self, method=None):
ext_rows = readfile(file_url=self.import_file, data_format=self.data_format)
row_stop = 0 if self.row_end == 0 else self.row_start + self.row_end - 1
Expand Down Expand Up @@ -105,6 +117,8 @@ def analyzedata(row_labels, row_start, ext_rows, row_stop=None, newdata=False, n
xrow = 0
tt = []
labels = []
tmp_list = []
fields = []
lengths = []
datatypes = []
datamandatory = []
Expand All @@ -117,19 +131,36 @@ def analyzedata(row_labels, row_start, ext_rows, row_stop=None, newdata=False, n
continue
if row_stop and row_stop > 0 and xrow > row_stop:
break
# Row empty?
if columns > 0:
rowempty = 1
i = 0
for cell in row:
if i >= columns: break
if cell and str(cell).strip() != "":
rowempty = 0
break
i += 1
if rowempty == 1: continue

tmp_list = []
i = 0
for cell in row:
# Label empty END
if columns == 0 and (cell == None or cell == ""): break
if columns > 0 and i >= columns: break

if isinstance(cell, six.string_types):
if columns == 0:
tmp_list.append(cell.replace('"',''))
tmp_list.append(cell.replace('"','').replace('.','').replace(',',''))
else: tmp_list.append(cell.strip().replace('"','').encode('utf8'))
else: tmp_list.append(cell)
else:
tmp_list.append(cell)
if newdata and columns > 0 and i <= columns :
x = 0
if isinstance(cell, six.string_types): x = len(cell)
lengths[i] = max( x, lengths[i])
if datatypes[i] == "Data":
if datatypes[i] == "PTE":
if lengths[i] > 140:
datatypes[i] = "Small Text"
else:
Expand All @@ -138,32 +169,49 @@ def analyzedata(row_labels, row_start, ext_rows, row_stop=None, newdata=False, n
if isinstance(cell, float): datatypes[i] = "Float"
if isinstance(cell, datetime): datatypes[i] = "Datetime"
if isinstance(cell, date): datatypes[i] = "Date"
if isinstance(cell, str): datatypes[i] = "Data"
else:
if cell and cell != "" and cell != None and cell != 0 and cell != "0":
if lengths[i] > 140: datatypes[i] = "Small Text"
if datatypes[i] == "Int" and not isinstance(cell, int) and not isinstance(cell, long): datatypes[i] = "Data"
if datatypes[i] == "Float" and not isinstance(cell, float): datatypes[i] = "Data"
if datatypes[i] == "Date" and not isinstance(cell, date): datatypes[i] = "Data"
if datatypes[i] == "Datetime" and not isinstance(cell, datetime): datatypes[i] = "Data"
if not cell or cell == "" or cell == None:
if datamandatory[i] == 1: datamandatory[i] = 0
if datatypes[i] == "Data" and datamandatory[i] == 1:
if datatypes[i] == "Data" and datamandatory[i] == 1 and isinstance(cell, str):
if not cell.strip() in datavalues[i]:
datavalues[i].append(cell.strip())
i += 1
if xrow == row_labels:
labels = tmp_list
columns = len(labels)
fields = tmp_list
columns = len(tmp_list)
labels = copy.deepcopy(tmp_list)
i = 1
while i <= columns :
lengths.append(0)
datatypes.append("Data")
datatypes.append("PTE")
datamandatory.append(1)
datavalues.append([])
j = i - 1
labels[j] = force_to_unicode(labels[j])
fields[j] = force_to_unicode(fields[j])
fields[j] = str(fields[j]).lower().replace(" ", "_")
if fields[j] == "_":
fields[j] = "column_" + str(i)
labels[j] = "Column " + str(i)
i += 1
datarows.append(fields)
# template diferent
if not newdata:
meta = frappe.get_meta(name)
i = 1
while i <= columns :
j = i - 1
df = meta.get_field(fields[j])
if not df:
frappe.throw(_("The fields of the file you are trying to load do not correspond to the fields in the initial template file"))
i += 1
else:
datarows.append(tmp_list)
if newdata:
Expand All @@ -177,6 +225,7 @@ def analyzedata(row_labels, row_start, ext_rows, row_stop=None, newdata=False, n
datavalues[i] = listdv
else:
datavalues[i] = ""
if datatypes[i] == "PTE": datatypes[i] = "Data"
i += 1
adddoctype(name, module, fields, labels, datatypes, datamandatory, datavalues)
return datarows
Expand All @@ -189,6 +238,7 @@ def adddoctype(name, module, fields, labels, datatypes, datamandatory, datavalue
"name": name,
"quick_entry": 0,
"custom": 1 })

i = 0
for cell in labels:
doc_field = frappe.get_doc({
Expand All @@ -203,6 +253,19 @@ def adddoctype(name, module, fields, labels, datatypes, datamandatory, datavalue
"in_list_view": 1 if i < 3 else 0})
doc.append ("fields", doc_field)
i += 1

doc_field = frappe.get_doc({
"doctype": "DocField",
"label": "Row Original",
"fieldtype": "Text Editor",
"fieldname": "row_original",
"reqd": 0,
"in_standard_filter": 0,
"search_index": 0,
"options": "",
"in_list_view": 0,
"hidden": 1})
doc.append ("fields", doc_field)

if roles == None:
roles = ["System Manager", "Administrator"]
Expand All @@ -214,7 +277,9 @@ def adddoctype(name, module, fields, labels, datatypes, datamandatory, datavalue
doc.insert(ignore_permissions=True)

def addrecords(name, datarows, limit=65000):
meta = frappe.get_meta(name)
columns = 0
x = 0
j = 0
for row in datarows:
if j >= limit: break
Expand All @@ -226,10 +291,126 @@ def addrecords(name, datarows, limit=65000):
i = 0
for cell in row:
if i >= columns: break
df = meta.get_field(fields[i])
fieldtype = df.fieldtype if df else "Data"
if fieldtype in ("Int", "Check"):
cell = cint(cell)
elif fieldtype in ("Float", "Currency", "Percent"):
cell = flt(cell)
elif fieldtype == "Date":
if cell and isinstance(cell, datetime):
cell = str(cell)
if cell and isinstance(cell, string_types):
cell = getdate(parse_date(cell))
elif fieldtype == "Datetime":
if cell:
if " " in cell:
_date, _time = cell.split()
else:
_date, _time = cell, '00:00:00'
_date = parse_date(cell)
cell = get_datetime(_date + " " + _time)
else:
cell = None
elif fieldtype in ("Link", "Dynamic Link", "Data") and cell:
cell = cstr(cell)

datadoc.setdefault(fields[i], cell)
i += 1

row_original = str(datadoc)
xduplicates = frappe.get_list(name, filters={'row_original': row_original}, fields=['name'])
j += 1
if len(xduplicates) > 0:
x += 1
continue
datadoc = conversionrules(datadoc)
datadoc.setdefault("row_original", row_original)
doc = frappe.get_doc(datadoc)
doc.insert(ignore_permissions=True)
j += 1
return j
return j - x

def conversionrules(doc, conversion_type='During loading'):
	"""Apply the 'Conversion Rules' configured for *doc*'s doctype.

	Rules matching ``doc['doctype']`` and *conversion_type* are fetched
	in ``execution_order``; each rule reads ``origin_field``, transforms
	it with :func:`executeaction` and stores the result in
	``receiver_field``.

	Returns the (mutated) *doc* dict.
	"""
	rules = frappe.get_all("Conversion Rules",
		filters={"reference_doctype": doc['doctype'], "conversion_type": conversion_type},
		fields=["origin_field", "action", "receiver_field"],
		order_by='execution_order')
	for rule in rules:
		original_value = doc.get(rule.origin_field)
		converted = executeaction(original_value, rule.action)
		# .get() avoids the KeyError the plain doc[...] lookup raised when
		# the receiver field was not yet present in the document dict.
		if doc.get(rule.receiver_field):
			doc[rule.receiver_field] = converted
		else:
			doc.setdefault(rule.receiver_field, converted)
	return doc

def executeaction(x, action, param1=None, param2=None):
	"""Apply one text-transformation *action* to *x*.

	Args:
		x: the value to transform; returned unchanged when falsy.
		action: one of the action labels listed below.
		param1, param2: search / replacement strings, required by the
			three "Replace character or string" actions.

	Returns the transformed string. Unknown actions, replace actions
	missing their parameters, and falsy input all return *x* unchanged
	(the original ``list.index`` lookup raised ValueError on an unknown
	action instead of degrading gracefully).
	"""
	if not x:
		return x
	actions = ['Convert to Uppercase',
		'Convert to Lowercase',
		'Convert First Letter to Uppercase',
		'Remove White Character from Beginning and End',
		'Replace character or string (All)',
		'Replace character or string (the First one)',
		'Replace character or string (the Last one)']
	if action not in actions:
		return x
	i = actions.index(action)
	if i == 0: return x.upper()
	if i == 1: return x.lower()
	if i == 2: return x[0].upper() + x[1:].lower()
	if i == 3: return x.strip()
	if param1 is not None and param2 is not None:
		if i == 4: return x.replace(param1, param2)
		if i == 5: return x.replace(param1, param2, 1)
		if i == 6:
			# Replace only the LAST occurrence (was listed but never
			# implemented): split once from the right, rejoin with the
			# replacement. No occurrence -> unchanged.
			head, sep, tail = x.rpartition(param1)
			return head + param2 + tail if sep else x
	return x

def changedoctype(self, method=None):
	"""Copy edited field properties from a 'Change DocType Empty' document
	onto the matching DocField rows of the referenced doctype.

	Bails out when the referenced DocType does not exist, or when the
	table is missing and no field rows were provided. For every child
	'Change DocField' row the matching DocField (same parent + fieldname)
	is looked up and any differing property in *keydata* is copied over.
	"""
	if not frappe.db.exists('DocType', self.reference_doctype):
		return
	# assumes self.docfield is the child-table of edited fields — TODO confirm
	if not frappe.db.table_exists(self.name) and not self.docfield:
		return
	allfields = frappe.get_list('Change DocField', filters={'parent': self.name}, fields="*")
	keydata = ["label", "fieldtype", "reqd", "search_index", "in_list_view",
		"in_standard_filter", "options", "default", "length", "in_global_search",
		"allow_in_quick_entry", "bold", "description"]
	for onefield in allfields:
		docname = frappe.get_list('DocField',
			filters={'parent': self.reference_doctype, 'fieldname': onefield.fieldname},
			fields=["name"])
		if not docname:
			# No matching field on the target DocType: skip instead of the
			# IndexError that docname[0] raised on an empty result.
			continue
		doc = frappe.get_doc('DocField', docname[0]['name'])
		changed = False
		for onekey in keydata:
			if doc.get(onekey) != onefield.get(onekey):
				setattr(doc, onekey, onefield.get(onekey))
				changed = True
		if changed:  # avoid a needless save (and its hooks) when nothing differs
			doc.save()

def doctype_query(doctype, txt, searchfield, start, page_len, filters, as_dict=False):
	"""Link-field search query returning Ext Data Source names.

	Only sources that still exist as a DocType are listed (inner join
	with tabDocType). NOTE(review): the original SQL bound ``txt``,
	``start`` and ``page_len`` but contained no placeholders, so every
	row was always returned unfiltered; the WHERE/LIMIT below applies
	them the way a standard Frappe search query does.
	"""
	return frappe.db.sql("""select eds.name
		from `tabExt Data Source` eds
		inner join `tabDocType` dt on eds.name = dt.name
		where eds.name like %(txt)s
		order by eds.name
		limit %(start)s, %(page_len)s
		""", {
			"today": nowdate(),
			"txt": "%%%s%%" % txt,
			"_txt": txt.replace("%", ""),
			"start": start,
			"page_len": page_len
		}, as_dict=as_dict)

@frappe.whitelist()
def deletedata(doctype):
	"""Delete every record of an external-data DocType.

	Validates that *doctype* exists and is registered as an Ext Data
	Source before counting (the original counted first, so a missing
	doctype surfaced as a low-level DB error instead of the intended
	message). Publishes a realtime progress bar for 5+ records and a
	final summary message.

	Returns the list of record names that could not be deleted.
	"""
	if not frappe.db.exists('DocType', doctype):
		frappe.throw(_("{0} is not a doctype of the database.").format(doctype))
	if not frappe.db.exists('Ext Data Source', doctype):
		frappe.throw(_("{0} is not a External Data.").format(doctype))
	numrecords = frappe.db.count(doctype)
	if not numrecords:
		frappe.throw(_("{0} is empty, it contains {1} records.").format(doctype, numrecords))

	records = frappe.get_all(doctype, fields=['name'])
	failed = []
	# enumerate keeps the progress index advancing even when a delete
	# fails (the original only bumped i on success).
	for i, dd in enumerate(records):
		d = dd.name
		try:
			frappe.delete_doc(doctype, d)
			if numrecords >= 5:
				frappe.publish_realtime("progress",
					dict(progress=[i + 1, numrecords], title=_('Deleting {0}').format(doctype), description=d),
					user=frappe.session.user)
		except Exception:
			failed.append(d)

	# Fixed copy-paste message: these records were deleted, not loaded.
	message = str(numrecords - len(failed)) + ' records deleted in the DocType ' + doctype
	frappe.publish_realtime('msgprint', message)
	return failed
Empty file.

0 comments on commit 493d942

Please sign in to comment.