This repository has been archived by the owner on Feb 2, 2024. It is now read-only.

Commit

Merge 7608176 into 3654a22
shssf committed Aug 11, 2019
2 parents 3654a22 + 7608176 commit 9e7b363
Showing 8 changed files with 153 additions and 127 deletions.
112 changes: 68 additions & 44 deletions hpat/distributed.py
@@ -1,46 +1,70 @@
from __future__ import print_function, division, absolute_import

import operator
import types as pytypes # avoid confusion with numba.types
import copy
import warnings
from collections import defaultdict
import numpy as np

import numba
from numba import (ir, types, typing, config, numpy_support,
ir_utils, postproc)
from numba.ir_utils import (mk_unique_var, replace_vars_inner, find_topo_order,
dprint_func_ir, remove_dead, mk_alloc,
get_global_func_typ, get_name_var_table,
get_call_table, get_tuple_table, remove_dels,
compile_to_numba_ir, replace_arg_nodes,
guard, get_definition, require, GuardException,
find_callname, build_definitions,
find_build_sequence, find_const, is_get_setitem)
from numba import ir, types, typing, config, numpy_support, ir_utils, postproc
from numba.ir_utils import (
mk_unique_var,
replace_vars_inner,
find_topo_order,
dprint_func_ir,
remove_dead,
mk_alloc,
get_global_func_typ,
get_name_var_table,
get_call_table,
get_tuple_table,
remove_dels,
compile_to_numba_ir,
replace_arg_nodes,
guard,
get_definition,
require,
GuardException,
find_callname,
build_definitions,
find_build_sequence,
find_const,
is_get_setitem)
from numba.inline_closurecall import inline_closure_call
from numba.typing import signature
from numba.parfor import (get_parfor_reductions, get_parfor_params,
wrap_parfor_blocks, unwrap_parfor_blocks)
from numba.parfor import Parfor, lower_parfor_sequential
import numpy as np
from numba.parfor import (
Parfor,
lower_parfor_sequential,
get_parfor_reductions,
get_parfor_params,
wrap_parfor_blocks,
unwrap_parfor_blocks)

import hpat
import hpat.utils
from hpat import distributed_api, distributed_lower
from hpat.io.pio_api import h5file_type, h5group_type
from hpat import (distributed_api,
distributed_lower) # import lower for module initialization
from hpat.str_ext import string_type
from hpat.str_arr_ext import string_array_type
from hpat.distributed_analysis import (Distribution,
DistributedAnalysis)

# from mpi4py import MPI
import hpat.utils
from hpat.utils import (is_alloc_callname, is_whole_slice, is_array_container,
get_slice_step, is_array, is_np_array, find_build_tuple,
debug_prints, ReplaceFunc, gen_getitem, is_call,
is_const_slice)
from hpat.distributed_api import Reduce_Type
from hpat.distributed_analysis import Distribution, DistributedAnalysis
from hpat.utils import (
is_alloc_callname,
is_whole_slice,
is_array_container,
get_slice_step,
is_array,
is_np_array,
find_build_tuple,
debug_prints,
ReplaceFunc,
gen_getitem,
is_call,
is_const_slice)
from hpat.hiframes.pd_dataframe_ext import DataFrameType


distributed_run_extensions = {}

# analysis data for debugging
@@ -770,10 +794,9 @@ def f(arr):
assign.value = rhs.args[0]
return [assign]

if (fdef == ('get_series_data',
'hpat.hiframes.api') or fdef == ('get_series_index',
'hpat.hiframes.api') or fdef == ('get_dataframe_data',
'hpat.hiframes.pd_dataframe_ext')):
if ((fdef == ('get_series_data', 'hpat.hiframes.api')
or fdef == ('get_series_index', 'hpat.hiframes.api')
or fdef == ('get_dataframe_data', 'hpat.hiframes.pd_dataframe_ext'))):
out = [assign]
arr = assign.target
# gen len() using 1D_Var reduce approach.
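The hunk stops at the comment about generating len() with a 1D_Var reduce. As a hedged illustration of that idea only, and not the code HPAT actually generates, the global length of a variably chunked array is the sum of the per-rank local lengths; the sketch below uses mpi4py purely for illustration, which is an assumption of the example rather than a dependency of this module.

# Hedged sketch: global len() of a 1D_Var array as a sum-reduce of local lengths.
# mpi4py is illustrative only; HPAT emits its own distributed_api reduction.
from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

local_chunk = np.arange(rank + 3)                           # each rank owns a different-sized piece
global_len = comm.allreduce(len(local_chunk), op=MPI.SUM)   # same total on every rank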
@@ -829,8 +852,10 @@ def _run_call_np(self, lhs, func_name, assign, args):
"""transform np.func() calls
"""
# allocs are handled separately
assert not ((self._is_1D_Var_arr(lhs) or self._is_1D_arr(lhs)) and func_name in hpat.utils.np_alloc_callnames), (
"allocation calls handled separately 'empty', 'zeros', 'ones', 'full' etc.")
is_1D_bool = (self._is_1D_Var_arr(lhs) or self._is_1D_arr(lhs))
err_str = "allocation calls handled separately 'empty', 'zeros', 'ones', 'full' etc."
assert not (is_1D_bool and func_name in hpat.utils.np_alloc_callnames), err_str

out = [assign]
scope = assign.target.scope
loc = assign.loc
@@ -1075,9 +1100,9 @@ def f(cond):

def _fix_parallel_df_index(self, df):
def f(df): # pragma: no cover
l = len(df)
start = hpat.distributed_api.dist_exscan(l)
ind = np.arange(start, start + l)
length = len(df)
start = hpat.distributed_api.dist_exscan(length)
ind = np.arange(start, start + length)
df2 = hpat.hiframes.pd_dataframe_ext.set_df_index(df, ind)
return df2
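
For readers of the hunk above, a plain-NumPy stand-in for the exclusive-scan pattern: hpat.distributed_api.dist_exscan gives each rank the number of rows held by lower-ranked processes, so the per-rank arange slices concatenate into a gap-free global index. The chunk sizes below are hypothetical.

# Hedged sketch of the dist_exscan + arange pattern used in f(df) above,
# simulated on one process with hypothetical per-rank chunk sizes.
import numpy as np

local_lengths = [4, 2, 5]                          # rows owned by ranks 0, 1, 2
starts = np.cumsum([0] + local_lengths[:-1])       # exclusive scan: [0, 4, 6]
pieces = [np.arange(s, s + n) for s, n in zip(starts, local_lengths)]
assert np.array_equal(np.concatenate(pieces), np.arange(sum(local_lengths)))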

@@ -1459,9 +1484,8 @@ def f(arr, dim1): # pragma: no cover
def _run_getsetitem(self, arr, index_var, node, full_node):
out = [full_node]
# 1D_Var arrays need adjustment for 1D_Var parfors as well
if ((self._is_1D_arr(arr.name) or
(self._is_1D_Var_arr(arr.name) and arr.name in self._array_starts))
and (arr.name, index_var.name) in self._parallel_accesses):
if ((self._is_1D_arr(arr.name) or (self._is_1D_Var_arr(arr.name) and arr.name in self._array_starts))
and ((arr.name, index_var.name) in self._parallel_accesses)):
scope = index_var.scope
loc = index_var.loc
#ndims = self._get_arr_ndim(arr.name)
@@ -2154,18 +2178,18 @@ def _get_arr_ndim(self, arrname):
def _is_1D_arr(self, arr_name):
# some arrays like stencil buffers are added after analysis so
# they are not in dists list
return (
arr_name in self._dist_analysis.array_dists and self._dist_analysis.array_dists[arr_name] == Distribution.OneD)
return ((arr_name in self._dist_analysis.array_dists
and self._dist_analysis.array_dists[arr_name] == Distribution.OneD))

def _is_1D_Var_arr(self, arr_name):
# some arrays like stencil buffers are added after analysis so
# they are not in dists list
return (
arr_name in self._dist_analysis.array_dists and self._dist_analysis.array_dists[arr_name] == Distribution.OneD_Var)
return ((arr_name in self._dist_analysis.array_dists
and self._dist_analysis.array_dists[arr_name] == Distribution.OneD_Var))

def _is_REP(self, arr_name):
return (
arr_name not in self._dist_analysis.array_dists or self._dist_analysis.array_dists[arr_name] == Distribution.REP)
return ((arr_name not in self._dist_analysis.array_dists
or self._dist_analysis.array_dists[arr_name] == Distribution.REP))


def _find_first_print(body):
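The reformatted predicates at the end of this file classify an array as OneD (block-distributed), OneD_Var (variable-sized chunks), or REP (replicated), with REP as the conservative default when the analysis recorded nothing. A hedged standalone sketch of that lookup logic, using a plain dict and illustrative enum members in place of DistributedAnalysis state:

# Hedged sketch of the _is_1D_arr / _is_1D_Var_arr / _is_REP lookups above;
# only the member names come from the diff, the values are illustrative.
from enum import Enum, auto

class Distribution(Enum):
    REP = auto()
    OneD_Var = auto()
    OneD = auto()

array_dists = {"A": Distribution.OneD, "B": Distribution.OneD_Var}

def is_1D_arr(name):
    return array_dists.get(name) == Distribution.OneD

def is_1D_Var_arr(name):
    return array_dists.get(name) == Distribution.OneD_Var

def is_REP(name):
    # arrays missing from the analysis are conservatively treated as replicated
    return array_dists.get(name, Distribution.REP) == Distribution.REP

assert is_1D_arr("A") and is_1D_Var_arr("B") and is_REP("C")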
10 changes: 6 additions & 4 deletions hpat/distributed_api.py
@@ -1,20 +1,21 @@
import time
from enum import Enum
import llvmlite.binding as ll
import operator
import numpy as np

import numba
from numba import types
from numba.typing.templates import infer_global, AbstractTemplate, infer
from numba.typing import signature
from numba.extending import models, register_model, intrinsic, overload
from numba.typing import signature
from numba.typing.templates import infer_global, AbstractTemplate, infer

import hpat
from hpat import config
from hpat.str_arr_ext import (string_array_type, num_total_chars, StringArray,
pre_alloc_string_array, get_offset_ptr,
get_data_ptr, convert_len_arr_to_offset)
from hpat.utils import (debug_prints, empty_like_type, _numba_to_c_type_map, unliteral_all)
import time

if hpat.config.config_transport_mpi:
from . import transport_mpi as transport
@@ -428,7 +429,8 @@ def alltoallv_tup_overload(send_data, out_data, send_counts, recv_counts, send_d

func_text = "def f(send_data, out_data, send_counts, recv_counts, send_disp, recv_disp):\n"
for i in range(count):
func_text += " alltoallv(send_data[{}], out_data[{}], send_counts, recv_counts, send_disp, recv_disp)\n".format(i, i)
func_text += " alltoallv(send_data[{}], out_data[{}],\n".format(i, i)
func_text += " send_counts, recv_counts, send_disp, recv_disp)\n"
func_text += " return\n"

loc_vars = {}
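To make the rewrapped template above concrete, the sketch below rebuilds the generated source for a hypothetical two-element tuple and prints it; alltoallv itself is the HPAT call and is not executed here.

# Hedged sketch: rebuild the generated source for count = 2 to show the
# unrolled per-element alltoallv calls produced by alltoallv_tup_overload.
count = 2
func_text = "def f(send_data, out_data, send_counts, recv_counts, send_disp, recv_disp):\n"
for i in range(count):
    func_text += "    alltoallv(send_data[{}], out_data[{}],\n".format(i, i)
    func_text += "        send_counts, recv_counts, send_disp, recv_disp)\n"
func_text += "    return\n"
print(func_text)   # two alltoallv calls, one per tuple element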
23 changes: 12 additions & 11 deletions hpat/distributed_lower.py
@@ -1,23 +1,24 @@
import time
import atexit
from numba.typing.templates import infer_global, AbstractTemplate
import sys
from numba.typing import signature
import llvmlite.binding as ll
import operator
import numpy as np
from llvmlite import ir as lir
import llvmlite.binding as ll

import numba.targets.arrayobj
from numba import types, cgutils
from numba.targets.imputils import lower_builtin
from numba.targets.arrayobj import make_array
from numba.extending import overload
import numba.targets.arrayobj
from numba.targets.imputils import impl_ret_new_ref, impl_ret_borrowed
from numba.targets.imputils import lower_builtin, impl_ret_new_ref, impl_ret_borrowed
from numba.targets.arrayobj import make_array
from numba.typing import signature
from numba.typing.templates import infer_global, AbstractTemplate
from numba.typing.builtins import IndexValueType
import numpy as np

import hpat
from hpat import distributed_api
from hpat.utils import _numba_to_c_type_map
from hpat.distributed_api import mpi_req_numba_type, ReqArrayType, req_array_type
import time
from llvmlite import ir as lir
from . import hdist

if hpat.config.config_transport_mpi:
@@ -569,7 +570,7 @@ def dist_permutation_array_index(lhs, lhs_len, dtype_size, rhs, p, p_len):
permutation_array_index(lhs.ctypes, lhs_len, elem_size, c_rhs.ctypes,
p.ctypes, p_len)

########### finalize MPI when exiting ####################
# ********* finalize MPI when exiting ********************


def hpat_finalize():
12 changes: 7 additions & 5 deletions hpat/io/pio_api.py
@@ -1,15 +1,17 @@
import operator
import numpy as np
from llvmlite import ir as lir

import numba
from numba import types, cgutils
from numba.typing.templates import infer_global, AbstractTemplate, AttributeTemplate, bound_function
from numba.typing import signature
from llvmlite import ir as lir
from numba.typing.templates import infer_global, AbstractTemplate, AttributeTemplate, bound_function
from numba.extending import register_model, models, infer_getattr, infer, intrinsic
from hpat.str_ext import string_type

import hpat
from hpat.utils import unliteral_all
import hpat.io
from hpat.str_ext import string_type
from hpat.utils import unliteral_all

if hpat.config._has_h5py:
import h5py
@@ -18,7 +20,7 @@
ll.add_symbol('hpat_h5_read_filter', _hdf5.hpat_h5_read_filter)


################## Types #######################
# **************** Types ***********************

class H5FileType(types.Opaque):
def __init__(self):
38 changes: 20 additions & 18 deletions hpat/io/xenon_ext.py
@@ -1,5 +1,9 @@
import numpy as np
from llvmlite import ir as lir
import llvmlite.binding as ll

import numba
from numba import ir, ir_utils, types
from numba import cgutils, ir, ir_utils, types
from numba.ir_utils import (mk_unique_var, replace_vars_inner, find_topo_order,
dprint_func_ir, remove_dead, mk_alloc, remove_dels,
get_name_var_table, replace_var_names,
@@ -8,21 +12,15 @@
find_callname, guard, require, get_definition,
build_definitions, replace_vars_stmt, replace_vars_inner)

from llvmlite import ir as lir
import llvmlite.binding as ll
from numba.targets.imputils import impl_ret_new_ref
from numba.extending import lower_builtin, overload, intrinsic, register_model, models
from numba.typing import signature
from numba import cgutils
from numba.targets.imputils import lower_builtin
from numba.targets.imputils import impl_ret_new_ref, lower_builtin
from numba.targets.arrayobj import make_array

import numpy as np
import hpat
from hpat.utils import get_constant, NOT_CONSTANT
from hpat.str_ext import string_type, unicode_to_char_ptr
from hpat.str_arr_ext import StringArray, StringArrayPayloadType, construct_string_array
from hpat.str_arr_ext import string_array_type
from hpat.str_arr_ext import StringArray, StringArrayPayloadType, construct_string_array, string_array_type


def remove_xenon(rhs, lives, call_list):
@@ -170,18 +168,16 @@ def get_column_read_nodes(c_type, cvar, xe_connect_var, xe_dset_var, i, schema_a

loc = cvar.loc

func_text = (
'def f(xe_connect_var, xe_dset_var, schema_arr):\n col_size = get_column_size_xenon(xe_connect_var, xe_dset_var, {})\n'. format(i))
func_text = ('def f(xe_connect_var, xe_dset_var, schema_arr):\n')
func_text += (' col_size = get_column_size_xenon(xe_connect_var, xe_dset_var, {})\n'.format(i))
# func_text += ' print(col_size)\n'
# generate strings differently since upfront allocation is not possible
if c_type == string_array_type:
# pass size for easier allocation and distributed analysis
func_text += ' column = read_xenon_str(xe_connect_var, xe_dset_var, {}, col_size, schema_arr)\n'.format(
i)
func_text += ' column = read_xenon_str(xe_connect_var, xe_dset_var, {}, col_size, schema_arr)\n'.format(i)
else:
el_type = get_element_type(c_type.dtype)
func_text += ' column = np.empty(col_size, dtype=np.{})\n'.format(
el_type)
func_text += ' column = np.empty(col_size, dtype=np.{})\n'.format(el_type)
func_text += ' status = read_xenon_col(xe_connect_var, xe_dset_var, {}, column, schema_arr)\n'.format(i)
loc_vars = {}
exec(func_text, {}, loc_vars)
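As a hedged, self-contained view of what exec() receives here, the sketch below rebuilds the numeric-column text for column index 0 with an assumed float64 dtype and runs it against stubbed Xenon externals; the stubs, the hard-coded size, and the added return statement are assumptions of this example, not HPAT code.

# Hedged sketch: the generated reader for column 0 with a float64 dtype,
# exec'd against stubs so it can run standalone (the real externals are
# get_column_size_xenon / read_xenon_col registered by HPAT).
import numpy as np

def get_column_size_xenon(conn, dset, i):               # stub for the Xenon external
    return 4

def read_xenon_col(conn, dset, i, column, schema_arr):  # stub for the Xenon external
    column[:] = 0.0
    return 0

func_text = 'def f(xe_connect_var, xe_dset_var, schema_arr):\n'
func_text += '    col_size = get_column_size_xenon(xe_connect_var, xe_dset_var, 0)\n'
func_text += '    column = np.empty(col_size, dtype=np.float64)\n'
func_text += '    status = read_xenon_col(xe_connect_var, xe_dset_var, 0, column, schema_arr)\n'
func_text += '    return column\n'                      # return added for this demo only

loc_vars = {}
exec(func_text, {'np': np,
                 'get_column_size_xenon': get_column_size_xenon,
                 'read_xenon_col': read_xenon_col}, loc_vars)
print(loc_vars['f'](None, None, None))                  # -> four zeros from the stub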
@@ -271,9 +267,15 @@ def __init__(self):
register_model(XeDSetType)(models.OpaqueModel)

get_column_size_xenon = types.ExternalFunction(
"get_column_size_xenon", types.int64(
xe_connect_type, xe_dset_type, types.intp))
# read_xenon_col = types.ExternalFunction("c_read_xenon", types.void(string_type, types.intp, types.voidptr, types.CPointer(types.int64)))
"get_column_size_xenon", types.int64(xe_connect_type, xe_dset_type, types.intp))
# read_xenon_col = types.ExternalFunction(
# "c_read_xenon",
# types.void(
# string_type,
# types.intp,
# types.voidptr,
# types.CPointer(
# types.int64)))
xe_connect = types.ExternalFunction("c_xe_connect", xe_connect_type(types.voidptr))
xe_open = types.ExternalFunction("c_xe_open", xe_dset_type(xe_connect_type, types.voidptr))
xe_close = types.ExternalFunction("c_xe_close", types.void(xe_connect_type, xe_dset_type))
