@@ -767,19 +767,23 @@ def _index_with_as_index(self, b):
767767         new.names = gp.names + original.names
768768         return new
769769
770-     def _try_cast(self, result, obj):
770+     def _try_cast(self, result, obj, numeric_only=False):
771771         """
772772         try to cast the result to our obj original type,
773773         we may have roundtripped thru object in the mean-time
774774
775+         if numeric_only is True, then only try to cast numerics
776+         and not datetimelikes
777+
775778         """
776779         if obj.ndim > 1:
777780             dtype = obj.values.dtype
778781         else:
779782             dtype = obj.dtype
780783
781784         if not is_scalar(result):
782-             result = _possibly_downcast_to_dtype(result, dtype)
785+             if numeric_only and is_numeric_dtype(dtype) or not numeric_only:
786+                 result = _possibly_downcast_to_dtype(result, dtype)
783787
784788         return result
785789
@@ -830,7 +834,7 @@ def _python_agg_general(self, func, *args, **kwargs):
830834         for name, obj in self._iterate_slices():
831835             try:
832836                 result, counts = self.grouper.agg_series(obj, f)
833-                 output[name] = self._try_cast(result, obj)
837+                 output[name] = self._try_cast(result, obj, numeric_only=True)
834838             except TypeError:
835839                 continue
836840
@@ -1117,7 +1121,11 @@ def sem(self, ddof=1):
11171121     @Appender(_doc_template)
11181122     def size(self):
11191123         """Compute group sizes"""
1120-         return self.grouper.size()
1124+         result = self.grouper.size()
1125+
1126+         if isinstance(self.obj, Series):
1127+             result.name = getattr(self, 'name', None)
1128+         return result
11211129
11221130     sum = _groupby_function('sum', 'add', np.sum)
11231131     prod = _groupby_function('prod', 'prod', np.prod)
@@ -1689,7 +1697,9 @@ def size(self):
16891697         ids, _, ngroup = self.group_info
16901698         ids = _ensure_platform_int(ids)
16911699         out = np.bincount(ids[ids != -1], minlength=ngroup or None)
1692-         return Series(out, index=self.result_index, dtype='int64')
1700+         return Series(out,
1701+                       index=self.result_index,
1702+                       dtype='int64')
16931703
16941704 @cache_readonly
16951705 def _max_groupsize (self ):
@@ -2908,7 +2918,8 @@ def transform(self, func, *args, **kwargs):
29082918         result = concat(results).sort_index()
29092919
29102920         # we will only try to coerce the result type if
2911-         # we have a numeric dtype
2921+         # we have a numeric dtype, as these are *always* udfs
2922+         # the cython take a different path (and casting)
29122923         dtype = self._selected_obj.dtype
29132924         if is_numeric_dtype(dtype):
29142925             result = _possibly_downcast_to_dtype(result, dtype)
0 commit comments