This repository has been archived by the owner on Nov 22, 2022. It is now read-only.

Commit

Fix return type for DataFrame.groupBy / cube / rollup
zero323 committed Nov 11, 2018
1 parent eb87b0e commit e2d225f
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions third_party/3/pyspark/sql/dataframe.pyi
@@ -10,6 +10,7 @@ from py4j.java_gateway import JavaObject # type: ignore
 from pyspark.sql._typing import ColumnOrName, Literal, LiteralType
 from pyspark.sql.types import *
 from pyspark.sql.context import SQLContext
+from pyspark.sql.group import GroupedData
 from pyspark.sql.readwriter import DataFrameWriter
 from pyspark.sql.streaming import DataStreamWriter
 from pyspark.sql.column import Column
@@ -93,17 +94,17 @@ class DataFrame:
     def selectExpr(self, *expr: List[str]) -> DataFrame: ...
     def filter(self, condition: ColumnOrName) -> DataFrame: ...
     @overload
-    def groupBy(self, *cols: ColumnOrName) -> DataFrame: ...
+    def groupBy(self, *cols: ColumnOrName) -> GroupedData: ...
     @overload
-    def groupBy(self, __cols: List[ColumnOrName]) -> DataFrame: ...
+    def groupBy(self, __cols: List[ColumnOrName]) -> GroupedData: ...
     @overload
-    def rollup(self, *cols: ColumnOrName) -> DataFrame: ...
+    def rollup(self, *cols: ColumnOrName) -> GroupedData: ...
     @overload
-    def rollup(self, __cols: List[ColumnOrName]) -> DataFrame: ...
+    def rollup(self, __cols: List[ColumnOrName]) -> GroupedData: ...
     @overload
-    def cube(self, *cols: ColumnOrName) -> DataFrame: ...
+    def cube(self, *cols: ColumnOrName) -> GroupedData: ...
     @overload
-    def cube(self, __cols: List[ColumnOrName]) -> DataFrame: ...
+    def cube(self, __cols: List[ColumnOrName]) -> GroupedData: ...
     def agg(self, *exprs: Union[Column, Dict[str, str]]) -> DataFrame: ...
     def union(self, other: DataFrame) -> DataFrame: ...
     def unionAll(self, other: DataFrame) -> DataFrame: ...
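The corrected stubs matter because groupBy, rollup, and cube return a GroupedData object at runtime, not a DataFrame; with the old annotations a type checker would reject GroupedData-only calls such as agg on the grouping result. The following is a minimal sketch of the usage the fixed stubs now type correctly; the session setup, column names, and sample data are illustrative, not part of this commit.

    from pyspark.sql import SparkSession
    from pyspark.sql import functions as F

    spark = SparkSession.builder.getOrCreate()
    df = spark.createDataFrame([("a", 1), ("a", 2), ("b", 3)], ["key", "value"])

    # groupBy is typed as returning GroupedData, so a checker accepts agg here
    # and would flag DataFrame-only methods on `grouped` instead.
    grouped = df.groupBy("key")
    result = grouped.agg(F.sum("value").alias("total"))  # agg yields a DataFrame

    result.show()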
