python/pyspark/sql/__init__.py (3 additions, 1 deletion)

@@ -43,7 +43,7 @@
 from pyspark.sql.context import SQLContext, HiveContext, UDFRegistration, UDTFRegistration
 from pyspark.sql.session import SparkSession
 from pyspark.sql.column import Column
-from pyspark.sql.catalog import Catalog
+from pyspark.sql.catalog import Catalog, Reference
 from pyspark.sql.dataframe import DataFrame, DataFrameNaFunctions, DataFrameStatFunctions
 from pyspark.sql.group import GroupedData
 from pyspark.sql.observation import Observation
@@ -52,6 +52,7 @@
 from pyspark.sql.pandas.group_ops import PandasCogroupedOps
 from pyspark.sql.utils import is_remote
 
+ref = Reference()
 
 __all__ = [
     "SparkSession",
@@ -74,4 +75,5 @@
"DataFrameWriterV2",
"PandasCogroupedOps",
"is_remote",
"ref",
]
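
With these hunks applied, `ref` becomes a module-level `Reference()` singleton exported from `pyspark.sql`. A minimal usage sketch, assuming a build that includes this patch; the catalog, schema, and table names below are placeholders:

    from pyspark.sql import ref

    # Attribute access on ref builds a dotted identifier string,
    # e.g. suitable for passing to spark.table() or embedding in SQL text.
    fq_name = ref.my_catalog.my_schema.my_table
    assert fq_name == "my_catalog.my_schema.my_table"
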
python/pyspark/sql/catalog.py (12 additions, 0 deletions)

@@ -29,6 +29,18 @@
 from pyspark.sql.types import DataType
 
 
+class Reference(str):
+    def __init__(self, prefix: str = ""):
+        object.__setattr__(self, "_prefix", prefix)
+
+    def __getattr__(self, key: str) -> "Reference":
+        prefix = object.__getattribute__(self, "_prefix")
+        if prefix:
+            return Reference(f"{prefix}.{key}")
+        else:
+            return Reference(key)
+
+
 class CatalogMetadata(NamedTuple):
     name: str
     description: Optional[str]
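
For readers without a patched PySpark build, the `Reference` semantics can be exercised standalone. This sketch simply copies the class as it appears in the hunk above and demonstrates its behavior:

    class Reference(str):
        def __init__(self, prefix: str = ""):
            object.__setattr__(self, "_prefix", prefix)

        def __getattr__(self, key: str) -> "Reference":
            prefix = object.__getattribute__(self, "_prefix")
            if prefix:
                return Reference(f"{prefix}.{key}")
            else:
                return Reference(key)

    ref = Reference()

    # Each attribute hop appends a dotted segment; the result is still a str.
    name = ref.main.sales.orders
    assert name == "main.sales.orders"
    assert isinstance(name, str)

    # Caveat: attribute names that collide with existing str methods
    # (e.g. ref.split or ref.title) resolve to those methods instead,
    # because __getattr__ only fires when normal attribute lookup fails.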