This repository has been archived by the owner on Nov 22, 2022. It is now read-only.
-
-
Notifications
You must be signed in to change notification settings - Fork 37
/
util.pyi
109 lines (86 loc) · 3.42 KB
/
util.pyi
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# Stubs for pyspark.ml.util (Python 3)
from typing import Any, Dict, Generic, Optional, Type, TypeVar, Union
from pyspark.context import SparkContext
from pyspark.ml.param import Params
from pyspark.sql.context import SQLContext
from pyspark.sql.session import SparkSession
# Module-level type variables and Python-2 compatibility aliases.
S = TypeVar("S")
# Bound is a string forward reference: MLReadable is defined later in this file.
R = TypeVar("R", bound="MLReadable")
# Py2 name aliases used in annotations elsewhere in the stubs. These must be
# value-level aliases (basestring = str), not Type[str]: annotating a parameter
# with Type[str] would mean "the class object str", not a string instance.
basestring = str
unicode = str
long = int
class Identifiable:
    """Stub for an object identified by a unique ``uid`` string."""
    # Unique identifier of the instance; presumably assigned in __init__
    # at runtime — the stub does not show how it is generated.
    uid: str
    def __init__(self) -> None: ...
class BaseReadWrite:
    """Base stub shared by writers and readers: Spark session/context plumbing."""
    def __init__(self) -> None: ...
    # Associates a SparkSession with this object. Declared to return either a
    # writer or a reader; concrete subclasses narrow the return type for
    # fluent chaining (see JavaMLWriter.session / JavaMLReader.session).
    def session(self, sparkSession: SparkSession) -> Union[MLWriter, MLReader]: ...
    @property
    def sparkSession(self) -> SparkSession: ...
    # Underlying SparkContext of the session.
    @property
    def sc(self) -> SparkContext: ...
class MLWriter(BaseReadWrite):
    """Stub for the utility that saves ML instances to a path."""
    # Whether save() may clobber an existing path; toggled via overwrite().
    shouldOverwrite: bool = ...
    def __init__(self) -> None: ...
    def save(self, path: str) -> None: ...
    # Implementation hook invoked by save(); presumably overridden by
    # concrete writers — the runtime relationship is not visible in the stub.
    def saveImpl(self, path: str) -> None: ...
    # Fluent setter enabling overwrite; returns a writer for chaining.
    def overwrite(self) -> MLWriter: ...
class GeneralMLWriter(MLWriter):
    """Stub for a writer that can persist to external data-source formats."""
    # Short name or fully-qualified class name of the output data source.
    source: str
    # Fluent setter: in pyspark, format() assigns self.source and returns self,
    # so annotate the return as GeneralMLWriter (a backward-compatible
    # narrowing of the previous MLWriter annotation).
    def format(self, source: str) -> GeneralMLWriter: ...
class JavaMLWriter(MLWriter):
    """Stub for the writer wrapping a Java/Scala MLWriter implementation."""
    def __init__(self, instance: JavaMLWritable) -> None: ...
    def save(self, path: str) -> None: ...
    # The fluent setters below narrow the inherited return types to
    # JavaMLWriter so chained calls keep the Java-backed type.
    def overwrite(self) -> JavaMLWriter: ...
    def option(self, key: str, value: Any) -> JavaMLWriter: ...
    def session(self, sparkSession: SparkSession) -> JavaMLWriter: ...
class GeneralJavaMLWriter(JavaMLWriter):
    """Stub for the Java-backed writer supporting alternative output formats."""
    # NOTE(review): parent JavaMLWriter.__init__ accepts JavaMLWritable, but
    # this override is declared with the wider MLWritable — confirm against
    # the pyspark source whether the widening is intentional.
    def __init__(self, instance: MLWritable) -> None: ...
    # Fluent setter selecting the output format; returns self's type.
    def format(self, source: str) -> GeneralJavaMLWriter: ...
class MLWritable:
    """Mixin stub for objects that can be saved via an MLWriter."""
    def write(self) -> MLWriter: ...
    # Convenience save; presumably equivalent to write().save(path) —
    # the stub does not show the implementation.
    def save(self, path: str) -> None: ...
class JavaMLWritable(MLWritable):
    """Mixin stub for Java-backed writables; write() narrows to JavaMLWriter."""
    def write(self) -> JavaMLWriter: ...
class GeneralJavaMLWritable(JavaMLWritable):
    """Mixin stub whose write() narrows further to GeneralJavaMLWriter."""
    def write(self) -> GeneralJavaMLWriter: ...
class MLReader(BaseReadWrite, Generic[R]):
    """Stub for the utility that loads saved ML instances of type R."""
    def load(self, path: str) -> R: ...
class JavaMLReader(MLReader[R]):
    """Stub for the reader wrapping a Java/Scala MLReader implementation."""
    # NOTE(review): Type[JavaMLReadable] does not bind R, so R stays unsolved
    # unless the reader is explicitly parametrized — confirm intended usage.
    def __init__(self, clazz: Type[JavaMLReadable]) -> None: ...
    def load(self, path: str) -> R: ...
    # Fluent session setter narrowed to keep the parametrized reader type.
    def session(self, sparkSession: SparkSession) -> JavaMLReader[R]: ...
class MLReadable(Generic[R]):
    """Mixin stub for types that can be loaded back from a saved path."""
    @classmethod
    def read(cls) -> MLReader[R]: ...
    # Convenience load; presumably equivalent to read().load(path).
    @classmethod
    def load(cls, path: str) -> R: ...
class JavaMLReadable(MLReadable[R]):
    """Mixin stub for Java-backed readables; read() narrows to JavaMLReader."""
    @classmethod
    def read(cls) -> JavaMLReader[R]: ...
class DefaultParamsWritable(MLWritable):
    """Mixin stub for Params-based objects saved via the default JSON writer."""
    def write(self) -> MLWriter: ...
class DefaultParamsWriter(MLWriter):
    """Stub for the writer that persists Params-based instances as metadata."""
    # The object being saved.
    instance: DefaultParamsWritable
    def __init__(self, instance: DefaultParamsWritable) -> None: ...
    def saveImpl(self, path: str) -> None: ...
    # Writes instance metadata under `path`; extraMetadata/paramMap allow the
    # caller to augment or override what is recorded (defaults not shown here).
    @staticmethod
    def saveMetadata(instance: DefaultParamsWritable, path: str, sc: SparkContext, extraMetadata: Optional[Dict[str, Any]] = ..., paramMap: Optional[Dict[str, Any]] = ...) -> None: ...
class DefaultParamsReadable(MLReadable[R]):
    """Mixin stub for types loaded back via the default metadata reader."""
    @classmethod
    def read(cls) -> MLReader[R]: ...
class DefaultParamsReader(MLReader[R]):
    """Stub for the reader that restores Params-based instances from metadata."""
    # Class object that load() will instantiate.
    cls: Type[R]
    # Use Type[R] (not the previous Type[MLReadable]) so R is bound at
    # construction and load() is inferred to return the concrete class —
    # consistent with the `cls: Type[R]` attribute above. Backward compatible:
    # R is bounded by MLReadable, so all previous arguments still type-check.
    def __init__(self, cls: Type[R]) -> None: ...
    def load(self, path: str) -> R: ...
    # Reads the JSON metadata stored at `path`; optionally checks the class name.
    @staticmethod
    def loadMetadata(path: str, sc: SparkContext, expectedClassName: str = ...) -> Dict[str, Any]: ...
    # Applies the Param values recorded in `metadata` onto `instance`.
    @staticmethod
    def getAndSetParams(instance: R, metadata: Dict[str, Any]) -> None: ...
    @staticmethod
    def loadParamsInstance(path: str, sc: SparkContext) -> R: ...
class HasTrainingSummary(Generic[S]):
    """Stub mixin for models exposing a training summary of type S."""
    # Whether a summary is available on this instance.
    @property
    def hasSummary(self) -> bool: ...
    # The training summary; presumably raises if hasSummary is False — the
    # stub does not show the runtime behavior.
    @property
    def summary(self) -> S: ...