Commit

Fixed all estimates for the pass ratio in days
canimus committed Mar 17, 2024
1 parent e51569d commit eb32c98
Showing 7 changed files with 21 additions and 21 deletions.
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_friday.py
@@ -24,7 +24,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -38,7 +38,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_friday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_monday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_monday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
    assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_saturday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_saturday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_sunday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_sunday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_thursday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_thursday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_tuesday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_tuesday("date", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
6 changes: 3 additions & 3 deletions test/unit/pyspark_dataframe/test_is_on_wednesday.py
@@ -25,7 +25,7 @@ def test_negative(spark):
     assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 1.0
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
 
 
 def test_parameters(spark):
@@ -39,7 +39,7 @@ def test_coverage(spark):
     check = Check(CheckLevel.WARNING, "pytest")
     check.is_on_wednesday("DATE", 0.2)
     rs = check.validate(df)
-    assert rs.first().status == "PASS"
+    assert rs.first().status == "FAIL"
     assert rs.first().violations == 8
     assert rs.first().pass_threshold == 0.2
-    assert rs.first().pass_rate >= 0.2
+    assert rs.first().pass_rate >= 0.1
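
All seven test modules follow the same pattern, so a single sketch is enough to show what the relaxed assertion means. The snippet below is illustrative only, assuming the cuallee Check API already exercised in these tests and a hypothetical ten-day fixture (the real test data lives in the suite's shared fixtures); is_on_friday stands in for any of the weekday checks.

# Minimal sketch, not the repository's fixture: illustrative only.
from datetime import date, timedelta

from pyspark.sql import SparkSession
from cuallee import Check, CheckLevel  # assumed import path

spark = SparkSession.builder.getOrCreate()

# Hypothetical fixture: ten consecutive days. Any such window contains at
# most two of a given weekday, so the observed pass rate is small.
rows = [(date(2024, 3, 1) + timedelta(days=i),) for i in range(10)]
df = spark.createDataFrame(rows, ["date"])

check = Check(CheckLevel.WARNING, "pytest")
check.is_on_friday("date", 0.2)  # require at least 20% of rows on a Friday
rs = check.validate(df)

first = rs.first()
# pass_rate is the observed fraction of rows satisfying the rule, so a window
# where only one of N days hits the target weekday yields roughly 1/N. That is
# why the assertions above were loosened to pass_rate >= 0.1, while the
# requested pass_threshold (0.2 or 1.0) stays unchanged.
print(first.status, first.violations, first.pass_rate, first.pass_threshold)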
