
Commit 7e8a018

Andreas Kodewitz authored and fpoms committed
Update black version to fix build (snorkel-team#1693)
1 parent f7ca012 commit 7e8a018

File tree: 13 files changed, +32 -25 lines


requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -43,7 +43,7 @@ dill>=0.3.0,<0.4.0
 
 #### DEV TOOLS
 
-black>=19.3b0,<20.0
+black>=22.3
 flake8>=3.7.0,<4.0.0
 isort>=4.3.0,<5.0.0
 mypy==0.720
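
The version bump is the actual build fix: black releases older than 22.3.0 fail to import under click>=8.1 (click removed an internal helper that old black relied on), which is presumably the breakage this commit addresses. Moving from the 19.x pin to 22.3 also changes black's default style, which is why every other file in this commit is a pure reformat. A minimal sketch of previewing the new style with black's own API, assuming black>=22.3 is installed:

```python
import black

# black 22.x hugs the power operator between simple operands,
# one of the style changes visible throughout this commit.
print(black.format_str("x.num = x.num ** 2\n", mode=black.Mode()))
# -> x.num = x.num**2
```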

snorkel/utils/lr_schedulers.py

Lines changed: 3 additions & 1 deletion
@@ -40,5 +40,7 @@ class LRSchedulerConfig(Config):
     warmup_unit: str = "batches"  # [epochs, batches]
     warmup_percentage: float = 0.0  # warm up percentage
     min_lr: float = 0.0  # minimum learning rate
-    exponential_config: ExponentialLRSchedulerConfig = ExponentialLRSchedulerConfig()  # type:ignore
+    exponential_config: ExponentialLRSchedulerConfig = (
+        ExponentialLRSchedulerConfig()  # type:ignore
+    )
     step_config: StepLRSchedulerConfig = StepLRSchedulerConfig()  # type:ignore
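
A note on the reformat above: the original assignment exceeds black's default 88-character line limit, and black 22.x wraps long right-hand sides in parentheses rather than leaving the line overlong, carrying the `# type:ignore` comment along with the constructor call.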

test/augmentation/apply/test_tf_applier.py

Lines changed: 2 additions & 2 deletions
@@ -16,15 +16,15 @@
 
 @transformation_function()
 def square(x: DataPoint) -> DataPoint:
-    x.num = x.num ** 2
+    x.num = x.num**2
     return x
 
 
 @transformation_function()
 def square_returns_none(x: DataPoint) -> DataPoint:
     if x.num == 2:
         return None
-    x.num = x.num ** 2
+    x.num = x.num**2
     return x
 
 
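This hunk is the first of many identical reformats in this commit (the labeling and map tests below get the same treatment): black 22.x removes the spaces around `**` when both operands are simple (names, attribute chains, or numeric literals) and keeps the spacing otherwise. A small sketch of the rule, using hypothetical snippets rather than code from the repo:

```python
import black

mode = black.Mode()
print(black.format_str("y = x ** 2\n", mode=mode), end="")
# -> y = x**2           (both operands simple: hugged)
print(black.format_str("y = f(x) ** 2\n", mode=mode), end="")
# -> y = f(x) ** 2      (call operand: spacing kept)
```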
test/classification/test_classifier_convergence.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ def setUpClass(cls):
 
     @pytest.mark.complex
     def test_convergence(self):
-        """ Test multitask classifier convergence with two tasks."""
+        """Test multitask classifier convergence with two tasks."""
 
         dataloaders = []
 
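This one comes from black's docstring normalization (present since 20.8b0, so new relative to the old 19.x pin): leading and trailing whitespace inside a docstring is stripped, which removes the stray space after the opening quotes.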
test/classification/test_loss.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ def test_sce_equals_ce(self):
 
         ce_loss = F.cross_entropy(Y_probs, Y_golds, reduction="mean")
         ces_loss = cross_entropy_with_probs(Y_probs, Y_golds_probs, reduction="mean")
-        np.testing.assert_equal(ce_loss.numpy(), ces_loss.numpy())
+        np.testing.assert_almost_equal(ce_loss.numpy(), ces_loss.numpy())
 
     def test_perfect_predictions(self):
         # Does soft ce loss achieve approx. 0 loss with perfect predictions?
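
This is the only change in the commit that is not formatting: the two losses come from different code paths (`F.cross_entropy` vs. `cross_entropy_with_probs`), so bit-exact float equality is brittle, and `assert_almost_equal` instead checks agreement to 7 decimal places by default. A small illustration with hypothetical values, not taken from the test:

```python
import numpy as np

a = 0.1 + 0.2  # 0.30000000000000004 in IEEE-754 double precision
b = 0.3

# np.testing.assert_equal(a, b) would raise: the values differ in the last bits.
np.testing.assert_almost_equal(a, b)  # passes: |a - b| < 1.5e-7 (decimal=7 default)
```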

test/classification/training/test_trainer.py

Lines changed: 6 additions & 1 deletion
@@ -250,7 +250,12 @@ def dict_check(self, dict1, dict2):
            if isinstance(dict1_, collections.Mapping):
                self.dict_check(dict1_, dict2_)
            elif isinstance(dict1_, torch.Tensor):
-               self.assertTrue(torch.eq(dict1_, dict2_,).all())
+               self.assertTrue(
+                   torch.eq(
+                       dict1_,
+                       dict2_,
+                   ).all()
+               )
            else:
                self.assertEqual(dict1_, dict2_)
 
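The expansion above is black's "magic trailing comma" behavior, introduced after 19.x: a trailing comma inside brackets tells black to keep the call exploded one argument per line, so the stray comma in `torch.eq(dict1_, dict2_,)` forces the multi-line form. Dropping the comma would presumably have let black keep the call on one line:

```python
# Hypothetical alternative: without the trailing comma, black collapses the call.
self.assertTrue(torch.eq(dict1_, dict2_).all())
```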
test/labeling/apply/test_lf_applier.py

Lines changed: 3 additions & 3 deletions
@@ -17,7 +17,7 @@
 
 @preprocessor()
 def square(x: DataPoint) -> DataPoint:
-    x.num_squared = x.num ** 2
+    x.num_squared = x.num**2
     return x
 
 
@@ -27,7 +27,7 @@ def __init__(self):
 
     def __call__(self, x: float) -> float:
         self.n_hits += 1
-        return x ** 2
+        return x**2
 
 
 @labeling_function()
@@ -247,7 +247,7 @@ def test_lf_applier_dask_preprocessor(self) -> None:
     def test_lf_applier_pandas_preprocessor_memoized(self) -> None:
         @preprocessor(memoize=True)
         def square_memoize(x: DataPoint) -> DataPoint:
-            x.num_squared = x.num ** 2
+            x.num_squared = x.num**2
             return x
 
         @labeling_function(pre=[square_memoize])

test/labeling/apply/test_spark.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@
 
 @preprocessor()
 def square(x: Row) -> Row:
-    return Row(num=x.num, num_squared=x.num ** 2)
+    return Row(num=x.num, num_squared=x.num**2)
 
 
 @labeling_function()
@@ -91,7 +91,7 @@ def test_lf_applier_spark_preprocessor_memoized(self) -> None:
 
         @preprocessor(memoize=True)
         def square_memoize(x: DataPoint) -> DataPoint:
-            return Row(num=x.num, num_squared=x.num ** 2)
+            return Row(num=x.num, num_squared=x.num**2)
 
         @labeling_function(pre=[square_memoize])
         def fp_memoized(x: DataPoint) -> int:

test/labeling/lf/test_core.py

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@
 
 @preprocessor()
 def square(x: DataPoint) -> DataPoint:
-    x.num = x.num ** 2
+    x.num = x.num**2
     return x
 
 
@@ -74,7 +74,7 @@ def lf(x: DataPoint) -> int:
         self._run_lf(lf)
 
     def test_labeling_function_decorator_args(self) -> None:
-        db = [3, 6, 43 ** 2]
+        db = [3, 6, 43**2]
 
         @labeling_function(name="my_lf", resources=dict(db=db), pre=[square])
         def lf(x: DataPoint, db: List[int]) -> int:

test/map/test_core.py

Lines changed: 3 additions & 3 deletions
@@ -59,12 +59,12 @@ def __init__(self):
 
     def __call__(self, x: float) -> float:
         self.n_hits += 1
-        return x ** 2
+        return x**2
 
 
 @lambda_mapper()
 def square(x: DataPoint) -> DataPoint:
-    x.num_squared = x.num ** 2
+    x.num_squared = x.num**2
     return x
 
 
@@ -356,7 +356,7 @@ def test_mapper_decorator_no_parens(self) -> None:
 
         @lambda_mapper
         def square(x: DataPoint) -> DataPoint:
-            x.num_squared = x.num ** 2
+            x.num_squared = x.num**2
             return x
 
     def test_mapper_with_args_kwargs(self) -> None:
