This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Bump black from 20.8b1 to 21.5b1 (#5195)
* Bump black from 20.8b1 to 21.5b1

Bumps [black](/~https://github.com/psf/black) from 20.8b1 to 21.5b1.
- [Release notes](/~https://github.com/psf/black/releases)
- [Changelog](/~https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](/~https://github.com/psf/black/commits)

Signed-off-by: dependabot[bot] <support@github.com>

* formatting changes

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Akshita Bhagia <akshita23bhagia@gmail.com>
dependabot[bot] and AkshitaB authored May 25, 2021
1 parent d2840cb commit 3e1b553
Showing 5 changed files with 6 additions and 6 deletions.
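
The four source-file hunks below are all the same mechanical change: black 21.5b1 removes the padding spaces that 20.8b1 tolerated inside one-line docstrings, and the fifth file simply updates the version pin. A minimal sketch of the rule (illustrative function, not code from this diff):

# Accepted by black 20.8b1: a padded one-line docstring
def smooth(values, beta):
    """ Exponential smoothing of values """
    ...

# After reformatting with black 21.5b1: the padding inside the quotes is stripped
def smooth(values, beta):
    """Exponential smoothing of values"""
    ...

Re-running the formatter over the repository with the new pin installed is expected to reproduce exactly the one-line changes shown in the diff.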
2 changes: 1 addition & 1 deletion allennlp/commands/find_learning_rate.py
@@ -317,7 +317,7 @@ def search_learning_rate(


 def _smooth(values: List[float], beta: float) -> List[float]:
-    """ Exponential smoothing of values """
+    """Exponential smoothing of values"""
     avg_value = 0.0
     smoothed = []
     for i, value in enumerate(values):
2 changes: 1 addition & 1 deletion allennlp/models/archival.py
@@ -26,7 +26,7 @@


 class Archive(NamedTuple):
-    """ An archive comprises a Model and its experimental config"""
+    """An archive comprises a Model and its experimental config"""

     model: Model
     config: Params
4 changes: 2 additions & 2 deletions allennlp/modules/transformer/t5.py
@@ -221,7 +221,7 @@ def _relative_position_bucket(
         return relative_buckets

     def compute_bias(self, query_length: int, key_length: int) -> FloatT:
-        """ Compute binned relative position bias """
+        """Compute binned relative position bias"""
         context_position = torch.arange(query_length, dtype=torch.long)[:, None]
         memory_position = torch.arange(key_length, dtype=torch.long)[None, :]
         relative_position = memory_position - context_position  # shape (query_length, key_length)
@@ -283,7 +283,7 @@ def unshape(states):
             return states.transpose(1, 2).contiguous().view(batch_size, -1, self.inner_dim)

         def project(hidden_states, proj_layer, key_value_states, past_key_value) -> FloatT:
-            """ projects hidden states correctly to key/query states """
+            """projects hidden states correctly to key/query states"""
             if key_value_states is None:
                 # self-attn
                 # (batch_size, num_heads, seq_length, dim_per_head)
2 changes: 1 addition & 1 deletion dev-requirements.txt
@@ -7,7 +7,7 @@ flake8
 mypy==0.812

 # Automatic code formatting
-black==20.8b1
+black==21.5b1

 # Allows generation of coverage reports with pytest.
 pytest-cov
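One way to confirm that an environment actually picked up the new pin after reinstalling the dev requirements (a minimal check using only the standard library; assumes Python 3.8+ and is not part of this commit):

from importlib.metadata import version

# Should print 21.5b1 once the updated dev-requirements.txt has been installed
print(version("black"))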
2 changes: 1 addition & 1 deletion tests/data/vocabulary_test.py
@@ -700,7 +700,7 @@ def test_read_pretrained_words(self):
         )

     def test_from_instances_exclusive_embeddings_file_inside_archive(self):
-        """ Just for ensuring there are no problems when reading pretrained tokens from an archive """
+        """Just for ensuring there are no problems when reading pretrained tokens from an archive"""
         # Read embeddings file from archive
         archive_path = str(self.TEST_DIR / "embeddings-archive.zip")

