This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Remove unnecessary flake8 exceptions #3762

Merged
merged 3 commits on Feb 12, 2020
Changes from 1 commit
36 changes: 5 additions & 31 deletions .flake8
@@ -13,40 +13,14 @@ exclude =

 per-file-ignores =
     # __init__.py files are allowed to have unused imports and lines-too-long
-    allennlp/__init__.py:F401
-    allennlp/**/__init__.py:F401,E501
+    **/__init__.py:F401
-    # for some reason ("fixtures" dir not being a Python package?) we need to add this line
-    allennlp/tests/fixtures/**/__init__.py:F401

     # tests don't have to respect
-    # E501: line length
-    # E261: 2 spaces before comments (it would be ideal if they did, though)
     # E731: do not assign a lambda expression, use a def
-    # F401: unused imports
-    allennlp/tests/**:E501,E261,E731,F401
+    allennlp/tests/**:E731

     # scripts don't have to respect
-    # E501: line length
     # E402: imports not at top of file (because we mess with sys.path)
-    # E302 expected 2 blank lines, found 1 (it would be ideal if they did, though)
-    scripts/**:E501,E402,E302
-
-    # here are a few files that get to ignore line-too-long, lucky them
-    allennlp/pretrained.py:E501
-
-    # here are some files that get to ignore E402 module level import not at top of file
-    allennlp/modules/elmo.py:E402
-    allennlp/modules/elmo_lstm.py:E402
-    allennlp/modules/token_embedders/embedding.py:E402
-    allennlp/commands/elmo.py:E402
-    allennlp/tests/modules/elmo_test.py:E402
-    allennlp/tests/commands/elmo_test.py:E402
-    allennlp/tests/modules/token_embedders/embedding_test.py:E402
-
-    # Should probably fix this, but there's a lot of them.
-    # E127 continuation line over-indented for visual indent
-    allennlp/tests/semparse/worlds/atis_world_test.py:E127
-
-    # This one test is much more readable with nonstandard comments
-    # E114 indentation is not a multiple of four (comment)
-    # E116 unexpected indentation (comment)
-    # E128 continuation line under-indented for visual indent
-    allennlp/tests/data/token_indexers/bert_indexer_test.py:E114,E116,E128
+    scripts/**:E402
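
A quick way to confirm that an entry really was unnecessary (a workflow note, not something stated in the PR) is to re-run flake8 restricted to the codes being dropped, for example:

    flake8 --select=E501,E261,E731,F401 allennlp/tests

If that exits cleanly, the blanket exception was dead weight. Note that per-file-ignores requires flake8 3.7 or newer.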
4 changes: 2 additions & 2 deletions allennlp/commands/__init__.py
@@ -15,7 +15,7 @@
 from allennlp.commands.test_install import TestInstall
 from allennlp.commands.train import Train
 from allennlp.common.plugins import import_plugins
-from allennlp.common.util import import_submodules
+from allennlp.common.util import import_module_and_submodules

 logger = logging.getLogger(__name__)

@@ -90,7 +90,7 @@ def main(prog: Optional[str] = None) -> None:
if "func" in dir(args):
# Import any additional modules needed (to register custom classes).
for package_name in args.include_package:
import_submodules(package_name)
import_module_and_submodules(package_name)
args.func(args)
else:
parser.print_help()
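
For context on why the CLI imports extra modules at all: AllenNLP's registry only learns about a class when the module containing its register decorator is actually executed. A minimal sketch of that flow, with a hypothetical package and reader name:

    # my_package/my_reader.py -- merely importing this module registers the
    # class, which is why --include-package my_package has to import it.
    from allennlp.data.dataset_readers.dataset_reader import DatasetReader


    @DatasetReader.register("my-reader")  # hypothetical registry name
    class MyReader(DatasetReader):
        ...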
19 changes: 9 additions & 10 deletions allennlp/commands/elmo.py
@@ -68,25 +68,24 @@
 import json
 import logging
 import os
-from typing import IO, List, Iterable, Tuple
 import warnings

-from overrides import overrides
-
-with warnings.catch_warnings():
-    warnings.filterwarnings("ignore", category=FutureWarning)
-    import h5py
+from typing import IO, Iterable, List, Tuple
+
 import numpy
 import torch
+from overrides import overrides

+from allennlp.commands.subcommand import Subcommand
+from allennlp.common.checks import ConfigurationError
 from allennlp.common.tqdm import Tqdm
 from allennlp.common.util import lazy_groups_of, prepare_global_logging
-from allennlp.common.checks import ConfigurationError
 from allennlp.data.token_indexers.elmo_indexer import ELMoTokenCharactersIndexer
-from allennlp.nn.util import remove_sentence_boundaries
 from allennlp.modules.elmo import _ElmoBiLm, batch_to_ids
-from allennlp.commands.subcommand import Subcommand
+from allennlp.nn.util import remove_sentence_boundaries

+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+    import h5py
Comment on lines +86 to +88 (Contributor Author):

    I put it at the end so the regular imports are sorted.
 logger = logging.getLogger(__name__)
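
An aside on the pattern being moved around here: the h5py import is wrapped in catch_warnings() to silence the FutureWarning h5py emits on import. Placing the wrapped block after all other imports appears to be exactly what makes the old per-file E402 exceptions unnecessary: E402 flags module-level imports that follow a non-import statement, and `with warnings.catch_warnings():` is such a statement, so any import below it used to trip the check. A minimal sketch of the resulting layout (assuming h5py is installed):

    import warnings

    import numpy  # ordinary imports come first, sorted, where flake8 expects them

    # Last import in the module: nothing below it can trigger E402
    # (module level import not at top of file), so no exception is needed.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=FutureWarning)
        import h5py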
3 changes: 2 additions & 1 deletion allennlp/commands/train.py
@@ -369,9 +369,10 @@ def _train_worker(

     if distributed:
         # Since the worker is spawned and not forked, the extra imports need to be done again.
+        # Both the ones from the plugins and the ones from `include_package`.
         import_plugins()
         for package_name in include_package:
-            common_util.import_submodules(package_name)
+            common_util.import_module_and_submodules(package_name)

     num_procs_per_node = len(distributed_device_ids)
     # The Unique identifier of the worker process among all the processes in the
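The comment above is the key detail: with the "spawn" start method each worker begins as a fresh Python interpreter, so module-level side effects performed in the parent (such as populating AllenNLP's registry) are lost and must be repeated in the child. A hedged sketch of the mechanism, with a hypothetical package name:

    import torch.multiprocessing as mp

    from allennlp.common import util as common_util
    from allennlp.common.plugins import import_plugins


    def _worker(process_rank: int) -> None:
        # Re-run the imports whose side effect is registering classes,
        # because this child process did not inherit them from the parent.
        import_plugins()
        common_util.import_module_and_submodules("my_package")  # hypothetical


    if __name__ == "__main__":
        mp.spawn(_worker, nprocs=2)
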
4 changes: 2 additions & 2 deletions allennlp/common/util.py
@@ -434,7 +434,7 @@ def push_python_path(path: PathType) -> ContextManagerFunctionReturnType[None]:
         sys.path.remove(path)


-def import_submodules(package_name: str) -> None:
+def import_module_and_submodules(package_name: str) -> None:
Comment from the Contributor Author:

    Note I make this change mentioned in another PR here.
"""
Import all submodules under the given package.
Primarily useful so that people using AllenNLP as a library
@@ -459,7 +459,7 @@ def import_submodules(package_name: str) -> None:
         if path_string and module_finder.path != path_string:
             continue
         subpackage = f"{package_name}.{name}"
-        import_submodules(subpackage)
+        import_module_and_submodules(subpackage)


 def peak_memory_mb() -> float:
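For readers skimming the diff, the renamed function recursively imports a package and everything beneath it, so that any Registrable subclasses register themselves as a side effect. A simplified sketch of its shape (not the exact AllenNLP implementation, which also juggles sys.path):

    import importlib
    import pkgutil


    def import_module_and_submodules_sketch(package_name: str) -> None:
        """Import package_name itself, then recurse into each submodule."""
        module = importlib.import_module(package_name)
        # Plain modules have no __path__; only packages can be walked further.
        for _, name, _ in pkgutil.iter_modules(getattr(module, "__path__", [])):
            import_module_and_submodules_sketch(f"{package_name}.{name}")
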
35 changes: 17 additions & 18 deletions allennlp/modules/elmo.py
@@ -1,37 +1,36 @@
 import json
 import logging
-from typing import Union, List, Dict, Any
 import warnings

-import torch
-from torch.nn.modules import Dropout
+from typing import Any, Dict, List, Union

 import numpy

-with warnings.catch_warnings():
-    warnings.filterwarnings("ignore", category=FutureWarning)
-    import h5py
+import torch
 from overrides import overrides
+from torch.nn.modules import Dropout

-from allennlp.common.file_utils import cached_path
+from allennlp.common import FromParams
 from allennlp.common.checks import ConfigurationError
+from allennlp.common.file_utils import cached_path
 from allennlp.common.util import lazy_groups_of
-from allennlp.common import FromParams
+from allennlp.data import Instance, Token, Vocabulary
+from allennlp.data.batch import Batch
+from allennlp.data.fields import TextField
+from allennlp.data.token_indexers.elmo_indexer import (
+    ELMoCharacterMapper,
+    ELMoTokenCharactersIndexer,
+)
 from allennlp.modules.elmo_lstm import ElmoLstm
 from allennlp.modules.highway import Highway
 from allennlp.modules.scalar_mix import ScalarMix
 from allennlp.nn.util import (
-    remove_sentence_boundaries,
     add_sentence_boundary_token_ids,
     get_device_of,
+    remove_sentence_boundaries,
 )
-from allennlp.data.token_indexers.elmo_indexer import (
-    ELMoCharacterMapper,
-    ELMoTokenCharactersIndexer,
-)
-from allennlp.data.batch import Batch
-from allennlp.data import Token, Vocabulary, Instance
-from allennlp.data.fields import TextField

+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+    import h5py


 logger = logging.getLogger(__name__)
14 changes: 7 additions & 7 deletions allennlp/modules/elmo_lstm.py
@@ -1,21 +1,21 @@
"""
A stacked bidirectional LSTM with skip connections between layers.
"""
from typing import Optional, Tuple, List
import warnings
from typing import List, Optional, Tuple

import numpy
import torch
from torch.nn.utils.rnn import PackedSequence, pad_packed_sequence

from allennlp.common.checks import ConfigurationError
from allennlp.common.file_utils import cached_path
from allennlp.modules.encoder_base import _EncoderBase
from allennlp.modules.lstm_cell_with_projection import LstmCellWithProjection

with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=FutureWarning)
import h5py
import numpy

from allennlp.modules.lstm_cell_with_projection import LstmCellWithProjection
from allennlp.common.checks import ConfigurationError
from allennlp.modules.encoder_base import _EncoderBase
from allennlp.common.file_utils import cached_path


class ElmoLstm(_EncoderBase):
24 changes: 12 additions & 12 deletions allennlp/modules/token_embedders/embedding.py
@@ -1,29 +1,29 @@
 import io
-import tarfile
-import zipfile
-import re
+import itertools
 import logging
+import re
+import tarfile
 import warnings
-import itertools
-from typing import Optional, Tuple, Sequence, cast, IO, Iterator, Any, NamedTuple
+import zipfile
+from typing import Any, cast, IO, Iterator, NamedTuple, Optional, Sequence, Tuple

-from overrides import overrides
 import numpy
 import torch
+from overrides import overrides
 from torch.nn.functional import embedding

-with warnings.catch_warnings():
-    warnings.filterwarnings("ignore", category=FutureWarning)
-    import h5py
-
 from allennlp.common import Tqdm, Registrable
 from allennlp.common.checks import ConfigurationError
-from allennlp.common.file_utils import get_file_extension, cached_path, is_url_or_existing_file
+from allennlp.common.file_utils import cached_path, get_file_extension, is_url_or_existing_file
 from allennlp.data import Vocabulary
-from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
 from allennlp.modules.time_distributed import TimeDistributed
+from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
 from allennlp.nn import util

+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+    import h5py

 logger = logging.getLogger(__name__)


9 changes: 5 additions & 4 deletions allennlp/tests/commands/elmo_test.py
@@ -7,17 +7,18 @@
 import tempfile
 import warnings

-with warnings.catch_warnings():
-    warnings.filterwarnings("ignore", category=FutureWarning)
-    import h5py
 import numpy
 import pytest

-from allennlp.common.checks import ConfigurationError
 from allennlp.commands import main
 from allennlp.commands.elmo import ElmoEmbedder
+from allennlp.common.checks import ConfigurationError
 from allennlp.tests.modules.elmo_test import ElmoTestCase

+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", category=FutureWarning)
+    import h5py
+

 class TestElmoCommand(ElmoTestCase):
     def setUp(self):
6 changes: 3 additions & 3 deletions allennlp/tests/common/params_test.py
@@ -1,13 +1,12 @@
 import json
 import os
 import re
 import tempfile
 from collections import OrderedDict

 import pytest

-from allennlp.common.checks import ConfigurationError
-from allennlp.common.params import Params, unflatten, with_fallback, parse_overrides, infer_and_cast
+from allennlp.common.params import infer_and_cast, Params, parse_overrides, unflatten, with_fallback
 from allennlp.common.testing import AllenNlpTestCase


@@ -38,7 +37,8 @@ def test_overrides(self):
         filename = self.FIXTURES_ROOT / "simple_tagger" / "experiment.json"
         overrides = (
             '{ "train_data_path": "FOO", "model": { "type": "BAR" },'
-            '"model.text_field_embedder.tokens.type": "BAZ", "iterator.sorting_keys.0.0": "question"}'
+            '"model.text_field_embedder.tokens.type": "BAZ",'
+            '"iterator.sorting_keys.0.0": "question"}'
         )
         params = Params.from_file(filename, overrides)
7 changes: 0 additions & 7 deletions allennlp/tests/common/registrable_test.py
@@ -2,9 +2,6 @@
 import os

 import pytest
-import torch
-import torch.nn.init
-import torch.optim.lr_scheduler

 from allennlp.common.checks import ConfigurationError
 from allennlp.common.registrable import Registrable
@@ -14,14 +11,10 @@
 from allennlp.data.iterators.data_iterator import DataIterator
 from allennlp.data.token_indexers.token_indexer import TokenIndexer
 from allennlp.data.tokenizers.tokenizer import Tokenizer
-from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder
-from allennlp.modules.seq2vec_encoders.seq2vec_encoder import Seq2VecEncoder
 from allennlp.modules.similarity_functions import SimilarityFunction
 from allennlp.modules.text_field_embedders.text_field_embedder import TextFieldEmbedder
 from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
 from allennlp.nn import Initializer
 from allennlp.nn.regularizers.regularizer import Regularizer
-from allennlp.training.learning_rate_schedulers import LearningRateScheduler


 class TestRegistrable(AllenNlpTestCase):
2 changes: 1 addition & 1 deletion allennlp/tests/common/util_test.py
@@ -67,7 +67,7 @@ def test_import_submodules(self):
assert "mymodule" not in sys.modules
assert "mymodule.submodule" not in sys.modules

util.import_submodules("mymodule")
util.import_module_and_submodules("mymodule")

assert "mymodule" in sys.modules
assert "mymodule.submodule" in sys.modules
(file header not captured in this view; judging by the imports, the sharded dataset reader test)

@@ -1,13 +1,9 @@
 from collections import Counter
 from typing import Tuple

-import numpy as np
-import pytest
-
 from allennlp.common.testing import AllenNlpTestCase
-from allennlp.data.dataset_readers import ShardedDatasetReader, SequenceTaggingDatasetReader
+from allennlp.data.dataset_readers import SequenceTaggingDatasetReader, ShardedDatasetReader
 from allennlp.data.instance import Instance
 from allennlp.data.vocabulary import Vocabulary


 def fingerprint(instance: Instance) -> Tuple[str, ...]:
9 changes: 3 additions & 6 deletions allennlp/tests/data/fields/text_field_test.py
@@ -1,18 +1,15 @@
 from collections import defaultdict
 from typing import Dict, List

-import pytest
 import numpy
 import torch
+import pytest

+from allennlp.common.checks import ConfigurationError
+from allennlp.common.testing import AllenNlpTestCase
 from allennlp.data import Token, Vocabulary
 from allennlp.data.fields import TextField
 from allennlp.data.token_indexers import SingleIdTokenIndexer, TokenCharactersIndexer, TokenIndexer
-
-from allennlp.common.testing import AllenNlpTestCase
-from allennlp.common.checks import ConfigurationError
 from allennlp.common.util import pad_sequence_to_length


 class DictReturningTokenIndexer(TokenIndexer):
     """
3 changes: 0 additions & 3 deletions allennlp/tests/data/iterators/bucket_iterator_test.py
@@ -1,7 +1,4 @@
-import pytest
-
-from allennlp.common import Params
 from allennlp.common.checks import ConfigurationError
 from allennlp.data import Instance, Token
 from allennlp.data.fields import TextField
 from allennlp.data.iterators import BucketIterator
(file header not captured in this view; judging by the imports, a pretrained transformer indexer test)

@@ -1,7 +1,7 @@
 from transformers.tokenization_auto import AutoTokenizer

 from allennlp.common.testing import AllenNlpTestCase
-from allennlp.data import Token, Vocabulary
+from allennlp.data import Vocabulary
 from allennlp.data.token_indexers import PretrainedTransformerIndexer
 from allennlp.data.tokenizers import PretrainedTransformerTokenizer