-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdebugger.py
51 lines (43 loc) · 1.23 KB
/
debugger.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
"""Debug driver: run AllenNLP training in-process so a debugger can step into it.

Instead of shelling out to the ``allennlp train`` CLI, this script builds
``sys.argv`` by hand and calls ``allennlp.commands.main()`` directly, which
keeps everything in one Python process (breakpoints work).
"""
import json
import os
import shutil
import sys
from allennlp.commands import main

config_file = "experiments/alternatinglstmselfattention.json"
# Use overrides to train on CPU.
overrides = json.dumps({"trainer": {"cuda_device": -1}})
# Expand "~" explicitly: neither shutil.rmtree below nor the AllenNLP CLI
# performs tilde expansion, so the raw string would silently target a
# literal "./~/tmp/irony_debug" directory instead of the home directory.
serialization_dir = os.path.expanduser("~/tmp/irony_debug")
# Training will fail if the serialization directory already
# has stuff in it. If you are running the same training loop
# over and over again for debugging purposes, it will.
# Hence we wipe it out in advance.
# BE VERY CAREFUL NOT TO DO THIS FOR ACTUAL TRAINING!
shutil.rmtree(serialization_dir, ignore_errors=True)
# Assemble the command into sys.argv, exactly as the `allennlp` CLI would see it.
sys.argv = [
    "python",  # command name, not used by main
    "train",
    config_file,
    "-s", serialization_dir,
    "--include-package", "irony_model",
    "-o", overrides,
]
# Alternative: serve a trained model instead of training.
# Uncomment and adjust the paths below (note: expanduser the archive path too).
# archive_path = os.path.expanduser("~/tmp/models/irony/model.tar.gz")
# predictor_name = "ironic-predictor"
# package_name = "irony_model"
# field_name = "tweet"
# sys.argv = [
#     "python",
#     "-m",
#     "allennlp.service.server_simple",
#     "--archive-path",
#     archive_path,
#     "--predictor",
#     predictor_name,
#     "--include-package",
#     package_name,
#     "--title",
#     predictor_name,
#     "--field-name",
#     field_name
# ]
main()