utils.py
from __future__ import absolute_import, division, print_function

import os
import random
import subprocess

import numpy as np
import torch
import tqdm


def compute_confidence_interval(data):
    """
    Compute the mean and the half-width of a 95% confidence interval.

    :param data: an array of mean accuracies (or mAPs) across a number of sampled episodes.
    :return: a tuple (mean, half-width); the interval is mean +/- half-width.
    """
    a = 1.0 * np.array(data)
    m = np.mean(a)
    std = np.std(a)
    # 1.96 is the two-sided z-score for 95% coverage under a normal approximation.
    pm = 1.96 * (std / np.sqrt(len(a)))
    return m, pm
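

# A minimal usage sketch (hypothetical numbers) clarifying the return convention:
# per-episode accuracies go in, a point estimate and a CI half-width come out.
#
#   accs = [0.81, 0.79, 0.83, 0.80]
#   mean, half_width = compute_confidence_interval(accs)
#   print("accuracy: {:.4f} +/- {:.4f}".format(mean, half_width))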


def warp_tqdm(disable_tqdm, desc, data_loader):
    """Wrap a data loader in a tqdm progress bar, or return it unchanged if disabled."""
    if disable_tqdm:
        tqdm_loader = data_loader
    else:
        tqdm_loader = tqdm.tqdm(data_loader, desc=desc, total=len(data_loader))
    return tqdm_loader


def set_seed(_seed: int):
    """Seed the Python, NumPy and PyTorch (CPU and all CUDA devices) RNGs."""
    random.seed(_seed)
    np.random.seed(_seed)
    torch.manual_seed(_seed)
    torch.cuda.manual_seed_all(_seed)
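

# Note: seeding alone does not make CUDA runs fully deterministic. If exact
# reproducibility is needed, one can additionally set (at some speed cost):
#
#   torch.backends.cudnn.deterministic = True
#   torch.backends.cudnn.benchmark = False
#
# This is left out above to preserve the original behavior.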


def dist_training(args):
    # Setup CUDA, GPU and distributed training.
    if args.local_rank == -1 or args.no_cuda:
        device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
        args.n_gpu = torch.cuda.device_count()
    else:
        # Initialize the distributed backend, which takes care of synchronizing nodes/GPUs.
        torch.cuda.set_device(args.local_rank)
        device = torch.device("cuda", args.local_rank)
        torch.distributed.init_process_group(backend="nccl")
        args.n_gpu = 1
    args.device = device
    return args
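

# Usage sketch (assuming an argparse setup that defines --local_rank and
# --no_cuda, and that the script is started with the legacy launcher, which
# passes a distinct local_rank to each process):
#
#   python -m torch.distributed.launch --nproc_per_node=4 train.py ...
#
# With local_rank == -1 the code falls back to single-process multi-GPU or CPU.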


def save_running_code(src_folder, tgt_folder, exclude_folders=None, logger=None):
    """Copy the current source tree into <tgt_folder>/code_backup via rsync."""
    code_backup = os.path.join(tgt_folder, 'code_backup')
    os.makedirs(code_backup, exist_ok=True)
    _str_exclude = ""
    for folder in (exclude_folders or []):
        _str_exclude += " --exclude " + str(folder)
    # '--progress' (not '-progress', which rsync would parse as a bundle of
    # single-letter flags) reports per-file transfer progress.
    _command = "rsync -a --progress {} {} {}".format(src_folder, code_backup, _str_exclude)
    if logger is not None:
        logger.info("Executing '%s'", _command)
    subprocess.check_output(_command, shell=True)
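

# Usage sketch (hypothetical paths): back up the working tree, skipping bulky
# directories, before a long experiment starts.
#
#   save_running_code(src_folder=os.getcwd(),
#                     tgt_folder="experiments/run_001",
#                     exclude_folders=[".git", "data", "checkpoints"])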


def backup_codes_to_project_folder(params, logger):
    # Reuse the .gitignore patterns as rsync excludes so that ignored
    # (typically large or generated) files are not copied.
    exclude_folders = []
    try:
        with open(".gitignore", "r") as file_ptr:
            for line in file_ptr:
                line = line.strip()
                if line == "" or line.startswith("#"):
                    continue
                exclude_folders.append(line)
    except OSError:
        logger.warning("No .gitignore file found. "
                       "Please make sure that no large files are copied to the experiment folder.")
    backup_codes_path = os.path.join(params.output_dir, "code")
    os.makedirs(backup_codes_path, exist_ok=True)
    save_running_code(
        src_folder=os.getcwd(),
        tgt_folder=backup_codes_path,
        exclude_folders=exclude_folders,
        logger=logger
    )
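

# Usage sketch (hypothetical names): 'params' only needs an 'output_dir'
# attribute, e.g. an argparse.Namespace, and 'logger' a standard logging.Logger.
#
#   import argparse, logging
#   params = argparse.Namespace(output_dir="experiments/run_001")
#   backup_codes_to_project_folder(params, logging.getLogger(__name__))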