-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathutils.py
More file actions
92 lines (74 loc) · 3.21 KB
/
utils.py
File metadata and controls
92 lines (74 loc) · 3.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
from architectures.DAREUNet import DARE_UNet
import data_preparation
import argparse
from imutils import paths
from glob import glob
import os
import torch
import config
import sys
def clear_image_directories():
    """Delete every file in the train/test/patch ``src`` and ``mask`` directories.

    The directories themselves are kept; only their contents are removed.
    Paths are read from the project-level ``config`` module.
    """
    # Renamed loop variables: the original used `dir` and `file`, which
    # shadow Python builtins.
    patterns = [config.TRAIN_DATASETS_PATH + "src/*", config.TRAIN_DATASETS_PATH + "mask/*",
                config.TEST_DATASETS_PATH + "src/*", config.TEST_DATASETS_PATH + "mask/*",
                config.PATCHES_PATH + "src/*", config.PATCHES_PATH + "mask/*"]
    for pattern in patterns:
        for path in glob(pattern):
            os.remove(path)
def save_to_file(filepath, content):
    """Write the items of *content* to *filepath*, one item per line.

    Args:
        filepath: Destination path; the file is created or truncated.
        content: Iterable of strings; items are joined with newlines
            (no trailing newline is written).
    """
    # Context manager guarantees the handle is closed even if the write
    # raises; explicit UTF-8 avoids platform-dependent default encodings.
    with open(filepath, "w", encoding="utf-8") as f:
        f.write("\n".join(content))
def read_txt_as_list(filepath):
    """Read a text file and return its contents split on newlines.

    Args:
        filepath: Path of the file to read.

    Returns:
        list[str]: One entry per line; a trailing newline in the file
        yields a trailing empty string, matching ``str.split("\\n")``.
    """
    # Context manager guarantees the handle is closed even on error;
    # explicit UTF-8 mirrors save_to_file and avoids platform defaults.
    with open(filepath, "r", encoding="utf-8") as f:
        content = f.read()
    return content.split("\n")
def save_model(checkpoint):
    """Serialize *checkpoint* to ``config.BEST_MODEL_PATH`` via ``torch.save``.

    *checkpoint* is whatever object the caller passes (presumably a
    state_dict or a dict bundling model/optimizer state — confirm at
    call sites); it is written as-is.
    """
    torch.save(checkpoint, config.BEST_MODEL_PATH)
def load_model(checkpoint, model):
    """Load parameter state from *checkpoint* into *model* in place.

    *checkpoint* must be a state_dict compatible with *model*
    (``load_state_dict`` raises on mismatched keys/shapes).
    Returns ``None``; *model* is mutated.
    """
    model.load_state_dict(checkpoint)
def average(values, round_to):
    """Return the arithmetic mean of *values*, rounded to *round_to* decimals.

    Args:
        values: Non-empty sequence of numbers.
        round_to: Number of decimal places passed to ``round``.

    Raises:
        ZeroDivisionError: If *values* is empty (unchanged from the
        original behavior).
    """
    # Parameter renamed from `list`, which shadowed the builtin.
    return round(sum(values) / len(values), round_to)
def list_directory(directory):
    """Return sorted image paths from the ``src/`` and ``mask/`` subfolders.

    Args:
        directory: Base path; must end with a separator, since the
            subfolder names are appended by string concatenation.

    Returns:
        tuple[list[str], list[str]]: ``(image_paths, mask_paths)``,
        each sorted lexicographically.
    """
    src_dir = directory + "src/"
    mask_dir = directory + "mask/"
    image_paths = sorted(paths.list_images(src_dir))
    mask_paths = sorted(paths.list_images(mask_dir))
    return image_paths, mask_paths
def parse_cli_args():
    """Parse command-line overrides and write them into the ``config`` module.

    Every option defaults to the current value in ``config``, so omitting a
    flag leaves that setting unchanged.  Side effects:
      * mutates module-level settings in ``config``;
      * unconditionally enables ``config.SYNC_WANDB``;
      * when ``--prepare_new_dataset`` is passed, rebuilds the datasets via
        ``data_preparation.prepare_datasets()`` and terminates the process
        with ``sys.exit``.

    Returns:
        None.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--batch_size', dest='batch_size',
                        type=int, default=config.BATCH_SIZE)
    parser.add_argument('--epochs', dest='epochs',
                        type=int, default=config.NUM_EPOCHS)
    parser.add_argument('--learning_rate', dest='learning_rate',
                        type=float, default=config.LR)
    parser.add_argument('--patch_size', dest='patch_size',
                        type=int, default=config.PATCH_SIZE)
    parser.add_argument('--patch_step_train',
                        dest='patch_step_train', type=int, default=config.PATCH_STEP_TRAIN)
    parser.add_argument('--random_seed', dest='random_seed',
                        type=int, default=config.RANDOM_SEED)
    parser.add_argument('--val_set_ratio', dest='val_set_ratio',
                        type=float, default=config.VAL_SET_RATIO)
    parser.add_argument('--test_set_ratio', dest='test_set_ratio',
                        type=float, default=config.TEST_SET_RATIO)
    parser.add_argument('--limits', dest='limits', type=int,
                        default=config.TRAIN_LIMITS)
    parser.add_argument('--project_name', dest='project_name',
                        default=config.PROJECT_NAME)
    # BUG FIX: the original used `type=bool`, which treats ANY non-empty
    # string (including "False") as True.  A store_true flag gives the
    # intended on/off semantics: absent -> False, present -> True.
    parser.add_argument('--prepare_new_dataset',
                        dest='prepare_new_dataset', action='store_true',
                        default=False)
    args = parser.parse_args()
    config.SYNC_WANDB = True
    config.BATCH_SIZE = args.batch_size
    config.NUM_EPOCHS = args.epochs
    config.LR = args.learning_rate
    config.PATCH_SIZE = args.patch_size
    config.PATCH_STEP_TRAIN = args.patch_step_train
    config.RANDOM_SEED = args.random_seed
    config.VAL_SET_RATIO = args.val_set_ratio
    config.TEST_SET_RATIO = args.test_set_ratio
    config.TRAIN_LIMITS = args.limits
    config.PROJECT_NAME = args.project_name
    if args.prepare_new_dataset:
        data_preparation.prepare_datasets()
        sys.exit("New dataset is loaded!")