-
-
Notifications
You must be signed in to change notification settings - Fork 1.8k
Expand file tree
/
Copy pathtrain_dlctransreid.py
More file actions
124 lines (101 loc) · 3.23 KB
/
train_dlctransreid.py
File metadata and controls
124 lines (101 loc) · 3.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
#
# DeepLabCut Toolbox (deeplabcut.org)
# © A. & M.W. Mathis Labs
# https://github.com/DeepLabCut/DeepLabCut
#
# Please see AUTHORS for contributors.
# https://github.com/DeepLabCut/DeepLabCut/blob/master/AUTHORS
#
# Licensed under GNU Lesser General Public License v3.0
#
import random
try:
import torch
except ModuleNotFoundError as e:
raise ModuleNotFoundError("Unsupervised identity learning requires PyTorch. Please run `pip install torch`.") from e
import glob
import os
from pathlib import Path
import numpy as np
from deeplabcut.utils import auxiliaryfunctions
from .config import cfg
from .datasets import make_dlc_dataloader
from .loss import easy_triplet_loss
from .model import make_dlc_model
from .processor import do_dlc_train
from .solver import make_easy_optimizer
from .solver.scheduler_factory import create_scheduler
def set_seed(seed):
    """Seed all relevant RNGs (Python, NumPy, PyTorch CPU/CUDA) for reproducibility.

    Parameters
    ----------
    seed : int
        The seed value applied to every random number generator.
    """
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
    np.random.seed(seed)
    random.seed(seed)
    torch.backends.cudnn.deterministic = True
    # BUGFIX: benchmark must be False for reproducibility — the cuDNN
    # autotuner picks convolution algorithms non-deterministically across
    # runs, which defeats `deterministic = True` above.
    torch.backends.cudnn.benchmark = False
def split_train_test(npy_list, train_frac):
    """Split per-video feature arrays into pooled train and test sets.

    Each ``.npy`` file is loaded, its samples shuffled, and the first
    ``train_frac`` fraction assigned to training; the remainder goes to
    testing. The per-video splits are then concatenated along axis 0.

    Parameters
    ----------
    npy_list : list of str
        Paths to ``.npy`` files, one per video, each an array of samples.
    train_frac : float
        Fraction of each video's samples placed in the training split.

    Returns
    -------
    tuple of np.ndarray
        ``(train_array, test_array)`` pooled across all videos.
    """
    train_parts, test_parts = [], []
    for path in npy_list:
        data = np.load(path)
        shuffled = data[np.random.permutation(data.shape[0])]
        cutoff = int(data.shape[0] * train_frac)
        train_parts.append(shuffled[:cutoff])
        test_parts.append(shuffled[cutoff:])
    return (
        np.concatenate(train_parts, axis=0),
        np.concatenate(test_parts, axis=0),
    )
def train_tracking_transformer(
    path_config_file,
    dlcscorer,
    videos,
    videotype="",
    train_frac=0.8,
    modelprefix="",
    train_epochs=100,
    batch_size=64,
    ckpt_folder="",
    destfolder=None,
):
    """Train the transformer re-identification model on per-video features.

    Parameters
    ----------
    path_config_file : str
        Path to the DeepLabCut project config (currently unused here but
        kept for interface compatibility).
    dlcscorer : str
        Scorer name used to locate the feature ``.npy`` files.
    videos : list of str
        Videos (or folders of videos) whose features are used for training.
    videotype : str, optional
        Video file extension filter passed to ``get_list_of_videos``.
    train_frac : float, optional
        Fraction of samples per video used for training (default 0.8).
    modelprefix : str, optional
        Model prefix (kept for interface compatibility).
    train_epochs : int, optional
        Number of training epochs (default 100).
    batch_size : int, optional
        Mini-batch size for the data loaders (default 64).
    ckpt_folder : str, optional
        Folder where checkpoints are written.
    destfolder : str or None, optional
        Folder containing the feature files; when ``None`` each video's own
        folder is searched.

    Raises
    ------
    FileNotFoundError
        If no feature ``.npy`` file matching a video/scorer pair is found.
    """
    npy_list = []
    videos = auxiliaryfunctions.get_list_of_videos(videos, videotype)
    for video in videos:
        videofolder = str(Path(video).parents[0])
        # BUGFIX: use a per-video local instead of rebinding `destfolder`.
        # The original assigned `destfolder = videofolder` for the first
        # video, so every later video was searched in the FIRST video's
        # folder rather than its own.
        feature_folder = videofolder if destfolder is None else destfolder
        video_name = Path(video).stem
        files = glob.glob(
            os.path.join(feature_folder, video_name + dlcscorer + "*.npy")
        )
        if not files:
            raise FileNotFoundError(
                f"No feature file matching '{video_name + dlcscorer}*.npy' "
                f"found in '{feature_folder}'."
            )
        # Assuming there is only one match per video.
        npy_list.append(files[0])
    train_list, test_list = split_train_test(npy_list, train_frac)
    train_loader, val_loader = make_dlc_dataloader(train_list, test_list, batch_size)
    # Infer model dimensions from the data: (n_samples, ..., n_kpts, feature_dim).
    num_kpts = train_list.shape[2]
    feature_dim = train_list.shape[-1]
    model = make_dlc_model(cfg, feature_dim, num_kpts)
    # Triplet loss drives the metric-learning objective for re-identification.
    triplet_loss = easy_triplet_loss()
    optimizer = make_easy_optimizer(cfg, model)
    scheduler = create_scheduler(cfg, optimizer)
    num_query = 1
    do_dlc_train(
        cfg,
        model,
        triplet_loss,
        train_loader,
        val_loader,
        optimizer,
        scheduler,
        num_kpts,
        feature_dim,
        num_query,
        total_epochs=train_epochs,
        ckpt_folder=ckpt_folder,
    )