
Commit 4b68fb0

feat: update to latest torch & gradio version
1 parent: 7c0b1c0

File tree

8 files changed: +40 −37 lines


.github/workflows/unitest.yml

Lines changed: 0 additions & 1 deletion
@@ -21,7 +21,6 @@ jobs:
         wget https://github.com/fumiama/RVC-Models-Downloader/releases/download/v0.2.5/rvcmd_linux_amd64.deb
         sudo apt -y install ./rvcmd_linux_amd64.deb
         pip install --force pip==24.0 # fix fairseq installing issue https://github.com/facebookresearch/fairseq/issues/5552
-        python -m pip install --upgrade pip
         python -m pip install --upgrade setuptools
         python -m pip install --upgrade wheel
         pip install torch torchvision torchaudio

gui.py

Lines changed: 1 addition & 1 deletion
@@ -251,7 +251,7 @@ def launcher(self):
                     sg.FileBrowse(
                         i18n("Select the .pth file"),
                         initial_folder=os.path.join(
-                            os.getcwd(), "assets/weights"
+                            os.getcwd(), "assets", "weights"
                         ),
                         file_types=[("Model File", "*.pth")],
                     ),
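Note on the gui.py change above: passing "assets" and "weights" as separate arguments lets os.path.join insert the platform's own separator instead of carrying a hard-coded "/". A minimal sketch (my own example, not repository code; the printed values assume a hypothetical Windows install at C:\RVC):

import os

# os.path.join never rewrites a "/" embedded inside a component,
# so the first call yields a mixed-separator string on Windows.
print(os.path.join(os.getcwd(), "assets/weights"))     # C:\RVC\assets/weights
print(os.path.join(os.getcwd(), "assets", "weights"))  # C:\RVC\assets\weights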

infer/lib/train/data_utils.py

Lines changed: 1 addition & 1 deletion
@@ -111,7 +111,7 @@ def get_audio(self, filename):
         spec_filename = filename.replace(".wav", ".spec.pt")
         if os.path.exists(spec_filename):
             try:
-                spec = torch.load(spec_filename)
+                spec = torch.load(spec_filename, weights_only=True)
             except:
                 logger.warning("%s %s", spec_filename, traceback.format_exc())
                 spec = spectrogram_torch(
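The weights_only=True argument added here (and in the files below) matches newer PyTorch, where torch.load uses a restricted unpickler that only rebuilds tensors and plain containers rather than arbitrary pickled objects. A minimal sketch of the pattern, with a hypothetical cache path and the same recompute-on-failure fallback the surrounding code uses:

import torch

cache_path = "example.spec.pt"  # hypothetical cached spectrogram file
try:
    # weights_only=True refuses to unpickle arbitrary Python objects,
    # so a tampered cache file cannot execute code while loading.
    spec = torch.load(cache_path, weights_only=True)
except Exception:
    # Mirror data_utils.py: if the cached tensor cannot be loaded,
    # fall back to recomputing the spectrogram from the audio.
    spec = None  # placeholder for the spectrogram_torch(...) recompute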

infer/lib/train/utils.py

Lines changed: 1 addition & 3 deletions
@@ -71,9 +71,7 @@ def go(model, bkey):
 
 def load_checkpoint(checkpoint_path, model, optimizer=None, load_opt=1):
     assert os.path.isfile(checkpoint_path)
-    checkpoint_dict = torch.load(checkpoint_path, map_location="cpu")
-
-    saved_state_dict = checkpoint_dict["model"]
+    saved_state_dict = torch.load(checkpoint_path, map_location="cpu", weights_only=True)["model"]
     if hasattr(model, "module"):
         state_dict = model.module.state_dict()
     else:

infer/modules/train/train.py

Lines changed: 12 additions & 7 deletions
@@ -131,9 +131,14 @@ def run(rank, n_gpus, hps: utils.HParams, logger: logging.Logger):
         writer = SummaryWriter(log_dir=hps.model_dir)
         writer_eval = SummaryWriter(log_dir=os.path.join(hps.model_dir, "eval"))
 
-    dist.init_process_group(
-        backend="gloo", init_method="env://", world_size=n_gpus, rank=rank
-    )
+    try:
+        dist.init_process_group(
+            backend="gloo" if os.name == "nt" or not torch.cuda.is_available() else "nccl", init_method="env://", world_size=n_gpus, rank=rank
+        )
+    except:
+        dist.init_process_group(
+            backend="gloo" if os.name == "nt" or not torch.cuda.is_available() else "nccl", init_method="env://?use_libuv=False", world_size=n_gpus, rank=rank
+        )
     torch.manual_seed(hps.train.seed)
     if torch.cuda.is_available():
         torch.cuda.set_device(rank)
@@ -238,13 +243,13 @@ def run(rank, n_gpus, hps: utils.HParams, logger: logging.Logger):
         if hasattr(net_g, "module"):
             logger.info(
                 net_g.module.load_state_dict(
-                    torch.load(hps.pretrainG, map_location="cpu")["model"]
+                    torch.load(hps.pretrainG, map_location="cpu", weights_only=True)["model"]
                 )
             )  ##测试不加载优化器
         else:
             logger.info(
                 net_g.load_state_dict(
-                    torch.load(hps.pretrainG, map_location="cpu")["model"]
+                    torch.load(hps.pretrainG, map_location="cpu", weights_only=True)["model"]
                 )
             )  ##测试不加载优化器
         if hps.pretrainD != "":
@@ -253,13 +258,13 @@ def run(rank, n_gpus, hps: utils.HParams, logger: logging.Logger):
         if hasattr(net_d, "module"):
             logger.info(
                 net_d.module.load_state_dict(
-                    torch.load(hps.pretrainD, map_location="cpu")["model"]
+                    torch.load(hps.pretrainD, map_location="cpu", weights_only=True)["model"]
                 )
             )
         else:
             logger.info(
                 net_d.load_state_dict(
-                    torch.load(hps.pretrainD, map_location="cpu")["model"]
+                    torch.load(hps.pretrainD, map_location="cpu", weights_only=True)["model"]
                 )
             )
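The try/except around dist.init_process_group covers two platform differences at once: NCCL needs CUDA (and is unavailable on Windows), so those setups fall back to gloo, and recent PyTorch builds on Windows use a libuv-based TCPStore that some environments reject, hence the retry with use_libuv=False. A condensed sketch of the same logic (the helper name is mine, not from the commit):

import os

import torch
import torch.distributed as dist

def init_distributed(world_size: int, rank: int) -> None:
    # gloo on Windows or CPU-only machines, nccl when CUDA is usable
    backend = "gloo" if os.name == "nt" or not torch.cuda.is_available() else "nccl"
    try:
        dist.init_process_group(
            backend=backend, init_method="env://", world_size=world_size, rank=rank
        )
    except Exception:
        # Retry with libuv disabled, as the diff does for newer Windows builds.
        dist.init_process_group(
            backend=backend,
            init_method="env://?use_libuv=False",
            world_size=world_size,
            rank=rank,
        )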

infer/modules/vc/utils.py

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-import os
+import os, pathlib
 
 from fairseq import checkpoint_utils
 
@@ -8,7 +8,7 @@ def get_index_path_from_model(sid):
         (
             f
             for f in [
-                os.path.join(root, name)
+                str(pathlib.Path(root, name))
                 for path in [os.getenv("outside_index_root"), os.getenv("index_root")]
                 for root, _, files in os.walk(path, topdown=False)
                 for name in files
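str(pathlib.Path(root, name)) builds the candidate index path with the platform's native separator, so the value returned here compares and displays consistently with the index paths web.py now collects the same way. A small illustration (the folder and file names are made up):

import os
import pathlib

root, name = "logs/my-exp", "added_index_example.index"  # hypothetical values
# On Windows: "logs/my-exp\added_index_example.index" (mixed separators)
print(os.path.join(root, name))
# On Windows: "logs\my-exp\added_index_example.index" (uniform separators)
print(str(pathlib.Path(root, name)))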

rvc/f0/models.py

Lines changed: 2 additions & 1 deletion
@@ -7,8 +7,9 @@ def get_rmvpe(
     from rvc.f0.e2e import E2E
 
     model = E2E(4, 1, (2, 2))
-    ckpt = torch.load(model_path, map_location=device)
+    ckpt = torch.load(model_path, map_location=device, weights_only=True)
     model.load_state_dict(ckpt)
+    del ckpt
     model.eval()
     if is_half:
         model = model.half()

web.py

Lines changed: 21 additions & 21 deletions
@@ -46,7 +46,7 @@
 shutil.rmtree(tmp, ignore_errors=True)
 os.makedirs(tmp, exist_ok=True)
 os.makedirs(os.path.join(now_dir, "logs"), exist_ok=True)
-os.makedirs(os.path.join(now_dir, "assets/weights"), exist_ok=True)
+os.makedirs(os.path.join(now_dir, "assets", "weights"), exist_ok=True)
 os.environ["TEMP"] = tmp
 warnings.filterwarnings("ignore")
 torch.manual_seed(114514)
@@ -142,20 +142,22 @@ def forward_dml(ctx, x, scale):
 outside_index_root = os.getenv("outside_index_root")
 
 names = []
-for name in os.listdir(weight_root):
-    if name.endswith(".pth"):
-        names.append(name)
 index_paths = []
 
+def lookup_names(weight_root):
+    global names
+    for name in os.listdir(weight_root):
+        if name.endswith(".pth"):
+            names.append(name)
 
 def lookup_indices(index_root):
     global index_paths
-    for root, dirs, files in os.walk(index_root, topdown=False):
+    for root, _, files in os.walk(index_root, topdown=False):
         for name in files:
             if name.endswith(".index") and "trained" not in name:
-                index_paths.append("%s/%s" % (root, name))
-
+                index_paths.append(str(pathlib.Path(root, name)))
 
+lookup_names(weight_root)
 lookup_indices(index_root)
 lookup_indices(outside_index_root)
 uvr5_names = []
@@ -165,15 +167,12 @@ def lookup_indices(index_root):
 
 
 def change_choices():
+    global index_paths, names
     names = []
-    for name in os.listdir(weight_root):
-        if name.endswith(".pth"):
-            names.append(name)
+    lookup_names(weight_root)
     index_paths = []
-    for root, dirs, files in os.walk(index_root, topdown=False):
-        for name in files:
-            if name.endswith(".index") and "trained" not in name:
-                index_paths.append("%s/%s" % (root, name))
+    lookup_indices(index_root)
+    lookup_indices(outside_index_root)
     return {"choices": sorted(names), "__type__": "update"}, {
         "choices": sorted(index_paths),
         "__type__": "update",
@@ -223,16 +222,17 @@ def if_done_multi(done, ps):
 
 def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
     sr = sr_dict[sr]
-    os.makedirs("%s/logs/%s" % (now_dir, exp_dir), exist_ok=True)
-    f = open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "w")
+    exp_path = pathlib.Path(now_dir, "logs", exp_dir)
+    os.makedirs(exp_path, exist_ok=True)
+    log_file_path = exp_path / "preprocess.log"
+    f = open(log_file_path, "w")
     f.close()
-    cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f' % (
+    cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s" %s %.1f' % (
         config.python_cmd,
         trainset_dir,
         sr,
         n_p,
-        now_dir,
-        exp_dir,
+        str(exp_path),
        config.noparallel,
        config.preprocess_per,
    )
@@ -249,12 +249,12 @@ def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
         ),
     ).start()
     while 1:
-        with open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "r") as f:
+        with open(log_file_path, "r") as f:
             yield (f.read())
         sleep(1)
         if done[0]:
             break
-    with open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "r") as f:
+    with open(log_file_path, "r") as f:
         log = f.read()
     logger.info(log)
     yield log
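web.py now funnels both the startup scan and the refresh handler (change_choices) through shared lookup_names/lookup_indices helpers instead of duplicating the loops. A condensed sketch of what those helpers do, with the module-level globals replaced by return values for readability (that part is my simplification, not the diff's):

import os
import pathlib

def lookup_names(weight_root):
    # every .pth file directly inside the weights folder is a selectable model
    return [name for name in os.listdir(weight_root) if name.endswith(".pth")]

def lookup_indices(index_root):
    # keep finished (non-"trained") .index files, built with pathlib so the
    # stored paths use the platform's separator, matching vc/utils.py
    found = []
    for root, _, files in os.walk(index_root, topdown=False):
        for name in files:
            if name.endswith(".index") and "trained" not in name:
                found.append(str(pathlib.Path(root, name)))
    return found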
