This repo contains code for the paper "Disentangled Speech Representation Learning for One-Shot Cross-Lingual Voice Conversion Using β-VAE" in SLT 2022.
conda env create -f environment.yaml
conda activate betavae-vc-env
- Download the corpora.
- Modify the paths specified in configs/haparams.py: set corpus_dir for both VCTK and AiShell3, and set dataset_dir to where extracted features and TFRecord files will be saved.
- Prepare the dataset for training:
python preprocess.py
CUDA_VISIBLE_DEVICES=0 TF_FORCE_GPU_ALLOW_GROWTH=true python train.py --out_dir ./outputs --data_dir /path/to/save/features/tfrecords
# inference from mels
# test-mels.txt contains list of paths for mel-spectrograms with *.npy format, one path per line
CUDA_VISIBLE_DEVICES=0 TF_FORCE_GPU_ALLOW_GROWTH=true python inference-from-mel.py --ckpt_path ./outputs/models/ckpt-500 --test_dir outputs/tests --src_mels test-mels.txt --ref_mels test-mels.txt
# inference from wavs
# test-wavs.txt contains list of paths for speech with *.wav format, one path per line
CUDA_VISIBLE_DEVICES=0 TF_FORCE_GPU_ALLOW_GROWTH=true python inference-from-wav.py --ckpt_path ./outputs/models/ckpt-500 --test_dir outputs/tests --src_wavs test-wavs.txt --ref_wavs test-wavs.txt
CUDA_VISIBLE_DEVICES=0 TF_FORCE_GPU_ALLOW_GROWTH=true python feature_extraction.py --data_dir /path/to/save/features/tfrecords --save_dir ./outputs/features --ckpt_path ./outputs/models/ckpt-300
# compute EER using content embeddings
python tests/compute_eer.py --data_dir ./outputs/features/EN --mode content
# compute EER using speaker embeddings
python tests/compute_eer.py --data_dir ./outputs/features/EN --mode spk
@inproceedings{slt2022_hui_disentangle,
author = {Hui Lu and
Disong Wang and
Xixin Wu and
Zhiyong Wu and
Xunying Liu and
Helen Meng},
title = {Disentangled Speech Representation Learning for One-Shot Cross-Lingual
Voice Conversion Using Beta-VAE},
booktitle = {{IEEE} Spoken Language Technology Workshop, {SLT} 2022, Doha, Qatar,
January 9-12, 2023},
pages = {814--821},
publisher = {{IEEE}},
year = {2022},
doi = {10.1109/SLT54892.2023.10022787},
}