Chenxu Hu huchenxucs

@huchenxucs
huchenxucs / multi_rsync.sh
Last active October 11, 2022 09:22
Multiprocess rsync (multithreaded rsync) for data copy
#!/usr/bin/env bash
# Define source, target, maxdepth and cd to source
source="abspath/to/source/dir"
target="abspath/to/target/dir"
depth=5
cd "${source}"
# Set the maximum number of concurrent rsync threads
maxthreads=64
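The preview above stops before the worker loop that actually launches the transfers. As a rough illustration of the same idea (not the gist's own continuation), the copy can also be driven from Python with a thread pool that runs one rsync process per top-level subdirectory; the paths and thread count below are placeholders.

# Hedged sketch: parallel rsync driven from Python, not the gist's actual loop.
# Assumes `rsync` is on PATH; source/target paths are placeholders.
import subprocess
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

source = Path("/abspath/to/source/dir")
target = Path("/abspath/to/target/dir")
max_threads = 64

def copy_dir(subdir: Path) -> int:
    # One rsync process per top-level subdirectory; -a preserves attributes.
    rel = subdir.relative_to(source)
    return subprocess.run(["rsync", "-a", f"{subdir}/", f"{target / rel}/"]).returncode

subdirs = [p for p in source.iterdir() if p.is_dir()]
with ThreadPoolExecutor(max_workers=max_threads) as pool:
    results = list(pool.map(copy_dir, subdirs))

Files sitting directly in the source root are not covered by this sketch; a final plain rsync pass over the top level would handle them.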
@huchenxucs
huchenxucs / .tmux.conf
Created February 13, 2022 13:12
tmux conf file
# enable mouse mode
set-option -g mouse on
# reload config
bind r source-file ~/.tmux.conf \; display-message "Config reloaded..."
# set window split
bind-key v split-window -h
bind-key h split-window -v
@huchenxucs
huchenxucs / extract_mel_from_wav.py
Created August 21, 2020 07:53
Extract a mel spectrogram from a wav file; can be used for PWG (Parallel WaveGAN) and WaveGlow.
def process_utterance(wav_path,
                      fft_size=1024,
                      hop_size=256,
                      win_length=1024,
                      window="hann",
                      num_mels=80,
                      fmin=80,
                      fmax=7600,
                      eps=1e-10,
                      sample_rate=22050,
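The signature above is cut off by the preview. As a point of reference, here is a minimal librosa-based sketch of the same mel-extraction step; it follows a common recipe rather than the gist's actual body, so treat the normalization details as assumptions.

# Hedged sketch of mel extraction, assuming a librosa-style pipeline (illustration only).
import numpy as np
import librosa

def extract_mel(wav_path, fft_size=1024, hop_size=256, win_length=1024,
                window="hann", num_mels=80, fmin=80, fmax=7600,
                eps=1e-10, sample_rate=22050):
    wav, _ = librosa.load(wav_path, sr=sample_rate)
    # Magnitude spectrogram: (1 + fft_size // 2, frames)
    spc = np.abs(librosa.stft(wav, n_fft=fft_size, hop_length=hop_size,
                              win_length=win_length, window=window))
    # Mel filterbank and log compression, clipped at eps to avoid log(0).
    mel_basis = librosa.filters.mel(sr=sample_rate, n_fft=fft_size,
                                    n_mels=num_mels, fmin=fmin, fmax=fmax)
    mel = np.log10(np.maximum(eps, mel_basis @ spc))
    return mel.T  # (frames, num_mels)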
class Conv1dWithMask(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=3, bias=True, w_init_gain='linear'):
        super(Conv1dWithMask, self).__init__()
        assert kernel_size > 1, "Conv1dWithMask kernel size must be greater than 1"
        self.kernel_size = kernel_size
        self.out_channels = out_channels
        self.conv = torch.nn.Conv1d(in_channels, out_channels, kernel_size=kernel_size, bias=bias)
        torch.nn.init.xavier_uniform_(
            self.conv.weight, gain=torch.nn.init.calculate_gain(w_init_gain))
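The forward pass is not shown in this preview. One plausible, purely hypothetical way such a masked Conv1d could work is to zero out padded time steps before and after convolving so padding never leaks into valid frames; the helper below assumes an input of shape (batch, channels, time) and a mask of shape (batch, time), and is not taken from the gist.

# Hypothetical forward helper for a masked Conv1d; NOT the gist's own forward().
import torch
import torch.nn.functional as F

def masked_conv1d_forward(conv: torch.nn.Conv1d, x: torch.Tensor,
                          mask: torch.Tensor) -> torch.Tensor:
    """x: (B, C_in, T); mask: (B, T) with True/1 at valid frames."""
    mask = mask.unsqueeze(1).to(x.dtype)      # (B, 1, T)
    x = x * mask                              # zero out padded frames
    k = conv.kernel_size[0]
    x = F.pad(x, ((k - 1) // 2, k // 2))      # keep output length equal to T
    out = conv(x)                             # (B, C_out, T)
    return out * mask                         # re-mask the outputs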
@huchenxucs
huchenxucs / fastspeech.py
Created August 4, 2020 06:51
encoder-decoder self attention mask
import numpy as np
from modules.transformer_tts import TransformerEncoder, TransformerDecoder
from modules.operations import *
from modules.tts_modules import FastspeechDecoder, RefEncoder, DurationPredictor, LengthRegulator, PitchPredictor, \
TacotronDecoder, EnergyPredictor
from modules.pos_embed import RelativePositionBias
from utils.world_utils import f0_to_coarse_torch, restore_pitch
from utils.tts_utils import sequence_mask
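Only the imports are visible in this preview. As a hedged sketch of what an encoder-decoder attention mask generally looks like in this setting (the names here are illustrative, not the gist's), the mask can be built by combining a target-length mask with a source-length mask:

import torch

def sequence_mask(lengths: torch.Tensor, max_len: int = None) -> torch.Tensor:
    """(B,) lengths -> (B, T) boolean mask, True at valid positions."""
    max_len = max_len or int(lengths.max())
    ids = torch.arange(max_len, device=lengths.device)
    return ids.unsqueeze(0) < lengths.unsqueeze(1)

def enc_dec_attn_mask(src_lengths: torch.Tensor, tgt_lengths: torch.Tensor) -> torch.Tensor:
    """(B, T_tgt, T_src) mask: True where a decoder step may attend to an encoder step."""
    src_mask = sequence_mask(src_lengths)     # (B, T_src)
    tgt_mask = sequence_mask(tgt_lengths)     # (B, T_tgt)
    return tgt_mask.unsqueeze(2) & src_mask.unsqueeze(1)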
@huchenxucs
huchenxucs / pos_embed.py
Created July 23, 2020 06:09
T5 relative positional embedding
import math
import torch
import torch.nn as nn
from torch.nn import functional as F
class RelativePositionBias(nn.Module):
    def __init__(self, bidirectional=True, num_buckets=32, max_distance=128, n_heads=2):
        super(RelativePositionBias, self).__init__()
        self.bidirectional = bidirectional
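The preview stops inside __init__. The core of a T5-style bias module is the relative-position bucketing function; the sketch below follows the published T5 formulation rather than this gist's exact code.

# Hedged sketch of T5-style relative position bucketing (illustration, not the gist's code).
import math
import torch

def relative_position_bucket(relative_position, bidirectional=True,
                             num_buckets=32, max_distance=128):
    """relative_position: LongTensor of (query_pos - key_pos) offsets."""
    ret = 0
    n = -relative_position
    if bidirectional:
        num_buckets //= 2
        ret += (n < 0).long() * num_buckets   # separate buckets for positive offsets
        n = torch.abs(n)
    else:
        n = torch.max(n, torch.zeros_like(n))
    # Half of the buckets cover exact small offsets, the rest grow logarithmically.
    max_exact = num_buckets // 2
    is_small = n < max_exact
    val_if_large = max_exact + (
        torch.log(n.float() / max_exact) / math.log(max_distance / max_exact)
        * (num_buckets - max_exact)
    ).long()
    val_if_large = torch.min(val_if_large, torch.full_like(val_if_large, num_buckets - 1))
    ret += torch.where(is_small, n, val_if_large)
    return ret

In the full module, the bucket indices would typically be fed through an nn.Embedding(num_buckets, n_heads) and the result added to the attention logits, one bias value per head.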