PyTorch to RETURNN, Parallel WaveGAN example, full log output
/usr/local/bin/python3 "/Users/az/Library/Application Support/JetBrains/Toolbox/apps/PyCharm-C/ch-0/202.7660.27/PyCharm CE.app/Contents/plugins/python-ce/helpers/pydev/pydevd.py" --multiproc --qt-support=auto --client 127.0.0.1 --port 57798 --file /Users/az/Programmierung/import-parallel-wavegan/pytorch_to_returnn.py --pwg_config mb_melgan.v2.yaml --pwg_checkpoint mb_melgan_models/checkpoint-1000000steps.pkl --features data/features.npy
pydev debugger: process 58079 is connecting
Connected to pydev debugger (build 202.7660.27)
2020-11-26 18:21:44.088082: I tensorflow/core/platform/cpu_feature_guard.cc:143] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
2020-11-26 18:21:44.101822: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x7fabda13afa0 initialized for platform Host (this does not guarantee that XLA will be used). Devices:
2020-11-26 18:21:44.101846: I tensorflow/compiler/xla/service/service.cc:176] StreamExecutor device (0): Host, Default Version
CUDA_VISIBLE_DEVICES is not set.
Collecting TensorFlow device list...
Local devices available to TensorFlow:
1/2: name: "/device:CPU:0"
device_type: "CPU"
memory_limit: 268435456
locality {
}
incarnation: 3807773853604125641
2/2: name: "/device:XLA_CPU:0"
device_type: "XLA_CPU"
memory_limit: 17179869184
locality {
}
incarnation: 8874893949683666076
physical_device_desc: "device: XLA_CPU device"
Feature shape: (80, 80)
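For orientation: as the ">>>" markers below show, the driver runs the same MB-MelGAN generator three times, first with plain PyTorch imports as a reference, then with wrapped imports that trace every torch call, then with the torch API replaced by a RETURNN-backed reimplementation. A minimal sketch of what the reference step does, assuming the usual parallel_wavegan config keys and checkpoint layout (the actual pytorch_to_returnn.py driver is not part of this log):

```python
import numpy as np
import torch
import yaml
from parallel_wavegan.models import MelGANGenerator

features = np.load("data/features.npy")  # logged above as shape (80, 80), assumed (frames, mel bins)
with open("mb_melgan.v2.yaml") as f:
    config = yaml.safe_load(f)

model = MelGANGenerator(**config["generator_params"])
state = torch.load("mb_melgan_models/checkpoint-1000000steps.pkl", map_location="cpu")
model.load_state_dict(state["model"]["generator"])  # checkpoint layout assumed from parallel_wavegan
model.remove_weight_norm()
model.eval()

with torch.no_grad():
    x = torch.from_numpy(features).float().transpose(0, 1).unsqueeze(0)  # (1, 80, T)
    subbands = model(x)  # (1, 4, T * upsample_factor): 4 PQMF subband signals
```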
>>> Running with standard reference imports...
>>> Running with wrapped imports, wrapping original PyTorch...
*** register sys.meta_path for ctx <WrapCtx 'pytorch_to_returnn.import_wrapper._torch_traced'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.from_numpy' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.from_numpy'>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.from_numpy(...)
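The "register sys.meta_path" line above is standard Python import machinery: a meta-path finder is put in front of the normal finders so that imports of parallel_wavegan and torch can be re-executed inside a wrapping context, which is why all modules below appear under the pytorch_to_returnn.import_wrapper._torch_traced namespace. A generic sketch of the mechanism (assumed; not the actual pytorch_to_returnn implementation):

```python
import importlib.abc
import importlib.util
import sys

class WrappingFinder(importlib.abc.MetaPathFinder):
    """Intercepts imports of the given top-level packages."""

    def __init__(self, prefixes):
        self.prefixes = tuple(prefixes)
        self._busy = set()  # guard against re-entry while we delegate below

    def find_spec(self, fullname, path, target=None):
        root = fullname.split(".")[0]
        if root not in self.prefixes or fullname in self._busy:
            return None  # let the next finder on sys.meta_path handle it
        self._busy.add(fullname)
        try:
            spec = importlib.util.find_spec(fullname)
        finally:
            self._busy.discard(fullname)
        # A real wrapper would attach a custom loader here that re-executes the
        # module source in its own context (cf. the WrappedSourceModule entries).
        return spec

sys.meta_path.insert(0, WrappingFinder(["parallel_wavegan"]))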
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan>
*** <WrapCtx 'pytorch_to_returnn.import_wrapper._torch_traced'> extend by mod 'parallel_wavegan'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.models>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.models.melgan>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.causal_conv>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn' -> <WrappedIndirectModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.Module' -> <class 'pytorch_to_returnn.import_wrapper.torch_wrappers.module.WrappedModuleBase'>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.pqmf>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.residual_block>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.module.Module' -> <class 'pytorch_to_returnn.import_wrapper.torch_wrappers.module.WrappedModuleBase'>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.linear>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.Tensor' -> <class 'pytorch_to_returnn.import_wrapper.torch_wrappers.tensor.WrappedTorchTensor'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.parameter.Parameter' -> <class 'pytorch_to_returnn.import_wrapper.torch_wrappers.parameter.WrappedTorchParameter'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init' -> <WrappedIndirectModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.utils>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._six.container_abcs'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_1_t'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_2_t'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_3_t'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.xavier_uniform_' -> <function xavier_uniform_ at 0x10b91d440>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.constant_' -> <function constant_ at 0x10b91d4d0>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.xavier_normal_' -> <function xavier_normal_ at 0x10b91d560>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.loss>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.distance>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn._reduction' -> <WrappedIndirectModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn._reduction>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.container>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._jit_internal._copy_to_script_wrapper' -> <function _copy_to_script_wrapper at 0x1552d0560>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch._jit_internal._copy_to_script_wrapper(...)
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.pooling>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_any_t'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._ratio_3_t'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._ratio_2_t'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.batchnorm>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules._functions>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.autograd.function.Function'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.instancenorm>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.normalization>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.Size'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.dropout>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_4_t'
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._size_6_t'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.sparse>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.rnn>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.rnn.PackedSequence' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.rnn.PackedSequence'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._VF' -> <WrappedIndirectModule pytorch_to_returnn.import_wrapper._torch_traced.torch._VF>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._VF.rnn_tanh' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.rnn_tanh'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._VF.rnn_relu' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.rnn_relu'>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch._jit_internal._overload_method' -> <function _overload_method at 0x1553a8ef0>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch._jit_internal._overload_method(...)
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.pixelshuffle>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.upsampling>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.common_types._ratio_any_t'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.fold>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.adaptive>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.log_softmax' -> <function log_softmax at 0x15a90fd40>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.transformer>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.flatten>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.Conv1d' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.Conv1d'>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.residual_stack>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.upsample>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.Conv2d' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.Conv2d'>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.models.parallel_wavegan>
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.device'
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.models.melgan.MelGANGenerator(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.ReflectionPad1d' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ReflectionPad1d'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ReflectionPad1d(...)
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.Conv1d(...)
**** torch tensor func __get__
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.kaiming_uniform_' -> <function kaiming_uniform_ at 0x154d70e60>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.kaiming_uniform_(...)
**** torch tensor func dim
**** torch tensor func size
**** torch tensor func __getitem__
**** torch tensor func uniform_
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init._calculate_fan_in_and_fan_out' -> <function _calculate_fan_in_and_fan_out at 0x155360b90>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init._calculate_fan_in_and_fan_out(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.uniform_' -> <function uniform_ at 0x155360c20>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.init.uniform_(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.LeakyReLU' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.LeakyReLU'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.LeakyReLU(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.ConvTranspose1d' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.ConvTranspose1d'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.ConvTranspose1d(...)
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.residual_stack.ResidualStack(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.Sequential' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.container.Sequential'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.container.Sequential(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.Tanh' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.Tanh'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.Tanh(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.weight_norm' -> <function weight_norm at 0x1553c1950>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.weight_norm(...)
**** torch tensor func norm_except_dim
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.load' -> <function load at 0x1553c18c0>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.load(...)
**** torch tensor func copy_
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.remove_weight_norm' -> <function remove_weight_norm at 0x1553c1f80>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.utils.remove_weight_norm(...)
**** torch tensor func is_floating_point
**** torch tensor func to
**** torch tensor func _has_compatible_shallow_copy_type
**** torch tensor func __set__
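The weight_norm / remove_weight_norm calls traced above are standard torch.nn.utils behavior: weight_norm reparameterizes each weight as w = g * v / ||v||, storing parameters weight_g and weight_v, and remove_weight_norm folds them back into a plain weight after the checkpoint has been loaded. For example:

```python
import torch
import torch.nn as nn

conv = nn.Conv1d(80, 384, kernel_size=7)
conv = nn.utils.weight_norm(conv)   # reparameterize: weight = g * v / ||v||
print(sorted(name for name, _ in conv.named_parameters()))
# -> ['bias', 'weight_g', 'weight_v']

nn.utils.remove_weight_norm(conv)   # fold g, v back into a plain .weight
print(sorted(name for name, _ in conv.named_parameters()))
# -> ['bias', 'weight']
```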
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.pqmf.PQMF(...)
**** torch tensor func float
**** torch tensor func unsqueeze
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.zeros' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.zeros'>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.zeros(...)
**** torch tensor func __setitem__
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.ConstantPad1d' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ConstantPad1d'>
*** torch module create pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ConstantPad1d(...)
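The PQMF module and the ConstantPad1d created above are the multi-band part of MB-MelGAN: the generator's final Conv1d(48, 4, ...) emits 4 subband signals, and a pseudo-QMF filter bank recombines them into the full-band waveform. A sketch, with the filter parameters assumed from the parallel_wavegan defaults (62 taps would explain the (31, 31) padding seen in pad_fn below):

```python
from parallel_wavegan.layers import PQMF

# subbands/taps are assumed defaults; 62 taps -> 62 // 2 = 31 samples of padding per side
pqmf = PQMF(subbands=4, taps=62)
# pqmf.synthesis(subband_signals) turns (B, 4, T) subbands into (B, 1, 4 * T) audio
```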
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.no_grad' -> <class 'pytorch_to_returnn.import_wrapper._torch_traced.torch.autograd.grad_mode.no_grad'>
*** WrappedClass pytorch_to_returnn.import_wrapper._torch_traced.torch.no_grad(...)
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.models.melgan.MelGANGenerator(...)(...)
**** torch tensor func __hash__
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.container.Sequential(...)(...)
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ReflectionPad1d(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.pad' -> <function _pad at 0x1552f08c0>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.pad(...)
**** torch tensor func _pad
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.Conv1d(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.conv1d' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.conv1d'>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.conv1d(...)
**** torch tensor func conv1d
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.LeakyReLU(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.leaky_relu' -> <function leaky_relu at 0x1552f0b90>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.leaky_relu(...)
**** torch tensor func leaky_relu
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.conv.ConvTranspose1d(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.conv_transpose1d' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.conv_transpose1d'>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.functional.conv_transpose1d(...)
**** torch tensor func conv_transpose1d
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.parallel_wavegan.layers.residual_stack.ResidualStack(...)(...)
**** torch tensor func add
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.activation.Tanh(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.tanh' -> <class 'pytorch_to_returnn.import_wrapper.base_wrappers.function._VariableFunctionsClass.tanh'>
*** func call pytorch_to_returnn.import_wrapper._torch_traced.torch.tanh(...)
**** torch tensor func tanh
**** torch tensor func mul
*** torch module call pytorch_to_returnn.import_wrapper._torch_traced.torch.nn.modules.padding.ConstantPad1d(...)(...)
*** indirect getattr 'pytorch_to_returnn.import_wrapper._torch_traced.torch.no_grad(...).prev'
**** torch tensor func cpu
**** torch tensor func numpy
>>>> Module naming hierarchy:
.tmp_root: (hidden)
melgan: <ModuleEntry Sequential(...)> -> ...
  layer0: <ModuleEntry ReflectionPad1d((3, 3))> -> ...
  layer1: <ModuleEntry Conv1d(80, 384, kernel_size=(7,), stride=(1,))> -> ...
  layer2: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
  layer3: <ModuleEntry ConvTranspose1d(384, 192, kernel_size=(10,), stride=(5,), padding=(3,), output_padding=(1,))> -> ...
  layer4: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((1, 1))> -> ...
      layer2: <ModuleEntry Conv1d(192, 192, kernel_size=(3,), stride=(1,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
  layer5: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((3, 3))> -> ...
      layer2: <ModuleEntry Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(3,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
  layer6: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((9, 9))> -> ...
      layer2: <ModuleEntry Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(9,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
  layer7: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((27, 27))> -> ...
      layer2: <ModuleEntry Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(27,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(192, 192, kernel_size=(1,), stride=(1,))> -> ...
  layer8: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
  layer9: <ModuleEntry ConvTranspose1d(192, 96, kernel_size=(10,), stride=(5,), padding=(3,), output_padding=(1,))> -> ...
  layer10: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((1, 1))> -> ...
      layer2: <ModuleEntry Conv1d(96, 96, kernel_size=(3,), stride=(1,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
  layer11: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((3, 3))> -> ...
      layer2: <ModuleEntry Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(3,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
  layer12: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((9, 9))> -> ...
      layer2: <ModuleEntry Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(9,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
  layer13: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((27, 27))> -> ...
      layer2: <ModuleEntry Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(27,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(96, 96, kernel_size=(1,), stride=(1,))> -> ...
  layer14: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
  layer15: <ModuleEntry ConvTranspose1d(96, 48, kernel_size=(4,), stride=(2,), padding=(1,))> -> ...
  layer16: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((1, 1))> -> ...
      layer2: <ModuleEntry Conv1d(48, 48, kernel_size=(3,), stride=(1,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
  layer17: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((3, 3))> -> ...
      layer2: <ModuleEntry Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(3,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
  layer18: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((9, 9))> -> ...
      layer2: <ModuleEntry Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(9,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
  layer19: <ModuleEntry ResidualStack(...)> -> ...
    stack: <ModuleEntry Sequential(...)> -> ...
      layer0: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer1: <ModuleEntry ReflectionPad1d((27, 27))> -> ...
      layer2: <ModuleEntry Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(27,))> -> ...
      layer3: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
      layer4: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
    skip_layer: <ModuleEntry Conv1d(48, 48, kernel_size=(1,), stride=(1,))> -> ...
  layer20: <ModuleEntry LeakyReLU(negative_slope=0.2)> -> ...
  layer21: <ModuleEntry ReflectionPad1d((3, 3))> -> ...
  layer22: <ModuleEntry Conv1d(48, 4, kernel_size=(7,), stride=(1,))> -> ...
  layer23: <ModuleEntry Tanh()> -> ...
pad_fn: <ModuleEntry ConstantPad1d(padding=(31, 31), value=0.0)> -> ...
>>>> Root module calls:
{
  'melgan': <CallEntry #1 <ModuleEntry Sequential(...)>>,
  'pad_fn': <CallEntry #0 <ModuleEntry ConstantPad1d(padding=(31, 31), value=0.0)>>
}
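Read together, the two root calls give the inference dataflow: call #0 pads the input features with pad_fn, call #1 runs them through the melgan Sequential, i.e. roughly audio = melgan(pad_fn(features)).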
>>>> Modules with params:
{
  'melgan.layer1': Conv1d(80, 384, kernel_size=(7,), stride=(1,)),
  'melgan.layer3': ConvTranspose1d(384, 192, kernel_size=(10,), stride=(5,), padding=(3,), output_padding=(1,)),
  'melgan.layer4.stack.layer2': Conv1d(192, 192, kernel_size=(3,), stride=(1,)),
  'melgan.layer4.stack.layer4': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer4.skip_layer': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer5.stack.layer2': Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(3,)),
  'melgan.layer5.stack.layer4': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer5.skip_layer': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer6.stack.layer2': Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(9,)),
  'melgan.layer6.stack.layer4': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer6.skip_layer': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer7.stack.layer2': Conv1d(192, 192, kernel_size=(3,), stride=(1,), dilation=(27,)),
  'melgan.layer7.stack.layer4': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer7.skip_layer': Conv1d(192, 192, kernel_size=(1,), stride=(1,)),
  'melgan.layer9': ConvTranspose1d(192, 96, kernel_size=(10,), stride=(5,), padding=(3,), output_padding=(1,)),
  'melgan.layer10.stack.layer2': Conv1d(96, 96, kernel_size=(3,), stride=(1,)),
  'melgan.layer10.stack.layer4': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer10.skip_layer': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer11.stack.layer2': Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(3,)),
  'melgan.layer11.stack.layer4': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer11.skip_layer': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer12.stack.layer2': Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(9,)),
  'melgan.layer12.stack.layer4': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer12.skip_layer': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer13.stack.layer2': Conv1d(96, 96, kernel_size=(3,), stride=(1,), dilation=(27,)),
  'melgan.layer13.stack.layer4': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer13.skip_layer': Conv1d(96, 96, kernel_size=(1,), stride=(1,)),
  'melgan.layer15': ConvTranspose1d(96, 48, kernel_size=(4,), stride=(2,), padding=(1,)),
  'melgan.layer16.stack.layer2': Conv1d(48, 48, kernel_size=(3,), stride=(1,)),
  'melgan.layer16.stack.layer4': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer16.skip_layer': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer17.stack.layer2': Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(3,)),
  'melgan.layer17.stack.layer4': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer17.skip_layer': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer18.stack.layer2': Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(9,)),
  'melgan.layer18.stack.layer4': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer18.skip_layer': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer19.stack.layer2': Conv1d(48, 48, kernel_size=(3,), stride=(1,), dilation=(27,)),
  'melgan.layer19.stack.layer4': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer19.skip_layer': Conv1d(48, 48, kernel_size=(1,), stride=(1,)),
  'melgan.layer22': Conv1d(48, 4, kernel_size=(7,), stride=(1,))
}
>>>> Looks good!
>>> Running with wrapped Torch import, wrapping replacement for PyTorch...
*** register sys.meta_path for ctx <WrapCtx 'pytorch_to_returnn.import_wrapper._torch_returnn'>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan>
*** <WrapCtx 'pytorch_to_returnn.import_wrapper._torch_returnn'> extend by mod 'parallel_wavegan'
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.models>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.models.melgan>
WARNING:tensorflow:From /Users/az/Programmierung/import-parallel-wavegan/returnn/tf/network.py:352: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.
Instructions for updating:
Call initializer instance with the dtype argument instead of passing it to the constructor
WARNING:tensorflow:From /Users/az/Library/Python/3.7/lib/python/site-packages/tensorflow/python/ops/resource_variable_ops.py:1666: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.
Instructions for updating:
If using Keras pass *_constraint arguments to layers.
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers.causal_conv>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers.pqmf>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers.residual_block>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers.residual_stack>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.layers.upsample>
*** exec mod <WrappedSourceModule pytorch_to_returnn.import_wrapper._torch_returnn.parallel_wavegan.models.parallel_wavegan>
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 80, 7)}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight' VariableLayer output: [384,80,F|7]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer1_weight'}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm' MathNormLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 80, 7)}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight_v' VariableLayer output: [384,80,F|7]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer1_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm_1' MathNormLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_weight_g' VariableLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer1_weight_g', 'layer1_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_truediv' CombineLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer1_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer1_weight_v', 'layer1_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/'layer1_mul' CombineLayer output: [384,80,F|7]
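This layer1_weight_v / layer1_Norm_1 / layer1_weight_g / layer1_truediv / layer1_mul chain is the RETURNN-side reconstruction of the weight norm traced earlier: w = g * v / ||v||_2, with the L2 norm taken over every axis except the output-channel axis, matching torch's weight_norm with dim=0 (in RETURNN's shape notation such as [384,80,F|7], F| marks the feature axis). A NumPy equivalent of one chain, as a sketch:

```python
import numpy as np

v = np.random.randn(384, 80, 7).astype("float32")         # layer1_weight_v
g = np.random.randn(384, 1, 1).astype("float32")          # layer1_weight_g
norm = np.sqrt((v ** 2).sum(axis=(1, 2), keepdims=True))  # layer1_Norm_1: p=2, keep_dims
w = v * (g / norm)                                        # layer1_truediv + layer1_mul -> (384, 80, 7)
```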
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 192, 10)}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight' VariableLayer output: [384,192,F|10]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer3_weight'}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm' MathNormLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 192, 10)}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight_v' VariableLayer output: [384,192,F|10]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer3_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm_1' MathNormLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (384, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_weight_g' VariableLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer3_weight_g', 'layer3_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_truediv' CombineLayer output: [384,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/'layer3_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer3_weight_v', 'layer3_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/'layer3_mul' CombineLayer output: [384,192,F|10]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_mul' CombineLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_mul' CombineLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_mul' CombineLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 3)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_mul' CombineLayer output: [192,192,F|3]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_mul' CombineLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 192, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight_v' VariableLayer output: [192,192,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm_1' MathNormLayer output: [192,1,F|1]
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)}
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_weight_g' VariableLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_truediv' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_mul' CombineLayer output: [192,192,F|1] | |
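
The layerN:subnet groups (layer6 and layer7 above, layer10-13 and layer16-19 below) each describe one MelGAN-style residual block: a stack subnet with a kernel-3 convolution ('layer2') followed by a 1x1 convolution ('layer4'), plus a 1x1 'skip_layer' shortcut whose output is added to the stack output. A rough PyTorch equivalent; the activations, padding, and dilation are assumptions, since the log only shows the weight-normalized weights:

import torch

class ResidualBlockSketch(torch.nn.Module):
    def __init__(self, channels=192):
        super().__init__()
        self.stack = torch.nn.Sequential(
            torch.nn.LeakyReLU(0.2),
            torch.nn.Conv1d(channels, channels, kernel_size=3, padding=1),  # 'layer2', weight (192,192,3)
            torch.nn.LeakyReLU(0.2),
            torch.nn.Conv1d(channels, channels, kernel_size=1),             # 'layer4', weight (192,192,1)
        )
        self.skip_layer = torch.nn.Conv1d(channels, channels, kernel_size=1)  # weight (192,192,1)

    def forward(self, x):
        return self.stack(x) + self.skip_layer(x)
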
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 96, 10)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight' VariableLayer output: [192,96,F|10] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer9_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 96, 10)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight_v' VariableLayer output: [192,96,F|10] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer9_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm_1' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (192, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_weight_g' VariableLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer9_weight_g', 'layer9_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_truediv' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer9_weight_v', 'layer9_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_mul' CombineLayer output: [192,96,F|10] | |
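
'layer9_weight' with shape (192, 96, 10) is a transposed convolution: PyTorch ConvTranspose1d stores its weight as (in_channels, out_channels, kernel), unlike Conv1d's (out_channels, in_channels, kernel), so this is the 192-to-96-channel upsampling stage, and 'layer15_weight' (96, 48, 4) below is the next one. Kernel 10 is consistent with stride-5 upsampling (kernel = 2 x stride) and kernel 4 with stride 2, though the strides do not appear in this log. A sketch, with stride and padding values as assumptions:

import torch

up = torch.nn.ConvTranspose1d(192, 96, kernel_size=10, stride=5, padding=3, output_padding=1)
print(up.weight.shape)   # torch.Size([192, 96, 10]) -- matches 'layer9_weight'
x = torch.randn(1, 192, 80)
print(up(x).shape)       # torch.Size([1, 96, 400]) -- 5x upsampling in time
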
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_mul' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_mul' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_mul' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_mul' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 96, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight_v' VariableLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_mul' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 48, 4)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight' VariableLayer output: [96,48,F|4] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer15_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 48, 4)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight_v' VariableLayer output: [96,48,F|4] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer15_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm_1' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (96, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_weight_g' VariableLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer15_weight_g', 'layer15_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_truediv' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer15_weight_v', 'layer15_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_mul' CombineLayer output: [96,48,F|4] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_mul' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_mul' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_mul' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 3)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight_v' VariableLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_mul' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 48, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight_v' VariableLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm_1' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (48, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_weight_g' VariableLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_truediv' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_mul' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (4, 48, 7)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight' VariableLayer output: [4,48,F|7] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer22_weight'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm' MathNormLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight_v' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (4, 48, 7)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight_v' VariableLayer output: [4,48,F|7] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm_1' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer22_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm_1' MathNormLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight_g' layer dict: {'class': 'variable', 'add_batch_axis': False, 'shape': (4, 1, 1)} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_weight_g' VariableLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_truediv' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer22_weight_g', 'layer22_Norm_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_truediv' CombineLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer22_weight_v', 'layer22_truediv']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_mul' CombineLayer output: [4,48,F|7] | |
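Each `*_weight`/`*_weight_v`/`*_weight_g` group above, together with its `Norm`, `truediv` and `mul` layers, is the RETURNN translation of PyTorch weight normalization (`torch.nn.utils.weight_norm`), which reparameterizes a conv weight as w = g * v / ||v||, with the L2 norm taken over every axis except the output channel (hence `axes: ['static:1', 'F']` and the [48,1,F|1] norm shape). A minimal sketch of the same computation in plain PyTorch (tensor names hypothetical, shapes from the `layer2_*` group above):

# w = g * v / ||v||, norm over all axes except dim 0,
# matching the MathNorm/Combine layer groups in the log.
import torch

v = torch.randn(48, 48, 3)   # direction, like 'layer2_weight_v' [48,48,F|3]
g = torch.randn(48, 1, 1)    # magnitude, like 'layer2_weight_g' [48,1,F|1]

norm_v = v.pow(2).sum(dim=(1, 2), keepdim=True).sqrt()  # 'layer2_Norm_1'
w = v * (g / norm_v)          # 'layer2_truediv' followed by 'layer2_mul'

# The built-in utility produces exactly this reparametrization:
conv = torch.nn.utils.weight_norm(torch.nn.Conv1d(48, 48, 3))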
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer1_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer1_Norm_2' MathNormLayer output: [384,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer1_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer1_weight_g', 'layer1_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer1_truediv_1' CombineLayer output: [384,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer1_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer1_weight_v', 'layer1_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer1_mul_1' CombineLayer output: [384,80,F|7] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer3_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_Norm_2' MathNormLayer output: [384,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer3_weight_g', 'layer3_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_truediv_1' CombineLayer output: [384,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer3_weight_v', 'layer3_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer3_mul_1' CombineLayer output: [384,192,F|10] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [192,192,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer4:subnet/'skip_layer_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [192,192,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer5:subnet/'skip_layer_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [192,192,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer6:subnet/'skip_layer_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [192,192,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer7:subnet/'skip_layer_mul_1' CombineLayer output: [192,192,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer9_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_Norm_2' MathNormLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer9_weight_g', 'layer9_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_truediv_1' CombineLayer output: [192,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer9_weight_v', 'layer9_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer9_mul_1' CombineLayer output: [192,96,F|10] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer10:subnet/'skip_layer_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer11:subnet/'skip_layer_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer12:subnet/'skip_layer_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [96,96,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer13:subnet/'skip_layer_mul_1' CombineLayer output: [96,96,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer15_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_Norm_2' MathNormLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer15_weight_g', 'layer15_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_truediv_1' CombineLayer output: [96,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer15_weight_v', 'layer15_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer15_mul_1' CombineLayer output: [96,48,F|4] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer16:subnet/'skip_layer_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer17:subnet/'skip_layer_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer18:subnet/'skip_layer_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer2_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer2_weight_g', 'layer2_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer2_weight_v', 'layer2_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer2_mul_1' CombineLayer output: [48,48,F|3] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer4_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer4_weight_g', 'layer4_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer4_weight_v', 'layer4_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/stack:subnet/'layer4_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'skip_layer_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_Norm_2' MathNormLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['skip_layer_weight_g', 'skip_layer_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_truediv_1' CombineLayer output: [48,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['skip_layer_weight_v', 'skip_layer_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/layer19:subnet/'skip_layer_mul_1' CombineLayer output: [48,48,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm_2' layer dict: {'class': 'math_norm', 'p': 2, 'keep_dims': True, 'axes': ['static:1', 'F'], 'from': 'layer22_weight_v'} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_Norm_2' MathNormLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_truediv_1' layer dict: {'class': 'combine', 'kind': 'truediv', 'from': ['layer22_weight_g', 'layer22_Norm_2']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_truediv_1' CombineLayer output: [4,1,F|1] | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_mul_1' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['layer22_weight_v', 'layer22_truediv_1']} | |
*** root/.tmp_root:subnet/melgan:subnet/'layer22_mul_1' CombineLayer output: [4,48,F|7] | |
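The `*_Norm_2`/`*_truediv_1`/`*_mul_1` layers above repeat the same w = g * v / ||v|| computation a second time: `weight_norm` installs a forward pre-hook that rebuilds the effective weight from `weight_g`/`weight_v` on every call, so tracing the module's forward pass emits the reparametrization again on top of the graph already recorded at construction. A simplified sketch of that hook mechanism (hypothetical re-implementation, not the actual PyTorch internals):

import torch

conv = torch.nn.utils.weight_norm(torch.nn.Conv1d(48, 48, 3))

# weight_norm has already registered a pre-hook roughly equivalent to:
def recompute_weight(module, inputs):
    g, v = module.weight_g, module.weight_v
    norm_v = v.pow(2).sum(dim=(1, 2), keepdim=True).sqrt()
    module.weight = v * (g / norm_v)   # rebuilt before every forward

y = conv(torch.randn(1, 48, 100))      # hook fires here, hence the *_1 layers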
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_PQMF_unnamed_const' layer dict: {'class': 'constant', 'value': array([[ 6.84837762e-06, -8.61604274e-20, -1.57372949e-05, | |
1.11185474e-05, 1.61987245e-04, 4.59894990e-04, | |
7.72398151e-04, 8.38639994e-04, 4.98372346e-04, | |
-1.10215669e-18, 7.11952089e-05, 1.49111563e-03, | |
4.30004690e-03, 7.27269441e-03, 8.37646608e-03, | |
6.31989583e-03, 2.24644065e-03, 1.26586195e-19, | |
3.99743896e-03, 1.55457166e-02, 3.02382909e-02, | |
3.92091472e-02, 3.47761063e-02, 1.74983894e-02, | |
-5.26998738e-04, 2.48157347e-18, 3.44539554e-02, | |
1.02453039e-01, 1.83332388e-01, 2.43451169e-01, | |
2.52647189e-01, 2.00818326e-01, 1.04649892e-01, | |
1.49070848e-17, -7.59387617e-02, -1.02453039e-01, | |
-8.31792064e-02, -4.05271704e-02, -1.27228750e-03, | |
1.74983894e-02, 1.44047349e-02, 7.20260350e-18, | |
-1.25251102e-02, -1.55457166e-02, -9.65067136e-03, | |
-6.89103149e-04, 5.42338747e-03, 6.31989583e-03, | |
3.46964585e-03, 2.22662048e-18, -1.78113774e-03, | |
-1.49111563e-03, -1.71880439e-04, 9.22816437e-04, | |
1.20317728e-03, 8.38639994e-04, 3.19937790e-04, | |
-6.19814487e-19, -6.70973139e-05, -1.11185474e-05, | |
3.79931908e-05, 3.90753211e-05, 1.65334461e-05], | |
[ 1.65334461e-05, -1.24443201e-19, -3.79931908e-05, | |
1.11185474e-05, -6.70973139e-05, -4.59894990e-04, | |
-3.19937790e-04, 8.38639994e-04, 1.20317728e-03, | |
6.78645723e-19, 1.71880439e-04, 1.49111563e-03, | |
-1.78113774e-03, -7.27269441e-03, -3.46964585e-03, | |
6.31989583e-03, 5.42338747e-03, -7.59943711e-19, | |
9.65067136e-03, 1.55457166e-02, -1.25251102e-02, | |
-3.92091472e-02, -1.44047349e-02, 1.74983894e-02, | |
-1.27228750e-03, 1.24078674e-17, 8.31792064e-02, | |
1.02453039e-01, -7.59387617e-02, -2.43451169e-01, | |
-1.04649892e-01, 2.00818326e-01, 2.52647189e-01, | |
1.49070848e-17, -1.83332388e-01, -1.02453039e-01, | |
3.44539554e-02, 4.05271704e-02, 5.26998738e-04, | |
1.74983894e-02, 3.47761063e-02, 8.64555117e-17, | |
-3.02382909e-02, -1.55457166e-02, 3.99743896e-03, | |
6.89103149e-04, -2.24644065e-03, 6.31989583e-03, | |
8.37646608e-03, 1.87081137e-17, -4.30004690e-03, | |
-1.49111563e-03, 7.11952089e-05, -9.22816437e-04, | |
-4.98372346e-04, 8.38639994e-04, 7.72398151e-04, | |
1.35198609e-18, -1.61987245e-04, -1.11185474e-05, | |
-1.57372949e-05, -3.90753211e-05, -6.84837762e-06], | |
[-1.65334461e-05, 1.91171999e-20, 3.79931908e-05, | |
1.11185474e-05, 6.70973139e-05, -4.59894990e-04, | |
3.19937790e-04, 8.38639994e-04, -1.20317728e-03, | |
-3.39208620e-18, -1.71880439e-04, 1.49111563e-03, | |
1.78113774e-03, -7.27269441e-03, 3.46964585e-03, | |
6.31989583e-03, -5.42338747e-03, -1.94141486e-18, | |
-9.65067136e-03, 1.55457166e-02, 1.25251102e-02, | |
-3.92091472e-02, 1.44047349e-02, 1.74983894e-02, | |
1.27228750e-03, -8.93617306e-17, -8.31792064e-02, | |
1.02453039e-01, 7.59387617e-02, -2.43451169e-01, | |
1.04649892e-01, 2.00818326e-01, -2.52647189e-01, | |
-4.47212543e-17, 1.83332388e-01, -1.02453039e-01, | |
-3.44539554e-02, 4.05271704e-02, -5.26998738e-04, | |
1.74983894e-02, -3.47761063e-02, 3.84381550e-17, | |
3.02382909e-02, -1.55457166e-02, -3.99743896e-03, | |
6.89103149e-04, 2.24644065e-03, 6.31989583e-03, | |
-8.37646608e-03, -2.31613547e-17, 4.30004690e-03, | |
-1.49111563e-03, -7.11952089e-05, -9.22816437e-04, | |
4.98372346e-04, 8.38639994e-04, -7.72398151e-04, | |
-3.38010756e-18, 1.61987245e-04, -1.11185474e-05, | |
1.57372949e-05, -3.90753211e-05, 6.84837762e-06], | |
[-6.84837762e-06, 5.73999735e-20, 1.57372949e-05, | |
1.11185474e-05, -1.61987245e-04, 4.59894990e-04, | |
-7.72398151e-04, 8.38639994e-04, -4.98372346e-04, | |
3.84299347e-18, -7.11952089e-05, 1.49111563e-03, | |
-4.30004690e-03, 7.27269441e-03, -8.37646608e-03, | |
6.31989583e-03, -2.24644065e-03, -4.72716424e-18, | |
-3.99743896e-03, 1.55457166e-02, -3.02382909e-02, | |
3.92091472e-02, -3.47761063e-02, 1.74983894e-02, | |
5.26998738e-04, 4.46934081e-17, -3.44539554e-02, | |
1.02453039e-01, -1.83332388e-01, 2.43451169e-01, | |
-2.52647189e-01, 2.00818326e-01, -1.04649892e-01, | |
-4.47212543e-17, 7.59387617e-02, -1.02453039e-01, | |
8.31792064e-02, -4.05271704e-02, 1.27228750e-03, | |
1.74983894e-02, -1.44047349e-02, -1.10464190e-16, | |
1.25251102e-02, -1.55457166e-02, 9.65067136e-03, | |
-6.89103149e-04, -5.42338747e-03, 6.31989583e-03, | |
-3.46964585e-03, 2.49516544e-17, 1.78113774e-03, | |
-1.49111563e-03, 1.71880439e-04, 9.22816437e-04, | |
-1.20317728e-03, 8.38639994e-04, -3.19937790e-04, | |
-4.50282476e-19, 6.70973139e-05, -1.11185474e-05, | |
-3.79931908e-05, 3.90753211e-05, -1.65334461e-05]])} | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_PQMF_unnamed_const' ConstantLayer output: [4,F|63] | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast' layer dict: {'class': 'cast', 'from': 'Cast_PQMF_unnamed_const', 'dtype': 'float32'} | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast' CastLayer output: [4,F|63] | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_PQMF_unnamed_const_1' layer dict: {'class': 'constant', 'value': array([[ 1.65334461e-05, 3.90753211e-05, 3.79931908e-05, | |
-1.11185474e-05, -6.70973139e-05, -6.19814487e-19, | |
3.19937790e-04, 8.38639994e-04, 1.20317728e-03, | |
9.22816437e-04, -1.71880439e-04, -1.49111563e-03, | |
-1.78113774e-03, 2.22662048e-18, 3.46964585e-03, | |
6.31989583e-03, 5.42338747e-03, -6.89103149e-04, | |
-9.65067136e-03, -1.55457166e-02, -1.25251102e-02, | |
7.20260350e-18, 1.44047349e-02, 1.74983894e-02, | |
-1.27228750e-03, -4.05271704e-02, -8.31792064e-02, | |
-1.02453039e-01, -7.59387617e-02, 1.49070848e-17, | |
1.04649892e-01, 2.00818326e-01, 2.52647189e-01, | |
2.43451169e-01, 1.83332388e-01, 1.02453039e-01, | |
3.44539554e-02, 2.48157347e-18, -5.26998738e-04, | |
1.74983894e-02, 3.47761063e-02, 3.92091472e-02, | |
3.02382909e-02, 1.55457166e-02, 3.99743896e-03, | |
1.26586195e-19, 2.24644065e-03, 6.31989583e-03, | |
8.37646608e-03, 7.27269441e-03, 4.30004690e-03, | |
1.49111563e-03, 7.11952089e-05, -1.10215669e-18, | |
4.98372346e-04, 8.38639994e-04, 7.72398151e-04, | |
4.59894990e-04, 1.61987245e-04, 1.11185474e-05, | |
-1.57372949e-05, -8.61604274e-20, 6.84837762e-06], | |
[-6.84837762e-06, -3.90753211e-05, -1.57372949e-05, | |
-1.11185474e-05, -1.61987245e-04, 1.35198609e-18, | |
7.72398151e-04, 8.38639994e-04, -4.98372346e-04, | |
-9.22816437e-04, 7.11952089e-05, -1.49111563e-03, | |
-4.30004690e-03, 1.87081137e-17, 8.37646608e-03, | |
6.31989583e-03, -2.24644065e-03, 6.89103149e-04, | |
3.99743896e-03, -1.55457166e-02, -3.02382909e-02, | |
8.64555117e-17, 3.47761063e-02, 1.74983894e-02, | |
5.26998738e-04, 4.05271704e-02, 3.44539554e-02, | |
-1.02453039e-01, -1.83332388e-01, 1.49070848e-17, | |
2.52647189e-01, 2.00818326e-01, -1.04649892e-01, | |
-2.43451169e-01, -7.59387617e-02, 1.02453039e-01, | |
8.31792064e-02, 1.24078674e-17, -1.27228750e-03, | |
1.74983894e-02, -1.44047349e-02, -3.92091472e-02, | |
-1.25251102e-02, 1.55457166e-02, 9.65067136e-03, | |
-7.59943711e-19, 5.42338747e-03, 6.31989583e-03, | |
-3.46964585e-03, -7.27269441e-03, -1.78113774e-03, | |
1.49111563e-03, 1.71880439e-04, 6.78645723e-19, | |
1.20317728e-03, 8.38639994e-04, -3.19937790e-04, | |
-4.59894990e-04, -6.70973139e-05, 1.11185474e-05, | |
-3.79931908e-05, -1.24443201e-19, 1.65334461e-05], | |
[ 6.84837762e-06, -3.90753211e-05, 1.57372949e-05, | |
-1.11185474e-05, 1.61987245e-04, -3.38010756e-18, | |
-7.72398151e-04, 8.38639994e-04, 4.98372346e-04, | |
-9.22816437e-04, -7.11952089e-05, -1.49111563e-03, | |
4.30004690e-03, -2.31613547e-17, -8.37646608e-03, | |
6.31989583e-03, 2.24644065e-03, 6.89103149e-04, | |
-3.99743896e-03, -1.55457166e-02, 3.02382909e-02, | |
3.84381550e-17, -3.47761063e-02, 1.74983894e-02, | |
-5.26998738e-04, 4.05271704e-02, -3.44539554e-02, | |
-1.02453039e-01, 1.83332388e-01, -4.47212543e-17, | |
-2.52647189e-01, 2.00818326e-01, 1.04649892e-01, | |
-2.43451169e-01, 7.59387617e-02, 1.02453039e-01, | |
-8.31792064e-02, -8.93617306e-17, 1.27228750e-03, | |
1.74983894e-02, 1.44047349e-02, -3.92091472e-02, | |
1.25251102e-02, 1.55457166e-02, -9.65067136e-03, | |
-1.94141486e-18, -5.42338747e-03, 6.31989583e-03, | |
3.46964585e-03, -7.27269441e-03, 1.78113774e-03, | |
1.49111563e-03, -1.71880439e-04, -3.39208620e-18, | |
-1.20317728e-03, 8.38639994e-04, 3.19937790e-04, | |
-4.59894990e-04, 6.70973139e-05, 1.11185474e-05, | |
3.79931908e-05, 1.91171999e-20, -1.65334461e-05], | |
[-1.65334461e-05, 3.90753211e-05, -3.79931908e-05, | |
-1.11185474e-05, 6.70973139e-05, -4.50282476e-19, | |
-3.19937790e-04, 8.38639994e-04, -1.20317728e-03, | |
9.22816437e-04, 1.71880439e-04, -1.49111563e-03, | |
1.78113774e-03, 2.49516544e-17, -3.46964585e-03, | |
6.31989583e-03, -5.42338747e-03, -6.89103149e-04, | |
9.65067136e-03, -1.55457166e-02, 1.25251102e-02, | |
-1.10464190e-16, -1.44047349e-02, 1.74983894e-02, | |
1.27228750e-03, -4.05271704e-02, 8.31792064e-02, | |
-1.02453039e-01, 7.59387617e-02, -4.47212543e-17, | |
-1.04649892e-01, 2.00818326e-01, -2.52647189e-01, | |
2.43451169e-01, -1.83332388e-01, 1.02453039e-01, | |
-3.44539554e-02, 4.46934081e-17, 5.26998738e-04, | |
1.74983894e-02, -3.47761063e-02, 3.92091472e-02, | |
-3.02382909e-02, 1.55457166e-02, -3.99743896e-03, | |
-4.72716424e-18, -2.24644065e-03, 6.31989583e-03, | |
-8.37646608e-03, 7.27269441e-03, -4.30004690e-03, | |
1.49111563e-03, -7.11952089e-05, 3.84299347e-18, | |
-4.98372346e-04, 8.38639994e-04, -7.72398151e-04, | |
4.59894990e-04, -1.61987245e-04, 1.11185474e-05, | |
1.57372949e-05, 5.73999735e-20, -6.84837762e-06]])} | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_PQMF_unnamed_const_1' ConstantLayer output: [4,F|63] | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_1' layer dict: {'class': 'cast', 'from': 'Cast_PQMF_unnamed_const_1', 'dtype': 'float32'} | |
*** root/.tmp_root:subnet/Cast_PQMF:subnet/'Cast_1' CastLayer output: [4,F|63] | |
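The two float64 constants above are the PQMF analysis and synthesis filter banks from `parallel_wavegan.layers.pqmf` (4 subbands x 63 taps); the `Cast`/`Cast_1` layers only convert them to float32, since NumPy constructs them in float64. A rough sketch of the construction (cosine modulation of a Kaiser-windowed prototype low-pass filter; cutoff, beta and the exact prototype design are assumptions based on the usual MB-MelGAN defaults and may differ from this checkpoint's config):

import numpy as np
from scipy.signal import firwin

subbands, taps, cutoff, beta = 4, 62, 0.15, 9.0
h_proto = firwin(taps + 1, cutoff, window=("kaiser", beta))  # 63-tap prototype

n = np.arange(taps + 1)
phase = (np.pi / (2 * subbands)) * (n - taps / 2)
h_analysis = np.stack([
    2 * h_proto * np.cos((2 * k + 1) * phase + (-1) ** k * np.pi / 4)
    for k in range(subbands)])   # (4, 63), like 'Cast_PQMF_unnamed_const'
h_synthesis = np.stack([
    2 * h_proto * np.cos((2 * k + 1) * phase - (-1) ** k * np.pi / 4)
    for k in range(subbands)])   # (4, 63), like 'Cast_PQMF_unnamed_const_1'

h_analysis = h_analysis.astype(np.float32)   # the 'Cast' layer above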
*** root/melgan:subnet/'layer0' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'data'} | |
*** root/melgan:subnet/'layer0' PadLayer output: [B,F|80,T|'spatial:1:melgan/layer0'] | |
*** root/melgan:subnet/'layer0' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
**** torch tensor func detach | |
*** root/melgan:subnet/'layer1' layer dict: {'class': 'conv', 'from': 'layer0', 'activation': None, 'with_bias': True, 'n_out': 384, 'filter_size': (7,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/'layer1' ConvLayer output: [B,T|'time:var:extern_data:data',F|384] | |
*** root/melgan:subnet/'layer1' ConvLayer importing params ['bias', 'weight'] ... | 
WARNING:tensorflow:From /Users/az/Programmierung/import-parallel-wavegan/pytorch_to_returnn/torch/nn/modules/conv.py:103: Variable.load (from tensorflow.python.ops.variables) is deprecated and will be removed in a future version. | |
Instructions for updating: | |
Prefer Variable.assign which has equivalent behavior in 2.X. | |
*** root/melgan:subnet/'layer1' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
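`layer0` and `layer1` show the general translation pattern for padded convolutions: the converter splits PyTorch's explicit reflection padding and the convolution into a RETURNN PadLayer plus a 'valid' ConvLayer, so the time axis length is preserved ((T + 2*3) - 7 + 1 = T). A minimal sketch of the PyTorch side being traced here:

import torch

pad = torch.nn.ReflectionPad1d(3)
conv = torch.nn.Conv1d(in_channels=80, out_channels=384, kernel_size=7)

x = torch.randn(1, 80, 80)   # (batch, mel bins, frames), cf. feature shape (80, 80)
y = conv(pad(x))             # -> (1, 384, 80), like 'layer1' output [B,T,F|384]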
*** root/melgan:subnet/'layer2' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer1'} | |
*** root/melgan:subnet/'layer2' EvalLayer output: [B,T|'time:var:extern_data:data',F|384] | |
*** root/melgan:subnet/'layer2' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
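`layer2` maps `torch.nn.LeakyReLU(0.2)` onto an EvalLayer whose expression calls `tf.nn.leaky_relu` directly. A one-line equivalence check (sketch):

import tensorflow as tf

x = tf.constant([-1.0, 0.0, 2.0])
y = tf.nn.leaky_relu(x, alpha=0.2)   # [-0.2, 0.0, 2.0], same as LeakyReLU(0.2)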
*** root/melgan:subnet/'layer3' layer dict: {'class': 'transposed_conv', 'from': 'layer2', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (10,), 'strides': (5,), 'padding': 'valid', 'output_padding': (1,), 'remove_padding': (3,)} | |
*** root/melgan:subnet/'layer3' TransposedConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer3' TransposedConvLayer importing params ['bias', 'weight'] ... | 
*** root/melgan:subnet/'layer3' TransposedConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
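`layer3` is the first 5x upsampling step. RETURNN receives PyTorch's ConvTranspose1d hyperparameters split into a 'valid' transposed convolution plus `output_padding: (1,)` and `remove_padding: (3,)`, which together reproduce L_out = (L_in - 1)*stride - 2*padding + kernel_size + output_padding = 5*L_in. A sketch of the PyTorch module being translated:

import torch

up = torch.nn.ConvTranspose1d(384, 192, kernel_size=10, stride=5,
                              padding=3, output_padding=1)
x = torch.randn(1, 384, 80)
y = up(x)                               # (80-1)*5 - 2*3 + 10 + 1 = 400
assert y.shape[-1] == 5 * x.shape[-1]   # exact 5x upsampling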
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer4/stack/layer1',F|192] | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ... | 
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ... | 
*** root/melgan:subnet/layer4:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer4:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer4:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ... | 
*** root/melgan:subnet/layer4:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer4:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer4:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer4:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer4:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer4' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer4' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
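`layer4` is the first MelGAN residual block: a 'stack' subnetwork (LeakyReLU -> reflect pad -> kernel-3 dilated conv -> LeakyReLU -> 1x1 conv) summed with a 1x1 'skip_layer' convolution of the input via the 'add' CombineLayer. A minimal PyTorch sketch of the block structure (class name hypothetical; channels and dilation taken from the log):

import torch
from torch import nn

class ResidualStack(nn.Module):
    def __init__(self, channels=192, dilation=1):
        super().__init__()
        self.stack = nn.Sequential(
            nn.LeakyReLU(0.2),                                     # 'layer0'
            nn.ReflectionPad1d(dilation),                          # 'layer1'
            nn.Conv1d(channels, channels, 3, dilation=dilation),   # 'layer2'
            nn.LeakyReLU(0.2),                                     # 'layer3'
            nn.Conv1d(channels, channels, 1),                      # 'layer4'
        )
        self.skip_layer = nn.Conv1d(channels, channels, 1)         # 'skip_layer'

    def forward(self, x):
        return self.stack(x) + self.skip_layer(x)                  # 'add'

y = ResidualStack()(torch.randn(1, 192, 400))  # time and channels preserved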
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer5/stack/layer1',F|192] | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (3,)} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer5:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer5:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer5:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer5:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer5:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer5:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer5:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer5' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer5' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer6/stack/layer1',F|192] | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (9,)} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer6:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer6:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer6:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer6:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer6:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer6:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer6:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer6' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer6' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer7/stack/layer1',F|192] | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (27,)} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer7:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 192, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer7:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer7:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer7:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer7:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/layer7:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer7:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer7' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer7' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer8' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer7'} | |
*** root/melgan:subnet/'layer8' EvalLayer output: [B,T|'spatial:0:melgan/layer3',F|192] | |
*** root/melgan:subnet/'layer8' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer9' layer dict: {'class': 'transposed_conv', 'from': 'layer8', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (10,), 'strides': (5,), 'padding': 'valid', 'output_padding': (1,), 'remove_padding': (3,)} | |
*** root/melgan:subnet/'layer9' TransposedConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer9' TransposedConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/'layer9' TransposedConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
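
The 'transposed_conv' dict above encodes PyTorch's ConvTranspose1d(kernel_size=10, stride=5, padding=3, output_padding=1): by this mapping, RETURNN computes the full 'valid' transposed convolution plus output_padding, then trims remove_padding from both ends. A quick sanity check of the length arithmetic (a sketch; the helper function is ours):

def transposed_conv_out_len(t_in, kernel, stride, remove_pad, out_pad):
    # RETURNN view: 'valid' transposed conv + output_padding, minus remove_padding per side.
    # Agrees with PyTorch: (t_in - 1) * stride - 2 * padding + kernel + output_padding.
    return (t_in - 1) * stride + kernel + out_pad - 2 * remove_pad

assert transposed_conv_out_len(100, kernel=10, stride=5, remove_pad=3, out_pad=1) == 500

So this layer upsamples time by exactly its stride, 5x.
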
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer10/stack/layer1',F|96] | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer10:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer10:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer10:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer10:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer10:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer10:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer10:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer10' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer10' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer11/stack/layer1',F|96] | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (3,)} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer11:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer11:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer11:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer11:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer11:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer11:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer11:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer11' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer11' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer12/stack/layer1',F|96] | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (9,)} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer12:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer12:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer12:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer12:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer12:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer12:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer12:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer12' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer12' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer13/stack/layer1',F|96] | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (27,)} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer13:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 96, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer13:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer13:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer13:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer13:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/layer13:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer13:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer13' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer13' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer14' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer13'} | |
*** root/melgan:subnet/'layer14' EvalLayer output: [B,T|'spatial:0:melgan/layer9',F|96] | |
*** root/melgan:subnet/'layer14' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer15' layer dict: {'class': 'transposed_conv', 'from': 'layer14', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (4,), 'strides': (2,), 'padding': 'valid', 'output_padding': (0,), 'remove_padding': (1,)} | |
*** root/melgan:subnet/'layer15' TransposedConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer15' TransposedConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/'layer15' TransposedConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
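
The same arithmetic holds for this 2x stage: (t_in - 1) * 2 + 4 + 0 - 2 * 1 = 2 * t_in, i.e. ConvTranspose1d(kernel_size=4, stride=2, padding=1, output_padding=0) in PyTorch terms.
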
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer16/stack/layer1',F|48] | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer16:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer16:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer16:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer16:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer16:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer16:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer16:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer16' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer16' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer17/stack/layer1',F|48] | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (3,)} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer17:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer17:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer17:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer17:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer17:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer17:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer17:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer17' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer17' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer18/stack/layer1',F|48] | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (9,)} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer18:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer18:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer18:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer18:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer18:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer18:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer18:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer18' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer18' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer0' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer0' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer0' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer1' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer1' PadLayer output: [B,T|'spatial:0:melgan/layer19/stack/layer1',F|48] | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer1' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer2' layer dict: {'class': 'conv', 'from': 'layer1', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (3,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (27,)} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer2' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer2' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer2' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer3' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer3' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer3' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer4' layer dict: {'class': 'conv', 'from': 'layer3', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer4' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer4' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'layer4' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer4'} | |
*** root/melgan:subnet/layer19:subnet/stack:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/'stack' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/'stack' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/'skip_layer' layer dict: {'class': 'conv', 'from': 'data', 'activation': None, 'with_bias': True, 'n_out': 48, 'filter_size': (1,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/layer19:subnet/'skip_layer' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/'skip_layer' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/layer19:subnet/'skip_layer' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/layer19:subnet/'add' layer dict: {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']} | |
*** root/melgan:subnet/layer19:subnet/'add' CombineLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/layer19:subnet/'output' layer dict: {'class': 'copy', 'from': 'add'} | |
*** root/melgan:subnet/layer19:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer19' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer19' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer20' layer dict: {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer19'} | |
*** root/melgan:subnet/'layer20' EvalLayer output: [B,T|'spatial:0:melgan/layer15',F|48] | |
*** root/melgan:subnet/'layer20' EvalLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer21' layer dict: {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer20'} | |
*** root/melgan:subnet/'layer21' PadLayer output: [B,T|'spatial:0:melgan/layer21',F|48] | |
*** root/melgan:subnet/'layer21' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer22' layer dict: {'class': 'conv', 'from': 'layer21', 'activation': None, 'with_bias': True, 'n_out': 4, 'filter_size': (7,), 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/melgan:subnet/'layer22' ConvLayer output: [B,T|'spatial:0:melgan/layer15',F|4] | |
*** root/melgan:subnet/'layer22' ConvLayer importing params ['bias', 'weight'] ...) | |
*** root/melgan:subnet/'layer22' ConvLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'layer23' layer dict: {'class': 'activation', 'activation': 'tanh', 'from': 'layer22'} | |
*** root/melgan:subnet/'layer23' ActivationLayer output: [B,T|'spatial:0:melgan/layer15',F|4] | |
*** root/melgan:subnet/'layer23' ActivationLayer check RETURNN outputs given Torch inputs/outputs ... | |
*** root/melgan:subnet/'output' layer dict: {'class': 'copy', 'from': 'layer23'} | |
*** root/melgan:subnet/'output' CopyLayer output: [B,T|'spatial:0:melgan/layer15',F|4] | |
*** root/'melgan' SubnetworkLayer output: [B,T|'spatial:0:melgan/layer15',F|4] | |
*** root/'melgan' SubnetworkLayer check RETURNN outputs given Torch inputs/outputs ... | |
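
Note the final feature dim F|4 after the LeakyReLU -> ReflectionPad1d(3) -> Conv1d(kernel 7) -> Tanh head: this is the multi-band MelGAN generator, which emits four PQMF sub-band signals at a quarter of the output rate rather than one full-rate waveform. The remaining root-level layers below implement the PQMF synthesis filter bank that recombines them.
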
*** root/'PQMF_Cast_unnamed_const' layer dict: {'class': 'constant', 'value': array(4, dtype=int32)} | |
*** root/'PQMF_Cast_unnamed_const' ConstantLayer output: [] | |
*** root/'PQMF_Cast' layer dict: {'class': 'cast', 'from': 'PQMF_Cast_unnamed_const', 'dtype': 'float32'} | |
*** root/'PQMF_Cast' CastLayer output: [] | |
*** root/'PQMF_updown_filter' layer dict: {'class': 'constant', 'value': array([[[1., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[0., 0., 0., 0.]], | |
[[0., 0., 0., 0.], | |
[1., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[0., 0., 0., 0.]], | |
[[0., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[1., 0., 0., 0.], | |
[0., 0., 0., 0.]], | |
[[0., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[0., 0., 0., 0.], | |
[1., 0., 0., 0.]]], dtype=float32)} | |
*** root/'PQMF_updown_filter' ConstantLayer output: [4,4,F|4] | |
*** root/'PQMF_mul' layer dict: {'class': 'combine', 'kind': 'mul', 'from': ['PQMF_updown_filter', 'PQMF_Cast']} | |
*** root/'PQMF_mul' CombineLayer output: [4,4,F|4] | |
*** root/'PQMF_FunctionalConvTransposed1d' layer dict: {'class': 'transposed_conv', 'from': 'melgan', 'n_out': 4, 'activation': None, 'with_bias': False, 'bias': None, 'filter_size': (4,), 'filter': 'PQMF_mul', 'filter_perm': {'static:0': 'F', 'static:1': 'static:1', 'F': 'static:0'}, 'padding': 'valid', 'output_padding': (0,), 'remove_padding': (0,), 'strides': (4,)} | |
*** root/'PQMF_FunctionalConvTransposed1d' TransposedConvLayer output: [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|4] | |
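
The (4, 4, 4) constant above is the PQMF "up-down" filter: a single 1.0 at index [k, k, 0] for each band k, scaled by the cast constant 4 (the number of sub-bands). Used as the kernel of a stride-4 transposed convolution, it upsamples each band by zero-insertion and compensates the 1/4 gain. A small sketch of the equivalent construction, following parallel_wavegan's PQMF (variable names ours):

import torch
import torch.nn.functional as F

subbands = 4
updown_filter = torch.zeros((subbands, subbands, subbands))
for k in range(subbands):
    updown_filter[k, k, 0] = 1.0  # identity tap per band

x = torch.randn(1, subbands, 80)  # (B, subbands, T), arbitrary example length
y = F.conv_transpose1d(x, updown_filter * subbands, stride=subbands)
assert y.shape == (1, subbands, 320)  # zero-stuffed to T * subbands
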
*** root/'pad_fn' layer dict: {'class': 'pad', 'mode': 'constant', 'axes': 'spatial', 'padding': (31, 31), 'from': 'PQMF_FunctionalConvTransposed1d', 'value': 0.0} | |
*** root/'pad_fn' PadLayer output: [B,T|'spatial:0:pad_fn',F|4] | |
*** root/'pad_fn' PadLayer check RETURNN outputs given Torch inputs/outputs ... | |
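
The constant (31, 31) padding is taps // 2 per side for the 63-tap synthesis filter that follows (taps = 62), so the subsequent 'valid' convolution is exactly length-preserving.
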
*** root/'PQMF_synthesis_filter' layer dict: {'class': 'constant', 'value': array([[[ 1.65334459e-05, 3.90753194e-05, 3.79931917e-05, | |
-1.11185473e-05, -6.70973168e-05, -6.19814498e-19, | |
3.19937797e-04, 8.38639971e-04, 1.20317726e-03, | |
9.22816456e-04, -1.71880441e-04, -1.49111566e-03, | |
-1.78113778e-03, 2.22662049e-18, 3.46964574e-03, | |
6.31989585e-03, 5.42338751e-03, -6.89103152e-04, | |
-9.65067092e-03, -1.55457165e-02, -1.25251105e-02, | |
7.20260322e-18, 1.44047346e-02, 1.74983889e-02, | |
-1.27228745e-03, -4.05271687e-02, -8.31792057e-02, | |
-1.02453038e-01, -7.59387612e-02, 1.49070850e-17, | |
1.04649894e-01, 2.00818330e-01, 2.52647191e-01, | |
2.43451163e-01, 1.83332384e-01, 1.02453038e-01, | |
3.44539545e-02, 2.48157348e-18, -5.26998716e-04, | |
1.74983889e-02, 3.47761065e-02, 3.92091461e-02, | |
3.02382912e-02, 1.55457165e-02, 3.99743905e-03, | |
1.26586189e-19, 2.24644062e-03, 6.31989585e-03, | |
8.37646611e-03, 7.27269426e-03, 4.30004671e-03, | |
1.49111566e-03, 7.11952089e-05, -1.10215664e-18, | |
4.98372363e-04, 8.38639971e-04, 7.72398140e-04, | |
4.59895004e-04, 1.61987249e-04, 1.11185473e-05, | |
-1.57372942e-05, -8.61604251e-20, 6.84837778e-06], | |
[-6.84837778e-06, -3.90753194e-05, -1.57372942e-05, | |
-1.11185473e-05, -1.61987249e-04, 1.35198608e-18, | |
7.72398140e-04, 8.38639971e-04, -4.98372363e-04, | |
-9.22816456e-04, 7.11952089e-05, -1.49111566e-03, | |
-4.30004671e-03, 1.87081140e-17, 8.37646611e-03, | |
6.31989585e-03, -2.24644062e-03, 6.89103152e-04, | |
3.99743905e-03, -1.55457165e-02, -3.02382912e-02, | |
8.64555147e-17, 3.47761065e-02, 1.74983889e-02, | |
5.26998716e-04, 4.05271687e-02, 3.44539545e-02, | |
-1.02453038e-01, -1.83332384e-01, 1.49070850e-17, | |
2.52647191e-01, 2.00818330e-01, -1.04649894e-01, | |
-2.43451163e-01, -7.59387612e-02, 1.02453038e-01, | |
8.31792057e-02, 1.24078672e-17, -1.27228745e-03, | |
1.74983889e-02, -1.44047346e-02, -3.92091461e-02, | |
-1.25251105e-02, 1.55457165e-02, 9.65067092e-03, | |
-7.59943702e-19, 5.42338751e-03, 6.31989585e-03, | |
-3.46964574e-03, -7.27269426e-03, -1.78113778e-03, | |
1.49111566e-03, 1.71880441e-04, 6.78645703e-19, | |
1.20317726e-03, 8.38639971e-04, -3.19937797e-04, | |
-4.59895004e-04, -6.70973168e-05, 1.11185473e-05, | |
-3.79931917e-05, -1.24443197e-19, 1.65334459e-05], | |
[ 6.84837778e-06, -3.90753194e-05, 1.57372942e-05, | |
-1.11185473e-05, 1.61987249e-04, -3.38010752e-18, | |
-7.72398140e-04, 8.38639971e-04, 4.98372363e-04, | |
-9.22816456e-04, -7.11952089e-05, -1.49111566e-03, | |
4.30004671e-03, -2.31613549e-17, -8.37646611e-03, | |
6.31989585e-03, 2.24644062e-03, 6.89103152e-04, | |
-3.99743905e-03, -1.55457165e-02, 3.02382912e-02, | |
3.84381566e-17, -3.47761065e-02, 1.74983889e-02, | |
-5.26998716e-04, 4.05271687e-02, -3.44539545e-02, | |
-1.02453038e-01, 1.83332384e-01, -4.47212551e-17, | |
-2.52647191e-01, 2.00818330e-01, 1.04649894e-01, | |
-2.43451163e-01, 7.59387612e-02, 1.02453038e-01, | |
-8.31792057e-02, -8.93617311e-17, 1.27228745e-03, | |
1.74983889e-02, 1.44047346e-02, -3.92091461e-02, | |
1.25251105e-02, 1.55457165e-02, -9.65067092e-03, | |
-1.94141482e-18, -5.42338751e-03, 6.31989585e-03, | |
3.46964574e-03, -7.27269426e-03, 1.78113778e-03, | |
1.49111566e-03, -1.71880441e-04, -3.39208613e-18, | |
-1.20317726e-03, 8.38639971e-04, 3.19937797e-04, | |
-4.59895004e-04, 6.70973168e-05, 1.11185473e-05, | |
3.79931917e-05, 1.91172006e-20, -1.65334459e-05], | |
[-1.65334459e-05, 3.90753194e-05, -3.79931917e-05, | |
-1.11185473e-05, 6.70973168e-05, -4.50282487e-19, | |
-3.19937797e-04, 8.38639971e-04, -1.20317726e-03, | |
9.22816456e-04, 1.71880441e-04, -1.49111566e-03, | |
1.78113778e-03, 2.49516550e-17, -3.46964574e-03, | |
6.31989585e-03, -5.42338751e-03, -6.89103152e-04, | |
9.65067092e-03, -1.55457165e-02, 1.25251105e-02, | |
-1.10464187e-16, -1.44047346e-02, 1.74983889e-02, | |
1.27228745e-03, -4.05271687e-02, 8.31792057e-02, | |
-1.02453038e-01, 7.59387612e-02, -4.47212551e-17, | |
-1.04649894e-01, 2.00818330e-01, -2.52647191e-01, | |
2.43451163e-01, -1.83332384e-01, 1.02453038e-01, | |
-3.44539545e-02, 4.46934089e-17, 5.26998716e-04, | |
1.74983889e-02, -3.47761065e-02, 3.92091461e-02, | |
-3.02382912e-02, 1.55457165e-02, -3.99743905e-03, | |
-4.72716432e-18, -2.24644062e-03, 6.31989585e-03, | |
-8.37646611e-03, 7.27269426e-03, -4.30004671e-03, | |
1.49111566e-03, -7.11952089e-05, 3.84299344e-18, | |
-4.98372363e-04, 8.38639971e-04, -7.72398140e-04, | |
4.59895004e-04, -1.61987249e-04, 1.11185473e-05, | |
1.57372942e-05, 5.73999756e-20, -6.84837778e-06]]], | |
dtype=float32)} | |
*** root/'PQMF_synthesis_filter' ConstantLayer output: [1,4,F|63] | |
*** root/'PQMF_FunctionalConv1d' layer dict: {'class': 'conv', 'from': 'pad_fn', 'n_out': 1, 'activation': None, 'with_bias': False, 'bias': None, 'filter_size': (63,), 'filter': 'PQMF_synthesis_filter', 'filter_perm': {'static:0': 'F', 'static:1': 'static:1', 'F': 'static:0'}, 'padding': 'valid', 'strides': (1,), 'dilation_rate': (1,)} | |
*** root/'PQMF_FunctionalConv1d' ConvLayer output: [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1] | |
*** root/'output' layer dict: {'class': 'copy', 'from': 'PQMF_FunctionalConv1d'} | |
*** root/'output' CopyLayer output: [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1] | |
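
Taken together, the last four root-level layers form the PQMF synthesis step: zero-stuff each of the four sub-bands via the stride-4 transposed convolution, zero-pad by taps // 2 = 31, then collapse the bands into a single channel with the 63-tap synthesis filter bank. Roughly, in the spirit of parallel_wavegan's PQMF.synthesis (a sketch; argument names ours):

import torch.nn.functional as F

def pqmf_synthesis(x, updown_filter, synthesis_filter, subbands=4, taps=62):
    # x: (B, subbands, T); synthesis_filter: (1, subbands, taps + 1)
    x = F.conv_transpose1d(x, updown_filter * subbands, stride=subbands)
    x = F.pad(x, (taps // 2, taps // 2))  # the 'pad_fn' layer above
    return F.conv1d(x, synthesis_filter)  # -> (B, 1, T * subbands)
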
RETURNN output: Data(name='output_output', shape=(None, 1), batch_shape_meta=[B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1]) axis map RETURNN->Torch {0: 0, 2: 1, 1: 2} | |
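
The axis map {0: 0, 2: 1, 1: 2} records how RETURNN's batch-major [B, T, F] output lines up with Torch's (B, C, T) layout: batch stays, RETURNN's feature axis becomes Torch's channel axis, and time moves last. Illustratively (hypothetical array, numpy semantics):

import numpy as np

returnn_out = np.zeros((1, 320, 1))                # [B, T, F] as in the Data above
torch_like = np.transpose(returnn_out, (0, 2, 1))  # -> (B, C, T) == (1, 1, 320)
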
>>>> Module naming hierarchy: | |
.tmp_root: (hidden) | 
  data: None -> None | 
  melgan: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|4] axes {0:0,2:1,1:2}> | 
    data: None -> None | 
    layer0: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,80,80) returnn_data:'layer0_output' [B,F|80,T|'spatial:1:melgan/layer0'] axes id> | 
    layer1: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,384,80) returnn_data:'layer1_output' [B,T|'time:var:extern_data:data',F|384] axes {0:0,2:1,1:2}> | 
    layer2: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,384,80) returnn_data:'layer2_output' [B,T|'time:var:extern_data:data',F|384] axes {0:0,2:1,1:2}> | 
    layer3: <ModuleEntry <ConvTranspose1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
    layer4: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      data: None -> None | 
      stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        data: None -> None | 
        layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer4/stack/layer1',F|192] axes {0:0,2:1,1:2}> | 
        layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
    layer5: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      data: None -> None | 
      stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        data: None -> None | 
        layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer5/stack/layer1',F|192] axes {0:0,2:1,1:2}> | 
        layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
        output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
      output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
    layer6: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | 
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer6/stack/layer1',F|192] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer7: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer7/stack/layer1',F|192] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer8: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,192,80) returnn_data:'layer8_output' [B,T|'spatial:0:melgan/layer3',F|192] axes {0:0,2:1,1:2}> | |
layer9: <ModuleEntry <ConvTranspose1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer9_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer10: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer10/stack/layer1',F|96] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer11: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer11/stack/layer1',F|96] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer12: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer12/stack/layer1',F|96] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer13: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer13/stack/layer1',F|96] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer14: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,96,80) returnn_data:'layer14_output' [B,T|'spatial:0:melgan/layer9',F|96] axes {0:0,2:1,1:2}> | |
layer15: <ModuleEntry <ConvTranspose1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer15_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer16: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer16/stack/layer1',F|48] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer17: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer17/stack/layer1',F|48] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer18: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer18/stack/layer1',F|48] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer19: <ModuleEntry <ResidualStack>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
stack: <ModuleEntry <Sequential>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
data: None -> None | |
layer0: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer0_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer1: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer1_output' [B,T|'spatial:0:melgan/layer19/stack/layer1',F|48] axes {0:0,2:1,1:2}> | |
layer2: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer2_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer3: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer3_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer4: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
skip_layer: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'skip_layer_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
add: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer20: <ModuleEntry <LeakyReLU>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer20_output' [B,T|'spatial:0:melgan/layer15',F|48] axes {0:0,2:1,1:2}> | |
layer21: <ModuleEntry <ReflectionPad1d>> -> <TensorEntry name:? tensor:(1,48,80) returnn_data:'layer21_output' [B,T|'spatial:0:melgan/layer21',F|48] axes {0:0,2:1,1:2}> | |
layer22: <ModuleEntry <Conv1d>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'layer22_output' [B,T|'spatial:0:melgan/layer15',F|4] axes {0:0,2:1,1:2}> | |
layer23: <ModuleEntry <Tanh>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|4] axes {0:0,2:1,1:2}> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'output_output' [B,T|'spatial:0:melgan/layer15',F|4] axes {0:0,2:1,1:2}> | |
PQMF_Cast: <ModuleEntry <Cast>> -> <TensorEntry name:? tensor:() returnn_data:'PQMF_Cast_output' [] axes id> | |
PQMF_Cast_unnamed_const: <ModuleEntry <Constant>> -> <TensorEntry name:'value' tensor:() returnn_data:'PQMF_Cast_unnamed_const_const' [] axes id> | |
PQMF_mul: <ModuleEntry <BinaryOperator>> -> <TensorEntry name:? tensor:(4,4,4) returnn_data:'PQMF_mul_output' [4,4,F|4] axes id> | |
PQMF_updown_filter: <ModuleEntry <Constant>> -> <TensorEntry name:'updown_filter' tensor:(4,4,4) returnn_data:'PQMF_updown_filter_const' [4,4,F|4] axes id> | |
PQMF_FunctionalConvTransposed1d: <ModuleEntry <FunctionalConvTransposed1d>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'PQMF_FunctionalConvTransposed1d_output' [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|4] axes {0:0,2:1,1:2}> | |
pad_fn: <ModuleEntry <ConstantPad1d>> -> <TensorEntry name:? tensor:(1,4,80) returnn_data:'pad_fn_output' [B,T|'spatial:0:pad_fn',F|4] axes {0:0,2:1,1:2}> | |
PQMF_FunctionalConv1d: <ModuleEntry <FunctionalConv1d>> -> <TensorEntry name:? tensor:(1,1,80) returnn_data:'output_output' [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1] axes {0:0,2:1,1:2}> | |
PQMF_synthesis_filter: <ModuleEntry <Constant>> -> <TensorEntry name:'synthesis_filter' tensor:(1,4,63) returnn_data:'PQMF_synthesis_filter_const' [1,4,F|63] axes id> | |
output: <ModuleEntry <Copy>> -> <TensorEntry name:? tensor:(1,1,80) returnn_data:'output_output' [B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1] axes {0:0,2:1,1:2}> | |
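Note: the dotted names in this hierarchy are the PyTorch module paths; the conversion turns them 1:1 into slash-separated RETURNN layer paths / TF variable scopes. A tiny illustration (names taken from the lists further below):

  torch_name = "melgan.layer4.stack.layer2"     # PyTorch module path, see "Modules with params"
  returnn_scope = torch_name.replace(".", "/")  # -> "melgan/layer4/stack/layer2"
  tf_weight_name = returnn_scope + "/W:0"       # matches the tf.Variable names printed below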
>>>> RETURNN net dict: | |
{ | |
'melgan': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'data'}, | |
'layer1': { | |
'class': 'conv', | |
'from': 'layer0', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 384, | |
'filter_size': (7,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'layer2': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer1'}, | |
'layer3': { | |
'class': 'transposed_conv', | |
'from': 'layer2', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (10,), | |
'strides': (5,), | |
'padding': 'valid', | |
'output_padding': (1,), | |
'remove_padding': (3,) | |
}, | |
'layer4': { | |
'class': 'subnetwork', | |
'from': 'layer3', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer5': { | |
'class': 'subnetwork', | |
'from': 'layer4', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (3,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer6': { | |
'class': 'subnetwork', | |
'from': 'layer5', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (9,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer7': { | |
'class': 'subnetwork', | |
'from': 'layer6', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (27,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 192, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer8': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer7'}, | |
'layer9': { | |
'class': 'transposed_conv', | |
'from': 'layer8', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (10,), | |
'strides': (5,), | |
'padding': 'valid', | |
'output_padding': (1,), | |
'remove_padding': (3,) | |
}, | |
'layer10': { | |
'class': 'subnetwork', | |
'from': 'layer9', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer11': { | |
'class': 'subnetwork', | |
'from': 'layer10', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (3,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer12': { | |
'class': 'subnetwork', | |
'from': 'layer11', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (9,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer13': { | |
'class': 'subnetwork', | |
'from': 'layer12', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (27,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 96, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer14': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer13'}, | |
'layer15': { | |
'class': 'transposed_conv', | |
'from': 'layer14', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (4,), | |
'strides': (2,), | |
'padding': 'valid', | |
'output_padding': (0,), | |
'remove_padding': (1,) | |
}, | |
'layer16': { | |
'class': 'subnetwork', | |
'from': 'layer15', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (1, 1), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer17': { | |
'class': 'subnetwork', | |
'from': 'layer16', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (3,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer18': { | |
'class': 'subnetwork', | |
'from': 'layer17', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (9, 9), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (9,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer19': { | |
'class': 'subnetwork', | |
'from': 'layer18', | |
'subnetwork': { | |
'stack': { | |
'class': 'subnetwork', | |
'from': 'data', | |
'subnetwork': { | |
'layer0': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'data'}, | |
'layer1': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (27, 27), 'from': 'layer0'}, | |
'layer2': { | |
'class': 'conv', | |
'from': 'layer1', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (3,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (27,) | |
}, | |
'layer3': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer2'}, | |
'layer4': { | |
'class': 'conv', | |
'from': 'layer3', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'output': {'class': 'copy', 'from': 'layer4'} | |
} | |
}, | |
'skip_layer': { | |
'class': 'conv', | |
'from': 'data', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 48, | |
'filter_size': (1,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'add': {'class': 'combine', 'kind': 'add', 'from': ['stack', 'skip_layer']}, | |
'output': {'class': 'copy', 'from': 'add'} | |
} | |
}, | |
'layer20': {'class': 'eval', 'eval': 'tf.nn.leaky_relu(source(0), alpha=0.2)', 'from': 'layer19'}, | |
'layer21': {'class': 'pad', 'mode': 'reflect', 'axes': 'spatial', 'padding': (3, 3), 'from': 'layer20'}, | |
'layer22': { | |
'class': 'conv', | |
'from': 'layer21', | |
'activation': None, | |
'with_bias': True, | |
'n_out': 4, | |
'filter_size': (7,), | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'layer23': {'class': 'activation', 'activation': 'tanh', 'from': 'layer22'}, | |
'output': {'class': 'copy', 'from': 'layer23'} | |
} | |
}, | |
'PQMF_Cast': {'class': 'cast', 'from': 'PQMF_Cast_unnamed_const', 'dtype': 'float32'}, | |
'PQMF_Cast_unnamed_const': {'class': 'constant', 'value': numpy.array(4, dtype=numpy.int32)}, | |
'PQMF_mul': {'class': 'combine', 'kind': 'mul', 'from': ['PQMF_updown_filter', 'PQMF_Cast']}, | |
'PQMF_updown_filter': { | |
'class': 'constant', | |
'value': numpy.array([ | |
[[1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]], | |
[[0.0, 0.0, 0.0, 0.0], [1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]], | |
[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]], | |
[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0], [1.0, 0.0, 0.0, 0.0]] | |
], dtype=numpy.float32) | |
}, | |
'PQMF_FunctionalConvTransposed1d': { | |
'class': 'transposed_conv', | |
'from': 'melgan', | |
'n_out': 4, | |
'activation': None, | |
'with_bias': False, | |
'bias': None, | |
'filter_size': (4,), | |
'filter': 'PQMF_mul', | |
'filter_perm': {'static:0': 'F', 'static:1': 'static:1', 'F': 'static:0'}, | |
'padding': 'valid', | |
'output_padding': (0,), | |
'remove_padding': (0,), | |
'strides': (4,) | |
}, | |
'pad_fn': { | |
'class': 'pad', | |
'mode': 'constant', | |
'axes': 'spatial', | |
'padding': (31, 31), | |
'from': 'PQMF_FunctionalConvTransposed1d', | |
'value': 0.0 | |
}, | |
'PQMF_FunctionalConv1d': { | |
'class': 'conv', | |
'from': 'pad_fn', | |
'n_out': 1, | |
'activation': None, | |
'with_bias': False, | |
'bias': None, | |
'filter_size': (63,), | |
'filter': 'PQMF_synthesis_filter', | |
'filter_perm': {'static:0': 'F', 'static:1': 'static:1', 'F': 'static:0'}, | |
'padding': 'valid', | |
'strides': (1,), | |
'dilation_rate': (1,) | |
}, | |
'PQMF_synthesis_filter': { | |
'class': 'constant', | |
'value': numpy.array([ | |
[ | |
[ | |
1.653344588703476e-05, | |
3.907531936420128e-05, | |
3.7993191654095426e-05, | |
-1.1118547263322398e-05, | |
-6.70973167871125e-05, | |
-6.19814497996956e-19, | |
0.000319937797030434, | |
0.0008386399713344872, | |
0.0012031772639602423, | |
0.0009228164562955499, | |
-0.00017188044148497283, | |
-0.0014911156613379717, | |
-0.0017811377765610814, | |
2.2266204893018784e-18, | |
0.00346964574418962, | |
0.006319895852357149, | |
0.005423387512564659, | |
-0.0006891031516715884, | |
-0.009650670923292637, | |
-0.015545716509222984, | |
-0.012525110505521297, | |
7.202603219092196e-18, | |
0.01440473459661007, | |
0.01749838888645172, | |
-0.0012722874525934458, | |
-0.04052716866135597, | |
-0.08317920565605164, | |
-0.10245303809642792, | |
-0.07593876123428345, | |
1.490708503996114e-17, | |
0.10464989393949509, | |
0.20081833004951477, | |
0.25264719128608704, | |
0.24345116317272186, | |
0.1833323836326599, | |
0.10245303809642792, | |
0.03445395454764366, | |
2.481573477317513e-18, | |
-0.0005269987159408629, | |
0.01749838888645172, | |
0.03477610647678375, | |
0.03920914605259895, | |
0.03023829124867916, | |
0.015545716509222984, | |
0.003997439052909613, | |
1.265861891747689e-19, | |
0.0022464406210929155, | |
0.006319895852357149, | |
0.008376466110348701, | |
0.007272694259881973, | |
0.0043000467121601105, | |
0.0014911156613379717, | |
7.119520887499675e-05, | |
-1.1021566446688507e-18, | |
0.0004983723629266024, | |
0.0008386399713344872, | |
0.0007723981398157775, | |
0.0004598950035870075, | |
0.0001619872491573915, | |
1.1118547263322398e-05, | |
-1.5737294233986177e-05, | |
-8.616042507323048e-20, | |
6.848377779533621e-06 | |
], | |
[ | |
-6.848377779533621e-06, | |
-3.907531936420128e-05, | |
-1.5737294233986177e-05, | |
-1.1118547263322398e-05, | |
-0.0001619872491573915, | |
1.351986075970332e-18, | |
0.0007723981398157775, | |
0.0008386399713344872, | |
-0.0004983723629266024, | |
-0.0009228164562955499, | |
7.119520887499675e-05, | |
-0.0014911156613379717, | |
-0.0043000467121601105, | |
1.8708113957901537e-17, | |
0.008376466110348701, | |
0.006319895852357149, | |
-0.0022464406210929155, | |
0.0006891031516715884, | |
0.003997439052909613, | |
-0.015545716509222984, | |
-0.03023829124867916, | |
8.645551472572356e-17, | |
0.03477610647678375, | |
0.01749838888645172, | |
0.0005269987159408629, | |
0.04052716866135597, | |
0.03445395454764366, | |
-0.10245303809642792, | |
-0.1833323836326599, | |
1.490708503996114e-17, | |
0.25264719128608704, | |
0.20081833004951477, | |
-0.10464989393949509, | |
-0.24345116317272186, | |
-0.07593876123428345, | |
0.10245303809642792, | |
0.08317920565605164, | |
1.2407867179792413e-17, | |
-0.0012722874525934458, | |
0.01749838888645172, | |
-0.01440473459661007, | |
-0.03920914605259895, | |
-0.012525110505521297, | |
0.015545716509222984, | |
0.009650670923292637, | |
-7.599437017507494e-19, | |
0.005423387512564659, | |
0.006319895852357149, | |
-0.00346964574418962, | |
-0.007272694259881973, | |
-0.0017811377765610814, | |
0.0014911156613379717, | |
0.00017188044148497283, | |
6.78645702812047e-19, | |
0.0012031772639602423, | |
0.0008386399713344872, | |
-0.000319937797030434, | |
-0.0004598950035870075, | |
-6.70973167871125e-05, | |
1.1118547263322398e-05, | |
-3.7993191654095426e-05, | |
-1.244431968521913e-19, | |
1.653344588703476e-05 | |
], | |
[ | |
6.848377779533621e-06, | |
-3.907531936420128e-05, | |
1.5737294233986177e-05, | |
-1.1118547263322398e-05, | |
0.0001619872491573915, | |
-3.3801075166899776e-18, | |
-0.0007723981398157775, | |
0.0008386399713344872, | |
0.0004983723629266024, | |
-0.0009228164562955499, | |
-7.119520887499675e-05, | |
-0.0014911156613379717, | |
0.0043000467121601105, | |
-2.3161354936505294e-17, | |
-0.008376466110348701, | |
0.006319895852357149, | |
0.0022464406210929155, | |
0.0006891031516715884, | |
-0.003997439052909613, | |
-0.015545716509222984, | |
0.03023829124867916, | |
3.8438156623053137e-17, | |
-0.03477610647678375, | |
0.01749838888645172, | |
-0.0005269987159408629, | |
0.04052716866135597, | |
-0.03445395454764366, | |
-0.10245303809642792, | |
0.1833323836326599, | |
-4.4721255119883424e-17, | |
-0.25264719128608704, | |
0.20081833004951477, | |
0.10464989393949509, | |
-0.24345116317272186, | |
0.07593876123428345, | |
0.10245303809642792, | |
-0.08317920565605164, | |
-8.936173108986737e-17, | |
0.0012722874525934458, | |
0.01749838888645172, | |
0.01440473459661007, | |
-0.03920914605259895, | |
0.012525110505521297, | |
0.015545716509222984, | |
-0.009650670923292637, | |
-1.9414148179481886e-18, | |
-0.005423387512564659, | |
0.006319895852357149, | |
0.00346964574418962, | |
-0.007272694259881973, | |
0.0017811377765610814, | |
0.0014911156613379717, | |
-0.00017188044148497283, | |
-3.392086125935511e-18, | |
-0.0012031772639602423, | |
0.0008386399713344872, | |
0.000319937797030434, | |
-0.0004598950035870075, | |
6.70973167871125e-05, | |
1.1118547263322398e-05, | |
3.7993191654095426e-05, | |
1.9117200550086035e-20, | |
-1.653344588703476e-05 | |
], | |
[ | |
-1.653344588703476e-05, | |
3.907531936420128e-05, | |
-3.7993191654095426e-05, | |
-1.1118547263322398e-05, | |
6.70973167871125e-05, | |
-4.502824872857176e-19, | |
-0.000319937797030434, | |
0.0008386399713344872, | |
-0.0012031772639602423, | |
0.0009228164562955499, | |
0.00017188044148497283, | |
-0.0014911156613379717, | |
0.0017811377765610814, | |
2.4951655023478314e-17, | |
-0.00346964574418962, | |
0.006319895852357149, | |
-0.005423387512564659, | |
-0.0006891031516715884, | |
0.009650670923292637, | |
-0.015545716509222984, | |
0.012525110505521297, | |
-1.1046418715961388e-16, | |
-0.01440473459661007, | |
0.01749838888645172, | |
0.0012722874525934458, | |
-0.04052716866135597, | |
0.08317920565605164, | |
-0.10245303809642792, | |
0.07593876123428345, | |
-4.4721255119883424e-17, | |
-0.10464989393949509, | |
0.20081833004951477, | |
-0.25264719128608704, | |
0.24345116317272186, | |
-0.1833323836326599, | |
0.10245303809642792, | |
-0.03445395454764366, | |
4.469340891174244e-17, | |
0.0005269987159408629, | |
0.01749838888645172, | |
-0.03477610647678375, | |
0.03920914605259895, | |
-0.03023829124867916, | |
0.015545716509222984, | |
-0.003997439052909613, | |
-4.7271643199925296e-18, | |
-0.0022464406210929155, | |
0.006319895852357149, | |
-0.008376466110348701, | |
0.007272694259881973, | |
-0.0043000467121601105, | |
0.0014911156613379717, | |
-7.119520887499675e-05, | |
3.842993444776436e-18, | |
-0.0004983723629266024, | |
0.0008386399713344872, | |
-0.0007723981398157775, | |
0.0004598950035870075, | |
-0.0001619872491573915, | |
1.1118547263322398e-05, | |
1.5737294233986177e-05, | |
5.739997556022112e-20, | |
-6.848377779533621e-06 | |
] | |
] | |
], dtype=numpy.float32) | |
}, | |
'output': {'class': 'copy', 'from': 'PQMF_FunctionalConv1d'} | |
} | |
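Note: the dict above is a plain RETURNN "network" definition. A minimal sketch of constructing it standalone (an assumption based on the standard returnn.config.Config / returnn.tf.network.TFNetwork API, not taken from this run; the trivial net_dict is a stand-in for the dict printed above):

  from returnn.config import Config
  from returnn.tf.network import TFNetwork

  net_dict = {"output": {"class": "copy", "from": "data"}}  # substitute the net dict printed above
  config = Config()
  config.update({"extern_data": {"data": {"shape": (80, None), "time_dim_axis": 2, "feature_dim_axis": 1}}})
  network = TFNetwork(config=config, train_flag=False)
  network.construct_from_dict(net_dict)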
>>>> Root module calls: | |
{ | |
'melgan': <CallEntry #1 <ModuleEntry <Sequential>>>, | |
'PQMF_Cast': <CallEntry #0 <ModuleEntry <Cast>>>, | |
'PQMF_Cast_unnamed_const': <CallEntry #1 <ModuleEntry <Constant>>>, | |
'PQMF_mul': <CallEntry #0 <ModuleEntry <BinaryOperator>>>, | |
'PQMF_updown_filter': <CallEntry #1 <ModuleEntry <Constant>>>, | |
'PQMF_FunctionalConvTransposed1d': <CallEntry #0 <ModuleEntry <FunctionalConvTransposed1d>>>, | |
'pad_fn': <CallEntry #0 <ModuleEntry <ConstantPad1d>>>, | |
'PQMF_FunctionalConv1d': <CallEntry #0 <ModuleEntry <FunctionalConv1d>>>, | |
'PQMF_synthesis_filter': <CallEntry #1 <ModuleEntry <Constant>>>, | |
'output': <CallEntry #None <ModuleEntry <Copy>>> | |
} | |
>>>> Modules with params: | |
{ | |
'melgan.layer1': <Conv1d>, | |
'melgan.layer3': <ConvTranspose1d>, | |
'melgan.layer4.stack.layer2': <Conv1d>, | |
'melgan.layer4.stack.layer4': <Conv1d>, | |
'melgan.layer4.skip_layer': <Conv1d>, | |
'melgan.layer5.stack.layer2': <Conv1d>, | |
'melgan.layer5.stack.layer4': <Conv1d>, | |
'melgan.layer5.skip_layer': <Conv1d>, | |
'melgan.layer6.stack.layer2': <Conv1d>, | |
'melgan.layer6.stack.layer4': <Conv1d>, | |
'melgan.layer6.skip_layer': <Conv1d>, | |
'melgan.layer7.stack.layer2': <Conv1d>, | |
'melgan.layer7.stack.layer4': <Conv1d>, | |
'melgan.layer7.skip_layer': <Conv1d>, | |
'melgan.layer9': <ConvTranspose1d>, | |
'melgan.layer10.stack.layer2': <Conv1d>, | |
'melgan.layer10.stack.layer4': <Conv1d>, | |
'melgan.layer10.skip_layer': <Conv1d>, | |
'melgan.layer11.stack.layer2': <Conv1d>, | |
'melgan.layer11.stack.layer4': <Conv1d>, | |
'melgan.layer11.skip_layer': <Conv1d>, | |
'melgan.layer12.stack.layer2': <Conv1d>, | |
'melgan.layer12.stack.layer4': <Conv1d>, | |
'melgan.layer12.skip_layer': <Conv1d>, | |
'melgan.layer13.stack.layer2': <Conv1d>, | |
'melgan.layer13.stack.layer4': <Conv1d>, | |
'melgan.layer13.skip_layer': <Conv1d>, | |
'melgan.layer15': <ConvTranspose1d>, | |
'melgan.layer16.stack.layer2': <Conv1d>, | |
'melgan.layer16.stack.layer4': <Conv1d>, | |
'melgan.layer16.skip_layer': <Conv1d>, | |
'melgan.layer17.stack.layer2': <Conv1d>, | |
'melgan.layer17.stack.layer4': <Conv1d>, | |
'melgan.layer17.skip_layer': <Conv1d>, | |
'melgan.layer18.stack.layer2': <Conv1d>, | |
'melgan.layer18.stack.layer4': <Conv1d>, | |
'melgan.layer18.skip_layer': <Conv1d>, | |
'melgan.layer19.stack.layer2': <Conv1d>, | |
'melgan.layer19.stack.layer4': <Conv1d>, | |
'melgan.layer19.skip_layer': <Conv1d>, | |
'melgan.layer22': <Conv1d> | |
} | |
Output shape: (1, 16000, 1) | |
Output seq lens: [16000] | |
Output shape (converted to Torch): (1, 1, 16000) | |
>>>> Looks good! | |
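Note: the shapes are consistent with the strides in the net dict above: the melgan transposed convolutions upsample by 5 * 5 * 2, and PQMF_FunctionalConvTransposed1d by a further 4, so 80 input frames * (5 * 5 * 2 * 4) = 80 * 200 = 16000 output samples, matching the printed seq lens.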
RETURNN network layer topology: | |
extern data: data: Data(shape=(80, None), time_dim_axis=2, feature_dim_axis=1, batch_shape_meta=[B,F|80,T|'time:var:extern_data:data']) | |
used data keys: ['data'] | |
layers: | |
layer subnetwork '.tmp_root' #: 80 | |
layer cast 'PQMF_Cast' #: unknown | |
layer constant 'PQMF_Cast_unnamed_const' #: unknown | |
layer conv 'PQMF_FunctionalConv1d' #: 1 | |
layer transposed_conv 'PQMF_FunctionalConvTransposed1d' #: 4 | |
layer combine 'PQMF_mul' #: 4 | |
layer constant 'PQMF_synthesis_filter' #: 63 | |
layer constant 'PQMF_updown_filter' #: 4 | |
layer source 'data' #: 80 | |
layer subnetwork 'melgan' #: 4 | |
layer copy 'output' #: 1 | |
layer pad 'pad_fn' #: 4 | |
net params #: 2128852 | |
net trainable params: [<tf.Variable 'melgan/layer1/W:0' shape=(7, 80, 384) dtype=float32>, <tf.Variable 'melgan/layer1/bias:0' shape=(384,) dtype=float32>, <tf.Variable 'melgan/layer10/skip_layer/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer10/skip_layer/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer10/stack/layer2/W:0' shape=(3, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer10/stack/layer2/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer10/stack/layer4/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer10/stack/layer4/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer11/skip_layer/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer11/skip_layer/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer11/stack/layer2/W:0' shape=(3, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer11/stack/layer2/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer11/stack/layer4/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer11/stack/layer4/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer12/skip_layer/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer12/skip_layer/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer12/stack/layer2/W:0' shape=(3, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer12/stack/layer2/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer12/stack/layer4/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer12/stack/layer4/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer13/skip_layer/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer13/skip_layer/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer13/stack/layer2/W:0' shape=(3, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer13/stack/layer2/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer13/stack/layer4/W:0' shape=(1, 96, 96) dtype=float32>, <tf.Variable 'melgan/layer13/stack/layer4/bias:0' shape=(96,) dtype=float32>, <tf.Variable 'melgan/layer15/W_native_transposed_conv:0' shape=(4, 1, 48, 96) dtype=float32>, <tf.Variable 'melgan/layer15/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer16/skip_layer/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer16/skip_layer/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer16/stack/layer2/W:0' shape=(3, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer16/stack/layer2/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer16/stack/layer4/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer16/stack/layer4/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer17/skip_layer/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer17/skip_layer/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer17/stack/layer2/W:0' shape=(3, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer17/stack/layer2/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer17/stack/layer4/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer17/stack/layer4/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer18/skip_layer/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer18/skip_layer/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer18/stack/layer2/W:0' shape=(3, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer18/stack/layer2/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer18/stack/layer4/W:0' shape=(1, 48, 48) dtype=float32>, 
<tf.Variable 'melgan/layer18/stack/layer4/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer19/skip_layer/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer19/skip_layer/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer19/stack/layer2/W:0' shape=(3, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer19/stack/layer2/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer19/stack/layer4/W:0' shape=(1, 48, 48) dtype=float32>, <tf.Variable 'melgan/layer19/stack/layer4/bias:0' shape=(48,) dtype=float32>, <tf.Variable 'melgan/layer22/W:0' shape=(7, 48, 4) dtype=float32>, <tf.Variable 'melgan/layer22/bias:0' shape=(4,) dtype=float32>, <tf.Variable 'melgan/layer3/W_native_transposed_conv:0' shape=(10, 1, 192, 384) dtype=float32>, <tf.Variable 'melgan/layer3/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer4/skip_layer/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer4/skip_layer/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer4/stack/layer2/W:0' shape=(3, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer4/stack/layer2/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer4/stack/layer4/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer4/stack/layer4/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer5/skip_layer/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer5/skip_layer/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer5/stack/layer2/W:0' shape=(3, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer5/stack/layer2/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer5/stack/layer4/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer5/stack/layer4/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer6/skip_layer/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer6/skip_layer/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer6/stack/layer2/W:0' shape=(3, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer6/stack/layer2/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer6/stack/layer4/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer6/stack/layer4/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer7/skip_layer/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer7/skip_layer/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer7/stack/layer2/W:0' shape=(3, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer7/stack/layer2/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer7/stack/layer4/W:0' shape=(1, 192, 192) dtype=float32>, <tf.Variable 'melgan/layer7/stack/layer4/bias:0' shape=(192,) dtype=float32>, <tf.Variable 'melgan/layer9/W_native_transposed_conv:0' shape=(10, 1, 96, 192) dtype=float32>, <tf.Variable 'melgan/layer9/bias:0' shape=(96,) dtype=float32>] | |
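The reported count can be cross-checked against this variable list: summing the products of all shapes reproduces 2128852 exactly. A minimal sketch (pure Python; shapes transcribed from the list above):

    from functools import reduce
    from operator import mul

    # Shapes transcribed from the trainable-variable list above:
    # four residual blocks each at 192, 96 and 48 channels, plus the
    # input/output and transposed convolutions.
    shapes = (
        [(7, 80, 384), (384,)]                                                       # layer1
        + [(10, 1, 192, 384), (192,)]                                                # layer3
        + 4 * [(1, 192, 192), (192,), (3, 192, 192), (192,), (1, 192, 192), (192,)]  # layers 4-7
        + [(10, 1, 96, 192), (96,)]                                                  # layer9
        + 4 * [(1, 96, 96), (96,), (3, 96, 96), (96,), (1, 96, 96), (96,)]           # layers 10-13
        + [(4, 1, 48, 96), (48,)]                                                    # layer15
        + 4 * [(1, 48, 48), (48,), (3, 48, 48), (48,), (1, 48, 48), (48,)]           # layers 16-19
        + [(7, 48, 4), (4,)]                                                         # layer22
    )
    total = sum(reduce(mul, s, 1) for s in shapes)
    print(total)  # -> 2128852, matching "net params #" above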
Saving TF checkpoint to '/var/folders/fk/mt9zfm3n2853v1pcy0q3y25h0000gp/T/tmputue4givtmp-returnn-tf-checkpoint/model'... | |
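For reference, the round trip here is a plain TF1-style save/restore: the traced PyTorch parameters are written to the temporary checkpoint above, and the RETURNN network constructed next restores from it. A minimal sketch with stand-in names (the variable and the path are placeholders, not the converter's internals):

    import os
    import tensorflow as tf

    tf.compat.v1.disable_eager_execution()

    # Hypothetical stand-in for one converted parameter.
    w = tf.compat.v1.get_variable("melgan/layer1/W", shape=(7, 80, 384))
    saver = tf.compat.v1.train.Saver()

    os.makedirs("/tmp/tmp-returnn-tf-checkpoint", exist_ok=True)
    with tf.compat.v1.Session() as sess:
        sess.run(tf.compat.v1.global_variables_initializer())
        ckpt = saver.save(sess, "/tmp/tmp-returnn-tf-checkpoint/model")
        saver.restore(sess, ckpt)  # what the RETURNN side does after graph construction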
>>> Constructing RETURNN model, load TF checkpoint, run... | |
layer <network via _run_returnn_standalone>/'data' output: Data(name='data', shape=(80, None), time_dim_axis=2, feature_dim_axis=1, batch_shape_meta=[B,F|80,T|'time:var:extern_data:data']) | |
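Note the axis layout: feature_dim_axis=1 and time_dim_axis=2 give the channels-first (batch, 80, time) layout that PyTorch's Conv1d expects, rather than RETURNN's default batch-time-feature order. A minimal sketch constructing the same template (assuming RETURNN is importable):

    from returnn.tf.util.basic import Data

    # Channels-first, as in PyTorch: (batch, feature=80, time).
    data = Data(name="data", shape=(80, None), feature_dim_axis=1, time_dim_axis=2)
    print(data)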
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5903935888), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5903936144), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5903933648), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5903936272)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5903936400)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904008016), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904008272), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904007696), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904008400)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904008528)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904051344), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904051600), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904048464), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904051728)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904051856)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904160208), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904160528), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904159376), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904161232)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904161104)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904250064), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904250320), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904249296), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904250448)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904250576)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904301584), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904301840), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904299344), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904301968)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904302032)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904406416), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904406672), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904405072), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904406800)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904406928)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904498832), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904499984), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5904499280), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904500240)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904500368)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904589264), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904589520), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904588944), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904589648)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904589776)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904657168), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904657424), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904654928), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904657552)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904657680)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904741520), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904741776), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904740432), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904741904)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904742032)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904743248), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904831056), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904831248), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904831184)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904831376)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904883280), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904883536), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904881040), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904883664)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904941200)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5904251600), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904251792), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5904251984), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5904250064)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5904250512)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905060624), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905060880), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5905059984), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905061008)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905061136)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905150416), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905150672), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5905150224), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905150800)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905150928)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905230672), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905230928), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905228432), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905231056)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905231184)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905323216), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905323472), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905322448), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905323600)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905323728)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905274960), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905275088), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905274576), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905275216)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905274768)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905464208), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905464784), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905462288), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905464912)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905465040)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905564816), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905565072), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905564048), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905565200)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905565328)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905674320), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905674448), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905674384), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905674576)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905674704)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905712272), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905712656), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905712016), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905713552)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905712976)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905811472), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905811728), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905809552), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905811856)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905811984)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5906002064), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906002192), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5906002128), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5906002320)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906002448)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5906032720), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906034000), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5906032016), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5906034256)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906034384)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5906127312), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906127568), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5906125072), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5906127696)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906127824)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5906228048), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906228304), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5906227728), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5906228432)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5906228560)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905937872), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905937936), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5905937552), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905938064)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905892496)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5750462672), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5750461200), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5750463952), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5717892880)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5717893072)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5869215632), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5869215696), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5869168592), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5869167184)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5869167376)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]), | |
Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5878956624), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5878956112), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=192, id=5878956816), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5878959184)], | |
largest source | |
(Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5878956368)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5874784272), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5874695184), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5874783632), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5874649488)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5874650960)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5878663248), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5878708176), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5878664528), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5866757520)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5866754832)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5868640208), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5868641680), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5868641424), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5868640976)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5868551504)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5866603920), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5855352720), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5855351824), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5855352272)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5855353680)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5854631120), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5854630480), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5854630672), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5854631888)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5854565584)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5866127888), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5854297680), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5854295184), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5854295632)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5854297808)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5851690320), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5851663440), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5851661840), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5851663248)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5851662352)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96]), | |
Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5828863056), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5828863568), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=96, id=5828864976), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5828863824)], | |
largest source | |
(Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5828864080)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5926735376), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5926733392), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5926735440), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5926732816)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5926732304)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5834235024), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5834234128), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5834235536), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5851924560)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5805873232)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5867604624), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5717876880), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5717877648), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5717878416)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5717879696)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5844809872), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5844754128), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5844753808), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5844753232)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5844754000)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5844435728), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5844437520), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5844438800), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5844435024)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5844438160)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5845135760), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5845136144), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5845137936), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5844703376)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5844703888)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5905863120), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905864144), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5905862992), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5905863952)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5905864656)] | |
get_common_data( | |
[Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48]), | |
Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])]), | |
dim tags | |
[DimensionTag(kind='batch', description='batch:output_output', id=5750452048), | |
DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5750449744), | |
DimensionTag(kind='feature', description='feature:output_output', dimension=48, id=5750450000), | |
DimensionTag(kind='spatial', description='time:var-unk:skip_layer_output', id=5750450960)], | |
largest source | |
(Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])) | |
has incomplete dim tag info: | |
[DimensionTag(kind='spatial', description='time:var-unk:output_output', id=5750449616)] | |
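The repeated blocks above are diagnostics from Data.get_common_data: inside each residual block, the 'add' layer combines the stack output with the skip_layer output, and at template-construction time their dynamic time dims carry distinct, not-yet-unified dim tags, so RETURNN falls back to the largest source. They are warnings only; as the layer outputs below show, the time dims resolve to common tags (e.g. 'spatial:0:melgan/layer3') once the network is actually built. The situation can be reproduced in isolation (a minimal sketch, assuming a RETURNN version from around this time):

    from returnn.tf.util.basic import Data

    # Two templates with an undeclared (None) time dim, as in the warnings above.
    a = Data(name="output_output", shape=(None, 192))
    b = Data(name="skip_layer_output", shape=(None, 192))

    common = Data.get_common_data([a, b])  # emits the same kind of diagnostic
    print(common)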
layer <network via _run_returnn_standalone>/'melgan' output: Data(name='output_output', shape=(None, 4), batch_shape_meta=[B,T|?,F|4]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'data' output: Data(name='data', shape=(80, None), time_dim_axis=2, feature_dim_axis=1, batch_shape_meta=[B,F|80,T|'time:var:extern_data:data']) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer0' output: Data(name='layer0_output', shape=(80, None), time_dim_axis=2, feature_dim_axis=1, batch_shape_meta=[B,F|80,T|'time:var:extern_data:data']) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 384), batch_shape_meta=[B,T|?,F|384]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 384), batch_shape_meta=[B,T|'time:var:extern_data:data',F|384]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|?,F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer4' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/'data' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/'stack' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'data' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer4/stack/layer1',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/'add' output: Data(name='add_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer4:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
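This layer4 subnet layout, repeated for every residual block below, mirrors the MelGAN residual stack from parallel_wavegan: a LeakyReLU / reflection-pad / dilated 3-tap conv / LeakyReLU / 1-tap conv main path ('stack'), a 1-tap conv shortcut ('skip_layer'), and an elementwise 'add'. A minimal PyTorch sketch matching the variable shapes above (activation slope and padding mode are the upstream defaults, not read from this log):

    import torch
    import torch.nn as nn

    class ResidualBlock(nn.Module):
        """Sketch of the melgan/layerN subnets: stack + skip_layer + add."""
        def __init__(self, channels=192, dilation=1):
            super().__init__()
            self.stack = nn.Sequential(  # stack/layer0 .. stack/layer4
                nn.LeakyReLU(0.2),
                nn.ReflectionPad1d(dilation),
                nn.Conv1d(channels, channels, kernel_size=3, dilation=dilation),  # W (3, C, C) in the log
                nn.LeakyReLU(0.2),
                nn.Conv1d(channels, channels, kernel_size=1),                     # W (1, C, C)
            )
            self.skip_layer = nn.Conv1d(channels, channels, kernel_size=1)        # W (1, C, C)

        def forward(self, x):  # x: (batch, channels, time)
            return self.stack(x) + self.skip_layer(x)

    y = ResidualBlock(192)(torch.randn(2, 192, 100))
    print(y.shape)  # torch.Size([2, 192, 100])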
layer <network via _run_returnn_standalone>/melgan:subnet/'layer5' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/'stack' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer5/stack/layer1',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/'add' output: Data(name='add_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer5:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer6' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/'stack' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer6/stack/layer1',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/'add' output: Data(name='add_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer6:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer7' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/'stack' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer7/stack/layer1',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/'add' output: Data(name='add_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/layer7:subnet/'output' output: Data(name='output_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192]) | |
layer <network via _run_returnn_standalone>/melgan:subnet/'layer8' output: Data(name='layer8_output', shape=(None, 192), batch_shape_meta=[B,T|'spatial:0:melgan/layer3',F|192])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer9' output: Data(name='layer9_output', shape=(None, 96), batch_shape_meta=[B,T|?,F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer10' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/'data' output: Data(name='layer9_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/'stack' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'data' output: Data(name='layer9_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer10/stack/layer1',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/'add' output: Data(name='add_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer10:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer11' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/'stack' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer11/stack/layer1',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/'add' output: Data(name='add_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer11:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer12' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/'stack' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer12/stack/layer1',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/'add' output: Data(name='add_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer12:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer13' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/'stack' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer13/stack/layer1',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/'add' output: Data(name='add_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/layer13:subnet/'output' output: Data(name='output_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer14' output: Data(name='layer14_output', shape=(None, 96), batch_shape_meta=[B,T|'spatial:0:melgan/layer9',F|96])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer15' output: Data(name='layer15_output', shape=(None, 48), batch_shape_meta=[B,T|?,F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer16' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/'data' output: Data(name='layer15_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/'stack' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'data' output: Data(name='layer15_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer16/stack/layer1',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/'add' output: Data(name='add_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer16:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer17' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/'stack' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer17/stack/layer1',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/'add' output: Data(name='add_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer17:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer18' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/'stack' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer18/stack/layer1',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/'add' output: Data(name='add_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer18:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer19' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/'stack' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'data' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'layer0' output: Data(name='layer0_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'layer1' output: Data(name='layer1_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'layer2' output: Data(name='layer2_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer19/stack/layer1',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'layer3' output: Data(name='layer3_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'layer4' output: Data(name='layer4_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/stack:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/'skip_layer' output: Data(name='skip_layer_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/'add' output: Data(name='add_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/layer19:subnet/'output' output: Data(name='output_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer20' output: Data(name='layer20_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer21' output: Data(name='layer21_output', shape=(None, 48), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|48])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer22' output: Data(name='layer22_output', shape=(None, 4), batch_shape_meta=[B,T|'spatial:0:melgan/layer21',F|4])
layer <network via _run_returnn_standalone>/melgan:subnet/'layer23' output: Data(name='layer23_output', shape=(None, 4), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|4])
layer <network via _run_returnn_standalone>/melgan:subnet/'output' output: Data(name='output_output', shape=(None, 4), batch_shape_meta=[B,T|'spatial:0:melgan/layer15',F|4])
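Annotation: the twelve-line pattern that repeats above for layer10-layer13 (96 channels) and layer16-layer19 (48 channels) is the traced form of a MelGAN residual block: a 'stack' of five sublayers, a 1x1 'skip_layer' on the block input, and an 'add' that sums the two. A minimal PyTorch sketch of that structure, assuming the standard MelGAN block layout (class and argument names here are hypothetical, not the parallel_wavegan originals):

import torch

class ResidualBlockSketch(torch.nn.Module):
    # Hedged sketch of the repeated layerN:subnet pattern in the log above.
    def __init__(self, channels=96, kernel_size=3, dilation=1):
        super().__init__()
        pad = (kernel_size - 1) // 2 * dilation
        self.stack = torch.nn.Sequential(
            torch.nn.LeakyReLU(0.2),                     # layer0
            torch.nn.ReflectionPad1d(pad),               # layer1
            torch.nn.Conv1d(channels, channels, kernel_size,
                            dilation=dilation),          # layer2: dilated conv
            torch.nn.LeakyReLU(0.2),                     # layer3
            torch.nn.Conv1d(channels, channels, 1),      # layer4: 1x1 conv
        )
        self.skip_layer = torch.nn.Conv1d(channels, channels, 1)  # 'skip_layer'

    def forward(self, x):  # x: (batch, channels, time)
        return self.stack(x) + self.skip_layer(x)        # 'add'

Note how only layer2 carries a new time-dim tag (e.g. 'spatial:0:melgan/layer10/stack/layer1', from the padding layer), while layer3 and layer4 are tagged with the block input's time dim again: the reflection padding exactly compensates the dilated convolution, so RETURNN re-identifies the output length with the input length.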
layer <network via _run_returnn_standalone>/'PQMF_updown_filter' output: Data(name='PQMF_updown_filter_const', shape=(4, 4, 4), batch_dim_axis=None, time_dim_axis=None, batch_shape_meta=[4,4,F|4])
layer <network via _run_returnn_standalone>/'PQMF_Cast_unnamed_const' output: Data(name='PQMF_Cast_unnamed_const_const', shape=(), dtype='int32', batch_dim_axis=None, time_dim_axis=None, batch_shape_meta=[])
layer <network via _run_returnn_standalone>/'PQMF_Cast' output: Data(name='PQMF_Cast_output', shape=(), batch_dim_axis=None, time_dim_axis=None, batch_shape_meta=[])
layer <network via _run_returnn_standalone>/'PQMF_mul' output: Data(name='PQMF_mul_output', shape=(4, 4, 4), batch_dim_axis=None, time_dim_axis=None, batch_shape_meta=[4,4,F|4])
layer <network via _run_returnn_standalone>/'PQMF_FunctionalConvTransposed1d' output: Data(name='PQMF_FunctionalConvTransposed1d_output', shape=(None, 4), batch_shape_meta=[B,T|?,F|4])
layer <network via _run_returnn_standalone>/'pad_fn' output: Data(name='pad_fn_output', shape=(None, 4), batch_shape_meta=[B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|4])
layer <network via _run_returnn_standalone>/'PQMF_synthesis_filter' output: Data(name='PQMF_synthesis_filter_const', shape=(1, 4, 63), batch_dim_axis=None, time_dim_axis=None, batch_shape_meta=[1,4,F|63])
layer <network via _run_returnn_standalone>/'PQMF_FunctionalConv1d' output: Data(name='PQMF_FunctionalConv1d_output', shape=(None, 1), batch_shape_meta=[B,T|'spatial:0:pad_fn',F|1])
layer <network via _run_returnn_standalone>/'output' output: Data(name='output_output', shape=(None, 1), batch_shape_meta=[B,T|'spatial:0:PQMF_FunctionalConvTransposed1d',F|1])
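Annotation: the PQMF_* layers above implement the synthesis step of the multi-band vocoder. The fixed (4, 4, 4) updown filter is scaled by the number of subbands (the Cast/mul layers), a stride-4 transposed convolution upsamples the 4-channel generator output, and after padding, the fixed 63-tap (1, 4, 63) synthesis filter recombines the subbands into a single waveform. A hedged functional sketch under those assumptions (the function name and exact padding are assumptions; the torch.nn.functional calls themselves are standard):

import torch.nn.functional as F

def pqmf_synthesis_sketch(x, updown_filter, synthesis_filter, subbands=4):
    # x: (batch, subbands, time) multi-band generator output;
    # updown_filter: (4, 4, 4) constant; synthesis_filter: (1, 4, 63) constant.
    x = F.conv_transpose1d(x, updown_filter * subbands,
                           stride=subbands)      # PQMF_mul + FunctionalConvTransposed1d: time * 4
    x = F.pad(x, (31, 31))                       # 'pad_fn': half the 63-tap filter per side (assumed)
    return F.conv1d(x, synthesis_filter)         # PQMF_FunctionalConv1d -> (batch, 1, time * 4)

The stride-4 transposed convolution is what sets the final length: a 4-channel subband signal of T frames comes out as a single channel of 4*T samples, consistent with the (1, 16000, 1) output shape reported next.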
Output shape: (1, 16000, 1)
>>>> Looks good!
Wrote out.wav.
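Annotation: the ">>>> Looks good!" line marks the numerical comparison of the RETURNN output against the PyTorch reference computed earlier in the run. Conceptually it is a check along these lines (function name, variable names, and tolerance are hypothetical, not the pytorch_to_returnn API):

import numpy as np

def outputs_match(torch_out, returnn_out, atol=1e-5):
    # Hedged sketch of the final verification; both arrays would have
    # shape (1, 16000, 1) here, as reported above.
    return (torch_out.shape == returnn_out.shape
            and np.allclose(torch_out, returnn_out, atol=atol))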
Process finished with exit code 0