Skip to content

Instantly share code, notes, and snippets.

@MittalShruti
Created December 7, 2019 03:09
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save MittalShruti/cc5edd986e461bcfd3633afa438f9b4e to your computer and use it in GitHub Desktop.
Save MittalShruti/cc5edd986e461bcfd3633afa438f9b4e to your computer and use it in GitHub Desktop.
ASR
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "ASR",
"provenance": [],
"collapsed_sections": [],
"machine_shape": "hm",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/MittalShruti/cc5edd986e461bcfd3633afa438f9b4e/asr.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "DzrgTEIvw-Ph",
"colab_type": "code",
"colab": {}
},
"source": [
"def asr_learner(data:DataBunch, loss_func, number_of_classes, window_size=255,**kwargs)->Learner:\n",
"    \"\"\"Build a fastai `Learner` for ASR around a DeepSpeech model.\n",
"\n",
"    data: DataBunch yielding (audio, target) batches.\n",
"    loss_func: CTC-style loss callable; registered on the Learner unchanged.\n",
"    number_of_classes: output vocabulary size (including the blank token).\n",
"    window_size: spectrogram window size forwarded to DeepSpeech.\n",
"    Returns a Learner with WER/CER metrics and ShowGraph attached.\n",
"    \"\"\"\n",
"    model = DeepSpeech(num_classes=number_of_classes, window_size=window_size)\n",
"    apply_init(model, nn.init.kaiming_normal_)\n",
"\n",
"    w = WerMetric(EPSTOK, hi_vocab); w.__name__ = \"wer\"\n",
"    c = CerMetric(EPSTOK, hi_vocab); c.__name__ = \"cer\"\n",
"    metrics = [w, c]\n",
"\n",
"    # Sanity-check a forward pass on one batch of *data* (the old code read\n",
"    # the global `db` here, which silently broke when data != db).\n",
"    x, y = data.one_batch()\n",
"    _ = model(x)\n",
"\n",
"    # FIX: loss_func must be passed by keyword -- the third positional\n",
"    # argument of fastai v1 Learner is `opt_func`, so the previous call\n",
"    # handed a computed loss *value* in place of an optimizer factory and\n",
"    # never registered a loss function at all.\n",
"    learn = Learner(data, model, loss_func=loss_func, callback_fns=ShowGraph, metrics=metrics, **kwargs)\n",
"\n",
"    return learn"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "AKHE9oEEyl2l",
"colab_type": "code",
"colab": {}
},
"source": [
"\n",
"blank_id = hi_vocab.numericalize(EPSTOK)[0]\n",
"num_saidas = len(list(hi_lbls))\n",
"\n",
"loss_func = CTC(blank=blank_id, post_reduction=lambda loss: torch.sum(loss))\n",
"\n",
"learn = asr_learner(db, loss_func, num_saidas)\n",
"learn.opt_func = optim.Adam"
],
"execution_count": 0,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "iZruuUwJzWeR",
"colab_type": "code",
"colab": {}
},
"source": [
""
],
"execution_count": 0,
"outputs": []
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment