# NLP ML (Built-In)
## Stemming
```python
import nltk

# Porter stemmer: reduces each token to its stem (e.g. "searching" -> "search").
ps = nltk.PorterStemmer()

def stemming(tokenized_text):
    return [ps.stem(word) for word in tokenized_text]

# `data` comes from the earlier (not shown) cleaning, tokenizing, and
# stop-word-removal steps that produced body_text_clean, body_text_tokenized,
# and body_text_nostop.
data['body_text_stemmed'] = data['body_text_nostop'].apply(stemming)

data.head()
```

Output of `data.head()`:

|   | label | body_text | body_text_clean | body_text_tokenized | body_text_nostop | body_text_stemmed |
|---|-------|-----------|-----------------|---------------------|------------------|-------------------|
| 0 | ham | I've been searching for the right words to tha... | Ive been searching for the right words to than... | [ive, been, searching, for, the, right, words,... | [ive, searching, right, words, thank, breather... | [ive, search, right, word, thank, breather, pr... |
| 1 | spam | Free entry in 2 a wkly comp to win FA Cup fina... | Free entry in 2 a wkly comp to win FA Cup fina... | [free, entry, in, 2, a, wkly, comp, to, win, f... | [free, entry, 2, wkly, comp, win, fa, cup, fin... | [free, entri, 2, wkli, comp, win, fa, cup, fin... |
| 2 | ham | Nah I don't think he goes to usf, he lives aro... | Nah I dont think he goes to usf he lives aroun... | [nah, i, dont, think, he, goes, to, usf, he, l... | [nah, dont, think, goes, usf, lives, around, t... | [nah, dont, think, goe, usf, live, around, tho... |
| 3 | ham | Even my brother is not like to speak with me. ... | Even my brother is not like to speak with me T... | [even, my, brother, is, not, like, to, speak, ... | [even, brother, like, speak, treat, like, aids... | [even, brother, like, speak, treat, like, aid,... |
| 4 | ham | I HAVE A DATE ON SUNDAY WITH WILL!! | I HAVE A DATE ON SUNDAY WITH WILL | [i, have, a, date, on, sunday, with, will] | [date, sunday] | [date, sunday] |
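As a quick sanity check, here is a minimal, standalone sketch (independent of the `data` DataFrame above) that runs `nltk.PorterStemmer` on a few tokens taken from the sample rows, reproducing the stems visible in `body_text_stemmed` such as `entri`, `wkli`, and `goe`:

```python
import nltk

ps = nltk.PorterStemmer()

# Tokens taken from the sample rows above; note the non-dictionary stems
# ("entri", "wkli", "goe") that the Porter algorithm produces.
for word in ["searching", "words", "entry", "wkly", "goes", "lives", "aids"]:
    print(f"{word:>10} -> {ps.stem(word)}")
```

Porter stems need not be dictionary words; the algorithm only aims to map related surface forms (search/searching/searched) to a common key.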
"metadata": { | |
"kernelspec": { | |
"display_name": "Python 3", | |
"language": "python", | |
"name": "python3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.7.2" | |
}, | |
"toc": { | |
"base_numbering": 1, | |
"nav_menu": {}, | |
"number_sections": false, | |
"sideBar": true, | |
"skip_h1_title": false, | |
"title_cell": "Table of Contents", | |
"title_sidebar": "Contents", | |
"toc_cell": false, | |
"toc_position": {}, | |
"toc_section_display": true, | |
"toc_window_display": false | |
}, | |
"varInspector": { | |
"cols": { | |
"lenName": 16, | |
"lenType": 16, | |
"lenVar": 40 | |
}, | |
"kernels_config": { | |
"python": { | |
"delete_cmd_postfix": "", | |
"delete_cmd_prefix": "del ", | |
"library": "var_list.py", | |
"varRefreshCmd": "print(var_dic_list())" | |
}, | |
"r": { | |
"delete_cmd_postfix": ") ", | |
"delete_cmd_prefix": "rm(", | |
"library": "var_list.r", | |
"varRefreshCmd": "cat(var_dic_list()) " | |
} | |
}, | |
"types_to_exclude": [ | |
"module", | |
"function", | |
"builtin_function_or_method", | |
"instance", | |
"_Feature" | |
], | |
"window_display": false | |
} | |
}, | |
"nbformat": 4, | |
"nbformat_minor": 2 | |
} |