import tensorflow as tf

use_tpu = True
if use_tpu:
    # Create distribution strategy (TPU initialization)
    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    strategy = tf.distribute.experimental.TPUStrategy(tpu)

    # Create the model inside the TPU strategy scope
    with strategy.scope():
        model = create_model()
from transformers import XLNetConfig, XLNetTokenizerFast

def create_model():
    # Name of the XLNet model to use
    model_name = 'xlnet-large-cased'
    # Max length of tokens
    max_length = 10
    # Load the transformers config and set output_hidden_states to False
    config = XLNetConfig.from_pretrained(model_name)
    config.output_hidden_states = False
    # Load the XLNet tokenizer
    tokenizer = XLNetTokenizerFast.from_pretrained(pretrained_model_name_or_path=model_name, config=config)
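The preview cuts create_model off right after the tokenizer is loaded. Below is a minimal sketch of how such a function typically continues, written as a continuation of the body above; the TFXLNetModel backbone, the average-pooling step, and the single sigmoid output are assumptions, not the original gist code.

    # --- assumed continuation of create_model(), not part of the gist preview ---
    from transformers import TFXLNetModel  # imported here only to keep the sketch self-contained
    transformer_model = TFXLNetModel.from_pretrained(model_name, config=config)

    # Token-id and attention-mask inputs of length max_length
    input_ids = tf.keras.layers.Input(shape=(max_length,), dtype=tf.int32, name='input_ids')
    attention_mask = tf.keras.layers.Input(shape=(max_length,), dtype=tf.int32, name='attention_mask')

    # XLNet hidden states -> pooled summary -> dense head (head size is an assumption)
    sequence_output = transformer_model(input_ids, attention_mask=attention_mask)[0]
    pooled = tf.keras.layers.GlobalAveragePooling1D()(sequence_output)
    output = tf.keras.layers.Dense(1, activation='sigmoid')(pooled)

    # The tokenizer loaded above would be used to encode the input text elsewhere
    return tf.keras.Model(inputs=[input_ids, attention_mask], outputs=output)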
from googletrans import Translator
import time
import ast
from random_user_agent.user_agent import UserAgent
from random_user_agent.params import SoftwareName, OperatingSystem

# Rotate browser user agents for the translation requests
software_names = [SoftwareName.CHROME.value]
operating_systems = [OperatingSystem.WINDOWS.value, OperatingSystem.LINUX.value]
user_agent_rotator = UserAgent(software_names=software_names, operating_systems=operating_systems, limit=200)
user_agent2 = user_agent_rotator.get_random_user_agent()
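The snippet stops before the rotated user agent is actually used; a short sketch of how it could be passed to googletrans follows (the sample text, target language, and sleep interval are assumptions):

translator = Translator(user_agent=user_agent2)  # googletrans accepts a custom user-agent string
result = translator.translate("Bonjour le monde", dest='en')
print(result.text)
time.sleep(1)  # throttle between requests to avoid being rate limited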
def split_list(alist, wanted_parts):  # split the list into the wanted number of even parts
    length = len(alist)
    return [alist[i * length // wanted_parts:(i + 1) * length // wanted_parts]  # standard even-split completion; the gist body is truncated here
            for i in range(wanted_parts)]
from faker.config import AVAILABLE_LOCALES

# Locales kept after filtering AVAILABLE_LOCALES
country = ['ar_EG', 'ar_PS', 'ar_SA', 'bg_BG', 'bs_BA', 'cs_CZ', 'de_DE', 'dk_DK', 'el_GR',
           'en_AU', 'en_CA', 'en_GB', 'en_NZ', 'en_US', 'es_ES', 'es_MX', 'et_EE', 'fa_IR', 'fi_FI',
           'fr_FR', 'hi_IN', 'hr_HR', 'hu_HU', 'hy_AM', 'it_IT', 'ja_JP', 'ka_GE', 'ko_KR', 'lt_LT',
           'lv_LV', 'ne_NP', 'nl_NL', 'no_NO', 'pl_PL', 'pt_BR', 'pt_PT', 'ro_RO', 'ru_RU', 'sl_SI',
           'sv_SE', 'tr_TR', 'uk_UA', 'zh_CN', 'zh_TW']
from tqdm import tqdm
from faker import Faker
import csv
def datagenerate(records, headers):
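The body of datagenerate is cut off in the preview. A plausible continuation is sketched below, assuming it streams `records` Faker rows into a CSV file; the output file name and the specific fields ('name', 'country', 'email') are assumptions.

    # --- assumed continuation of datagenerate(), not part of the gist preview ---
    fake = Faker(country)  # Faker accepts a list of locales such as the `country` list above
    with open('people.csv', 'w', newline='') as csvfile:  # output file name is an assumption
        writer = csv.DictWriter(csvfile, fieldnames=headers)
        writer.writeheader()
        for _ in tqdm(range(records), desc='generating rows'):
            # Illustrative fields; assumes headers == ['name', 'country', 'email']
            writer.writerow({'name': fake.name(),
                             'country': fake.country(),
                             'email': fake.email()})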
AmaleshV / abc (last active February 24, 2021 12:27)
https://colab.research.google.com/drive/1ln_OoicKGP23ysUmPiivJup2xWXIk5u9#scrollTo=Tf0ICF3O9vL5

AmaleshV / new (last active September 29, 2020 16:36)
### Automated narrative using SHAP for transaction monitoring ###
import numpy as np

instance_to_explain = 44  # single-instance value; overwritten by the loop below
final_ts['Alert_description_shap'] = ""
for instance_to_explain in range(0, 50):
    column_names_explain = final_ts.columns  # column names
    obs_instance_to_explain = final_ts.iloc[instance_to_explain]
    obs_instance_to_explain_shapvalues = np.around(shap_values[1][instance_to_explain], 3)
    prob_score_SAR_obs_instance_to_explain = ytest_rf[instance_to_explain, 1]
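The loop is truncated before the narrative text is assembled. One way to finish it is sketched below as a continuation of the loop body; the top-3 cutoff and the sentence template are assumptions, not the original gist.

    # --- assumed continuation of the loop body, not part of the gist preview ---
    # Rank features by absolute SHAP contribution and keep the three largest
    top_idx = np.argsort(-np.abs(obs_instance_to_explain_shapvalues))[:3]
    drivers = ", ".join(
        f"{column_names_explain[i]} = {obs_instance_to_explain[column_names_explain[i]]} "
        f"(SHAP {obs_instance_to_explain_shapvalues[i]:+.3f})"
        for i in top_idx
    )
    # Store the generated alert description for this transaction (positional indexing to match iloc above)
    final_ts.iloc[instance_to_explain, final_ts.columns.get_loc('Alert_description_shap')] = (
        f"Predicted SAR probability {prob_score_SAR_obs_instance_to_explain:.2f}; "
        f"main drivers: {drivers}."
    )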
# Anomaly-detection models, PyCaret-style create_model calls: Minimum Covariance Determinant,
# Subspace Outlier Detection, and Angle-Based Outlier Detection
mcd = create_model('mcd', fraction=outlier_fraction)
sod = create_model('sod', fraction=outlier_fraction)
abod = create_model('abod', fraction=outlier_fraction, verbose=True)
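These calls look like the PyCaret anomaly module's API; a minimal end-to-end sketch under that assumption follows (the `data` dataframe, the session_id, and the outlier_fraction value are placeholders, not values from the gist):

from pycaret.anomaly import setup, create_model, assign_model

outlier_fraction = 0.05                    # placeholder value
exp = setup(data, session_id=123)          # `data` is the transaction dataframe (assumed)
mcd = create_model('mcd', fraction=outlier_fraction)
mcd_results = assign_model(mcd)            # adds 'Anomaly' and 'Anomaly_Score' columns
print(mcd_results[['Anomaly', 'Anomaly_Score']].head())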