Skip to content

Instantly share code, notes, and snippets.

@masakiaota
Last active December 10, 2020 07:50
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save masakiaota/fa35f3e733bc351bfd8f04448216aa1f to your computer and use it in GitHub Desktop.
# Build a timestamped output path for the trained model, e.g.
# ./model/base_line_sklearn_study20201210_075000.jb
from datetime import datetime

now = datetime.now()
_stamp = now.strftime('%Y%m%d_%H%M%S')
filename = ''.join(['./model/base_line_sklearn_study', _stamp, '.jb'])
from multiprocessing import cpu_count
# keras
from keras.models import Model, Sequential
from keras.callbacks import TensorBoard, EarlyStopping
from keras.optimizers import adam, Nadam
from keras import backend as K
from keras.models import load_model
from keras.layers import Input, InputLayer, Dense, RepeatVector, Lambda, TimeDistributed, Flatten, Reshape, Conv1D, MaxPool1D, Conv2DTranspose, Activation
import os

# Ensure the ./output directory exists before anything writes into it.
# NOTE: the original had `os.mkdir` un-indented under the `if` (an
# IndentationError); `makedirs(..., exist_ok=True)` also avoids the
# check-then-create race of `os.path.exists` + `os.mkdir`.
os.makedirs('output', exist_ok=True)
# --- multiprocessing helpers --------------------------------------------------
# The original snippet called undefined placeholder names (`function`,
# `listofargs`, `関数`, `リスト`) at module level and never joined the pools;
# wrapped here as reusable, documented functions with context-managed pools.
from multiprocessing import Pool
import multiprocessing as multi


def run_parallel(func, args_list):
    """Map *func* over *args_list* in parallel using half the CPU cores.

    Returns the results as a list, in input order. *func* must be
    picklable (a top-level function or builtin).
    """
    # max(1, ...) guards against cpu_count() == 1 yielding a 0-worker pool.
    with Pool(max(1, multi.cpu_count() // 2)) as pool:
        return pool.map(func, args_list)


def run_parallel_verbose(func, args_list, report_every=50):
    """Like :func:`run_parallel`, using all cores and printing progress.

    Progress is reported every *report_every* completed items.
    """
    total = len(args_list)
    results = []
    with Pool(multi.cpu_count()) as pool:
        for i, result in enumerate(pool.imap(func, args_list)):
            if i % report_every == 0:
                print(i, "/", total)
            results.append(result)
    return results
# notebook basic import
# Make a notebook cell display every expression's value, not just the last one.
from IPython.core.interactiveshell import InteractiveShell
InteractiveShell.ast_node_interactivity = "all"
import numpy as np
import pandas as pd
import joblib
from pathlib import Path
# tqdm variant that auto-picks the notebook/console widget; `tq.pandas()`
# registers `.progress_apply` on pandas objects.
from tqdm.autonotebook import tqdm as tq
tq.pandas()
# pandarallel adds `.parallel_apply` to pandas; initialized here with a
# fixed 16 workers and a progress bar (tune nb_workers to the machine).
from pandarallel import pandarallel
pandarallel.initialize(nb_workers=16,progress_bar=True)
# Template for globbing input files by extension:
# PATH = Path("input path to directory")
# paths = list(PATH.glob("*.<extension>"))
# visualize
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
%config InlineBackend.figure_formats = {'png', 'retina'}
# import japanize_matplotlib
%load_ext autoreload
%autoreload #これをするたびにインポートしたモジュールをリロードする
%load_ext autoreload
%autoreload #これをするたびにインポートしたモジュールをリロードする
# scraping
# Example: collect CWE category links from the NVD vulnerability-categories
# page. NOTE: performs a live network request at import/run time.
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
html = urlopen('https://nvd.nist.gov/vuln/categories')
bs = BeautifulSoup(html.read(), 'html.parser') # second argument selects the parser; 'lxml' is another commonly used one
# All <span> elements whose id contains "cweIdEntry".
cwes=bs.findAll('span',{'id':re.compile("cweIdEntry")})
from functools import partial
new_func=partial(func, arg1=hoge, arg2=unko)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment