In [1]: from huggingface_hub import snapshot_download
In [2]: snapshot_download(repo_id="bert-base-chinese", ignore_regex=["*.h5", "*.ot", "*.msgpack"], cache_dir='MODEL WHERE STORE')
ignore_regex: skip the other, non-essential files.
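Note: in recent huggingface_hub releases the regex arguments were replaced by glob-style allow_patterns / ignore_patterns, so the same download would look roughly like this (treat as a version-dependent sketch):

from huggingface_hub import snapshot_download

# Same download, using the newer glob-style filter argument.
snapshot_download(
    repo_id="bert-base-chinese",
    ignore_patterns=["*.h5", "*.ot", "*.msgpack"],   # skip non-PyTorch weight files
    cache_dir='MODEL WHERE STORE',                   # placeholder path from the note above
)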
n02119789 1 kit_fox
n02100735 2 English_setter
n02110185 3 Siberian_husky
n02096294 4 Australian_terrier
n02102040 5 English_springer
n02066245 6 grey_whale
n02509815 7 lesser_panda
n02124075 8 Egyptian_cat
n02417914 9 ibex
n02123394 10 Persian_cat
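These entries follow the ImageNet "WordNet ID, class index, label" format; a minimal sketch for turning such lines into a lookup dict (the file name map_clsloc.txt is an assumption):

# Hypothetical parser for lines of the form "n02119789 1 kit_fox".
def load_class_map(path="map_clsloc.txt"):
    wnid_to_label = {}
    with open(path) as f:
        for line in f:
            wnid, idx, label = line.split()[:3]
            wnid_to_label[wnid] = (int(idx), label)   # e.g. "n02119789" -> (1, "kit_fox")
    return wnid_to_label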
import torch as th
import numpy as np


class AutoClipGradient(object):
    """Percentile-based ("AutoClip"-style) gradient clipping with a rolling norm history."""

    def __init__(self, max_history=1000, clip_percentile=99.9, max_grad_norm=0.5):
        self.max_history = max_history          # how many recent gradient norms to remember
        self.clip_percentile = clip_percentile  # percentile of the history used as clip value
        self.history = []                       # rolling history of observed gradient norms
        self.max_grad_norm = max_grad_norm      # hard cap on the clip value

    def _compute_grad_norms(self, params, grad_scale=1.0):
        # One reasonable completion: total L2 norm over all parameter gradients,
        # divided by grad_scale when AMP loss scaling is in use.
        norms = [p.grad.detach().norm(2) / grad_scale for p in params if p.grad is not None]
        return th.norm(th.stack(norms), 2).item()
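A hedged sketch of how this history could drive the actual clipping step: append the current total norm, take the configured percentile of the history (capped at max_grad_norm), and clip with torch.nn.utils.clip_grad_norm_. The helper name autoclip_step and the loop placement below are assumptions, not the author's original code.

def autoclip_step(clipper, params):
    # `clipper` is an AutoClipGradient instance from above; this helper is an assumed companion.
    params = [p for p in params if p.grad is not None]
    total_norm = clipper._compute_grad_norms(params)           # current total gradient norm
    clipper.history.append(total_norm)
    clipper.history = clipper.history[-clipper.max_history:]   # keep only the recent window
    clip_value = min(np.percentile(clipper.history, clipper.clip_percentile),
                     clipper.max_grad_norm)                    # never exceed the hard cap
    th.nn.utils.clip_grad_norm_(params, clip_value)

# Typical placement in a training loop (model / optimizer are placeholders):
#   clipper = AutoClipGradient()
#   loss.backward()
#   autoclip_step(clipper, model.parameters())
#   optimizer.step()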
Submit the job with: sbatch jupyter.sh
Example jupyter.sh:
#!/bin/bash
#SBATCH --nodes=1
#SBATCH --job-name=jupyter
#SBATCH --gres=gpu:1
#SBATCH --time=2-00:00:00
# Assumed final step (not in the original note): start the notebook server on the allocated node.
jupyter notebook --no-browser --ip=0.0.0.0 --port=8888
Create a .env file under the src (source code) folder, with contents such as:
PYTHONPATH=path_a:path_b
(path_a is preferably an absolute path)
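If the .env file is loaded at runtime (for example with python-dotenv; this is an assumption, since editors such as VS Code can also pick the file up directly), a minimal sketch for making those paths importable:

# Assumed usage with python-dotenv; the .env location and variable name mirror the note above.
import os
import sys
from dotenv import load_dotenv

load_dotenv(".env")  # reads PYTHONPATH=path_a:path_b into os.environ
for p in os.environ.get("PYTHONPATH", "").split(os.pathsep):
    if p and p not in sys.path:
        sys.path.append(p)  # make path_a / path_b importable in the running process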
import zipfile

path_to_zip_file = '...'          # zip archive to extract (placeholder kept from the note)
directory_to_extract_to = './'    # destination directory
with zipfile.ZipFile(path_to_zip_file, 'r') as zip_ref:
    zip_ref.extractall(directory_to_extract_to)