注意:本文内容适用于 Tmux 2.3 及以上的版本,但是绝大部分的特性低版本也都适用,鼠标支持、VI 模式、插件管理在低版本可能会与本文不兼容。
启动新会话:
tmux [new -s 会话名 -n 窗口名]
恢复会话:
""" | |
L1-penalized minimization using the feature sign search algorithm. | |
""" | |
import logging | |
import numpy as np | |
log = logging.getLogger("feature_sign") | |
log.setLevel(logging.INFO) | |
------- AlexNet: using custom weight initialization
-- Weights are drawn from N(0, 0.01); biases are zeroed explicitly.
-- NOTE(review): fragment is truncated after conv2's weight init — the
-- remaining layers (and conv2's bias init) live outside this chunk.
model = nn.Sequential()
-- conv1: 3 -> 96 maps, 11x11 kernel, stride 4, pad 2
model:add(cudnn.SpatialConvolution(3, 96, 11, 11, 4, 4, 2, 2))
model.modules[#model.modules].weight:normal(0, 0.01)
model.modules[#model.modules].bias:fill(0)
model:add(cudnn.ReLU())
-- local response normalization across channels (size 5)
model:add(inn.SpatialCrossResponseNormalization(5, 0.0001, 0.75, 1))
model:add(nn.SpatialMaxPooling(3, 3, 2, 2))
-- conv2: 96 -> 256 maps, 5x5 kernel, stride 1, pad 2
model:add(cudnn.SpatialConvolution(96, 256, 5, 5, 1, 1, 2, 2))
model.modules[#model.modules].weight:normal(0, 0.01)
-- Emulate a batched SparseLinear by replicating one layer batchSize times
-- inside a ParallelTable. All replicas share weight/bias storage (and the
-- corresponding gradient storage), so a parameter update through any clone
-- updates them all.
m = nn.ParallelTable()
layer = nn.SparseLinear(inputSize, outputSize)
m:add(nn.Sequential():add(layer):add(nn.Reshape(1, outputSize)))
for i = 2, batchSize do
   -- clone with shared parameters AND shared gradient buffers
   local repLayer = layer:clone('weight', 'bias', 'gradWeight', 'gradBias')
   m:add(nn.Sequential():add(repLayer):add(nn.Reshape(1, outputSize)))
end
-- stack the per-sample 1 x outputSize rows into batchSize x outputSize
batchLayer = nn.Sequential():add(m):add(nn.JoinTable(1))
require 'nn'

local vgg = nn.Sequential()

--- Append a Conv(3x3, stride 1, pad 1) -> BatchNorm -> ReLU(in-place)
-- building block to `vgg`, preserving spatial dimensions.
-- @param nInputPlane  number of input feature maps
-- @param nOutputPlane number of output feature maps
-- @return the mutated `vgg` container, allowing chained calls
local function ConvBNReLU(nInputPlane, nOutputPlane)
  vgg:add(nn.SpatialConvolution(nInputPlane, nOutputPlane, 3,3, 1,1, 1,1))
  vgg:add(nn.SpatialBatchNormalization(nOutputPlane, 1e-3))
  vgg:add(nn.ReLU(true))
  return vgg
end
# = = = = = include = = = = = #
library(MASS)

# = = = = = function = = = = = #
# Objective value: tr( Y'Y - Y A' (A A' + Lambda)^{-1} (Y A')' - Lambda ).
# Y, A, Lambda must be conformable matrices. B is accepted for interface
# compatibility but is unused in this body
# (NOTE(review): confirm whether B should enter the objective).
Obj_func <- function(Y, B, A, Lambda) {
  MAT <- t(Y) %*% Y -
    Y %*% t(A) %*% solve(A %*% t(A) + Lambda) %*% t(Y %*% t(A)) -
    Lambda
  # Bug fix: the original `return <- sum(diag(MAT))` assigned to a local
  # variable named `return`, so the value was only returned invisibly.
  return(sum(diag(MAT)))
}
-- Torch dependencies. `require` caches in package.loaded, so the duplicate
-- 'torch' require in the original was harmless but redundant; removed.
require 'torch'
require 'xlua'
require 'image'
require 'cunn'
require 'cudnn'
require 'nn'
require 'optim'
require 'paths'