Tested on Android 6 with Tencent QQ.
- 设法将 /data/data/com.tencent.*/databases 目录拷贝出来(假设你已了解如何做到这一点,通常需要 root 权限)。
- 运行以下命令(假设你已了解如何安装/使用 sqlite3,并具备 Linux 基本知识):
$ sqlite3 872222222-IndexQQMsg.db
-- Third-party dependencies for the Torch training pipeline.
-- NOTE(review): stray " | |" extraction artifacts removed (they were syntax
-- errors), and the duplicate `require 'torch'` dropped — `require` caches in
-- package.loaded, so a second call was a no-op anyway.
require 'torch'
require 'xlua'
require 'image'
require 'cunn'
require 'cudnn'
require 'nn'
require 'optim'
require 'paths'
# ===== includes =====
library(MASS)

# ===== functions =====

# Objective value for the regularized factorization:
#   tr( Y'Y  -  Y A' (A A' + Lambda)^{-1} (Y A')'  -  Lambda )
#
# Args:
#   Y:      data matrix
#   B:      unused in the visible body; kept for interface compatibility
#           with existing callers -- TODO confirm against call sites
#   A:      factor/loading matrix
#   Lambda: regularization matrix added to A A' before inversion
# Returns: scalar, the trace of MAT.
Obj_func <- function(Y, B, A, Lambda) {
  MAT <- t(Y) %*% Y -
    Y %*% t(A) %*% solve(A %*% t(A) + Lambda) %*% t(Y %*% t(A)) -
    Lambda
  # Original wrote `return <- sum(diag(MAT))`, which assigns to a variable
  # named `return` and only returned the right value by accident of R's
  # last-expression semantics. Use an explicit return() call.
  return(sum(diag(MAT)))
}
require 'nn'

local vgg = nn.Sequential()

-- Building block: 3x3 convolution (stride 1, pad 1) -> batch normalization
-- (eps 1e-3) -> in-place ReLU. Appends all three layers to the shared `vgg`
-- container and returns it so construction calls can be chained.
-- NOTE(review): stray " | |" extraction artifacts removed; they made the
-- original lines syntactically invalid Lua.
local function ConvBNReLU(nInputPlane, nOutputPlane)
  vgg:add(nn.SpatialConvolution(nInputPlane, nOutputPlane, 3,3, 1,1, 1,1))
  vgg:add(nn.SpatialBatchNormalization(nOutputPlane, 1e-3))
  vgg:add(nn.ReLU(true))
  return vgg
end
-- Replicate one SparseLinear across the batch. Each clone shares its
-- weight/bias tensors AND their gradient buffers with `layer`, so there is a
-- single set of parameters regardless of batchSize. Every branch reshapes its
-- output to 1 x outputSize so JoinTable(1) can stack the rows into a
-- batchSize x outputSize matrix.
-- NOTE(review): `inputSize`, `outputSize`, `batchSize` and the globals
-- `m`/`layer`/`batchLayer` are defined/consumed elsewhere in the file;
-- names kept as-is. Stray " | |" extraction artifacts removed (syntax errors).
m = nn.ParallelTable()
layer = nn.SparseLinear(inputSize, outputSize)
m:add(nn.Sequential():add(layer):add(nn.Reshape(1, outputSize)))
for i = 2, batchSize do
  -- clone('weight', ...) shares storage rather than copying it
  local repLayer = layer:clone('weight', 'bias', 'gradWeight', 'gradBias')
  m:add(nn.Sequential():add(repLayer):add(nn.Reshape(1, outputSize)))
end
batchLayer = nn.Sequential():add(m):add(nn.JoinTable(1))
------- AlexNet: custom weight initialization -------
-- Visible portion of the model definition (construction continues past this
-- chunk). After each conv layer is added, its parameters are re-initialized:
-- weights ~ N(0, 0.01), biases zeroed.
-- NOTE(review): stray " | |" extraction artifacts removed (syntax errors).
model = nn.Sequential()

-- conv1: 3 -> 96, 11x11 kernel, stride 4, pad 2
model:add(cudnn.SpatialConvolution(3, 96, 11, 11, 4, 4, 2, 2))
model.modules[#model.modules].weight:normal(0, 0.01)
model.modules[#model.modules].bias:fill(0)
model:add(cudnn.ReLU())
model:add(inn.SpatialCrossResponseNormalization(5, 0.0001, 0.75, 1))
model:add(nn.SpatialMaxPooling(3, 3, 2, 2))

-- conv2: 96 -> 256, 5x5 kernel, stride 1, pad 2
model:add(cudnn.SpatialConvolution(96, 256, 5, 5, 1, 1, 2, 2))
model.modules[#model.modules].weight:normal(0, 0.01)
""" | |
L1-penalized minimization using the feature sign search algorithm. | |
""" | |
import logging | |
import numpy as np | |
log = logging.getLogger("feature_sign") | |
log.setLevel(logging.INFO) | |