Gist: sample code for Aliyun Log Service (SLS) time-series ML functions — AR(2) data generation, log upload, AR prediction and change-point detection queries.
def generate(phi_1, phi_2, c, sigma, size):
    """Simulate an AR(2) process: x_t = c + phi_1*x_{t-1} + phi_2*x_{t-2} + e_t.

    Args:
        phi_1: first-lag AR coefficient.
        phi_2: second-lag AR coefficient.
        c: constant term of the process.
        sigma: standard deviation of the Gaussian noise e_t.
        size: number of samples to generate.

    Returns:
        numpy array of length `size` containing the simulated series.
    """
    # Unconditional mean of the (assumed stationary) AR(2) process
    mu = c / (1 - phi_1 - phi_2)
    # Fix the RNG seed so repeated calls return identical data
    np.random.seed(17)
    ar_data = np.zeros(size)
    # Seed the recursion from the stationary mean
    ar_data[0] = mu + np.random.normal(0, sigma)
    # NOTE(review): the original snippet was truncated after the first
    # sample; the standard AR(2) recursion is restored below.
    if size > 1:
        ar_data[1] = mu + phi_1 * (ar_data[0] - mu) + np.random.normal(0, sigma)
    for t in range(2, size):
        ar_data[t] = (c
                      + phi_1 * ar_data[t - 1]
                      + phi_2 * ar_data[t - 2]
                      + np.random.normal(0, sigma))
    return ar_data
from aliyun.log import *
from aliyun.log.util import base64_encodestring
# Define the upload helper
def put_logs(client, project, logstore, contents, compress=False):
    """Upload a batch of log entries to an Aliyun SLS logstore.

    Args:
        client: aliyun.log LogClient instance.
        project: SLS project name.
        logstore: target logstore name.
        contents: list of logs; each log is a list of (key, value)
            string pairs.
        compress: whether to compress the payload before upload.
    """
    topic = ''
    source = ''
    logitem_list = []  # LogItem list
    for content in contents:
        log_item = LogItem()
        # NOTE(review): the original snippet was truncated here; the
        # standard SDK pattern (set contents, batch, send) is restored.
        log_item.set_contents(content)
        logitem_list.append(log_item)
    request = PutLogsRequest(project, logstore, topic, source,
                             logitem_list, compress=compress)
    client.put_logs(request)
# Generate 100 samples of the AR(2) series
ar_data = generate(1.4, -0.48, 5, 0.5, 100)
# Convert to the (key, value) pair format expected by put_logs.
# range() already yields ascending values, so the original
# sorted(range(0, 100)) was redundant.
contents = []
for timestamp in range(100):
    contents.append([
        ("timestamp", str(timestamp)),  # values must be strings
        ("value", str(ar_data[timestamp])),
    ])
* | select
-- AR-model time-series prediction over (timestamp, value); the args 2
-- and 10 are presumably the AR order and window/forecast length —
-- confirm against the Aliyun SLS ML function docs
ts_predicate_ar(timestamp, value, 2, 10)
limit 110
* | select
-- change-point detection over (timestamp, value); 50 is presumably the
-- minimum segment/window length — confirm in the SLS ML function docs
ts_cp_detect(timestamp, value, 50)
limit 1000
* | select
-- NOTE(review): the inner query exposes only the unaliased
-- ts_predicate_ar(...) column, so selecting unixtime/predict here
-- likely fails unless the engine auto-expands the result — verify
unixtime,
predict
from (
select
ts_predicate_ar(timestamp, value, 2, 10)
from log
)
* | select
-- the function result is aliased as p and unnested so each element
-- becomes a row `preds`; array access is 1-indexed here, with
-- preds[1] exposed as unixtime and preds[3] as predict
preds[1] as unixtime,
preds[3] as predict
from (
select
ts_predicate_ar(timestamp, value, 2, 10) as p
from log
), unnest(p) as t(preds)
select
-- fetch raw (timestamp, value) points from the last hour, ascending
timestamp,
value
where
-- to_unixtime(localtimestamp) is the current epoch time; 3600 s = 1 h
-- NOTE(review): no `* |` prefix or FROM clause — presumably SLS
-- defaults to the current logstore's `log` table; confirm
timestamp > ( to_unixtime(localtimestamp) - 3600)
order by timestamp asc
# Build a 1000-sample series with a change point: the constant term
# jumps from c=5 to c=10 for samples 450-549, shifting the process mean,
# then reverts — input for the ts_cp_detect query.
ar_data = np.concatenate((
    generate(1.4, -0.48, 5, 0.5, 450),
    generate(1.4, -0.48, 10, 0.5, 100),
    generate(1.4, -0.48, 5, 0.5, 450),
))
# range() already yields ascending values; the original sorted() was
# redundant. The original snippet was also truncated mid-append.
contents = []
for timestamp in range(1000):
    contents.append([
        ("timestamp", str(timestamp)),  # values must be strings
        ("value", str(ar_data[timestamp])),
    ])
# Upload a CSV dataset row by row: each row becomes one log entry whose
# keys are the CSV column names.
data = pd.read_csv("./day.csv")
for i in range(data.shape[0]):
    # Stringify every cell (SLS log values must be strings); map(str, ...)
    # replaces the original lambda + redundant tuple() wrapper.
    contents = [list(zip(data.columns, map(str, data.loc[i, :])))]
    put_logs(client, project, logstore, contents)