This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def generate(phi_1, phi_2, c, sigma, size):
    """Generate a synthetic AR(2) time series.

    x[t] = c + phi_1 * x[t-1] + phi_2 * x[t-2] + e,   e ~ N(0, sigma)

    Args:
        phi_1, phi_2: AR coefficients.
        c: constant term; the stationary mean is c / (1 - phi_1 - phi_2).
        sigma: standard deviation of the Gaussian noise.
        size: number of samples to generate (must be >= 1).

    Returns:
        numpy array of length ``size``.
    """
    # Stationary mean of the AR(2) process.
    mu = c / (1 - phi_1 - phi_2)
    # Fix the RNG seed so repeated runs produce identical data.
    np.random.seed(17)
    # Generate the data.
    ar_data = np.zeros(size)
    # Seed the first two samples around the stationary mean.
    ar_data[0] = mu + np.random.normal(0, sigma)
    if size > 1:
        ar_data[1] = mu + np.random.normal(0, sigma)
    # NOTE(review): the captured snippet was truncated after ar_data[0];
    # the loop below restores the standard AR(2) recursion implied by the
    # lines above — confirm against the original tutorial source.
    for t in range(2, size):
        ar_data[t] = (c
                      + phi_1 * ar_data[t - 1]
                      + phi_2 * ar_data[t - 2]
                      + np.random.normal(0, sigma))
    return ar_data
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from aliyun.log import *
from aliyun.log.util import base64_encodestring


# Upload helper: push a batch of records to an Aliyun SLS logstore.
def put_logs(client, project, logstore, contents, compress=False):
    """Upload ``contents`` to the given SLS project/logstore.

    Args:
        client: aliyun.log client used to send the request.
        project: SLS project name.
        logstore: SLS logstore name.
        contents: iterable of records; each record is a list of
            (key, value) string pairs for one LogItem.
        compress: whether to compress the request payload.
    """
    topic = ''
    source = ''
    logitemList = []  # LogItem list
    for c in contents:
        logItem = LogItem()
        # NOTE(review): the captured snippet was truncated after the line
        # above; the standard SDK usage below restores it — confirm against
        # the original tutorial source. LogItem's timestamp defaults to the
        # current time, so no explicit set_time is needed here.
        logItem.set_contents(c)
        logitemList.append(logItem)
    request = PutLogsRequest(project, logstore, topic, source,
                             logitemList, compress=compress)
    client.put_logs(request)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Generate the synthetic AR(2) series.
ar_data = generate(1.4, -0.48, 5, 0.5, 100)
# Convert the data into the (key, value) string-pair format put_logs expects.
timestamps = sorted(range(0, 100))
contents = []
for timestamp in timestamps:
    contents.append([
        ("timestamp", str(timestamp)),  # values must be converted to strings
        ("value", str(ar_data[timestamp])),
    ])
# NOTE(review): the captured snippet was truncated mid-loop; closing the
# record and uploading it is the obvious next step (matching the later
# snippets in this file) — confirm against the original tutorial source.
put_logs(client, project, logstore, contents)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
* | SELECT ts_predicate_ar(timestamp, value, 2, 10) LIMIT 110
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
* | SELECT ts_cp_detect(timestamp, value, 50) LIMIT 1000
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
* |
SELECT
    unixtime,
    predict
FROM (
    SELECT ts_predicate_ar(timestamp, value, 2, 10)
    FROM log
)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
* |
SELECT
    preds[1] AS unixtime,
    preds[3] AS predict
FROM (
    SELECT ts_predicate_ar(timestamp, value, 2, 10) AS p
    FROM log
), unnest(p) AS t(preds)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
SELECT timestamp, value
WHERE timestamp > (to_unixtime(localtimestamp) - 3600)
ORDER BY timestamp ASC
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Build a series with a level shift (constant term 5 -> 10 -> 5) so the
# change-point detection query has something to find.
ar_data = np.concatenate((
    generate(1.4, -0.48, 5, 0.5, 450),
    generate(1.4, -0.48, 10, 0.5, 100),
    generate(1.4, -0.48, 5, 0.5, 450),
))
timestamps = sorted(range(1000))
contents = []
for timestamp in timestamps:
    contents.append([
        ("timestamp", str(timestamp)),  # values must be converted to strings
        ("value", str(ar_data[timestamp])),
    ])
# NOTE(review): the captured snippet was truncated mid-loop; the "value"
# pair and the upload call restore the same pattern used by the earlier
# upload snippet in this file — confirm against the original tutorial source.
put_logs(client, project, logstore, contents)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Load day.csv and upload it one row (= one log record) at a time.
data = pd.read_csv("./day.csv")
for row_idx in range(data.shape[0]):
    # One record: (column_name, stringified_value) pairs for this row.
    record = [(col, str(val)) for col, val in zip(data.columns, data.loc[row_idx, :])]
    put_logs(client, project, logstore, [record])