@philippslang
philippslang / settings.json
Created July 19, 2018 08:41
VS Code Python - Exclude search and watch
{
    "search.exclude": {
        "**/node_modules": true,
        "**/bower_components": true,
        "**/venv/**": true,
        "**/.git/**": true
    },
    "files.watcherExclude": {
        "**/.git/**": true,
        "**/venv/**": true
    }
}
@philippslang
philippslang / notify.js
Created July 9, 2018 14:58
Azure lambda post
var request = require('request');

module.exports = function (context, perf) {
    // Ping the performance endpoint, then log the triggering blob's metadata.
    request('http://ixciperf.westeurope.cloudapp.azure.com:5000/api/update', function (error, response, body) {
        if (error) {
            context.log(error);
        }
        if (!error && response.statusCode == 200) {
            context.log("JavaScript blob trigger function processed blob \n Name:", context.bindingData.name, "\n Blob Size:", perf.length, "Bytes");
        }
        context.done(); // signal completion to the Functions runtime
    });
};
@philippslang
philippslang / launch.json
Created July 6, 2018 10:28
VS Code pytest debugging settings
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: loadbalancer test",
            "type": "python",
            "request": "launch",
            "module": "pytest",
            "args": [
                "--no-cov"
            ]
        }
    ]
}
ghci> (0.3+0.2)+0.1
0.6
ghci> 0.3+(0.2+0.1)
0.6000000000000001
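Floating-point addition is not associative: the grouping decides which intermediate sums get rounded, as the GHCi session above shows. The same behaviour appears with any IEEE-754 double; a quick Python check (illustrative, not from the original gists):

# Double-precision addition is not associative; the grouping decides which
# intermediate results get rounded.
a, b, c = 0.3, 0.2, 0.1
print((a + b) + c)  # 0.6
print(a + (b + c))  # 0.6000000000000001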
# Predict from a partially observed production history.
time_stage_change = 15.
num_production_history = 8  # number of production history points used for prediction
time, production, stage = calc_two_stage_decline(p0, exp_stage_zero, exp_stage_one, time_max,
                                                 time_stage_change, num=num_timesteps)
features = np.full((1, num_timesteps, num_features), na)
features[0, :num_production_history, ifeature_production] = production[:num_production_history]
features[0, :, ifeature_stage] = stage[:]
normalizer_features.transform(features.reshape(num_timesteps, num_features))  # scales in place (copy=False)
targets = model.predict(features)
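model.predict returns values in the scaler's (0, 1) range; mapping them back to production units would presumably go through the target scaler fitted further down (a sketch, not part of the original snippets):

# Hypothetical post-processing: undo the MinMax scaling on the predicted sequence.
predicted_production = normalizer_targets.inverse_transform(
    targets.reshape(num_timesteps, num_targets)).reshape(num_timesteps)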
import keras.models as kem  # assumed import behind kem.load_model

if 1:  # train from scratch; use the else branch to reload a previously saved model
    model = make_rnn(num_features, num_targets, num_timesteps, num_units)
    model.fit(features, targets, epochs=10, batch_size=24, validation_split=0.2)
    model.save(FNAME_MODEL)
else:
    model = kem.load_model(FNAME_MODEL)
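make_rnn itself is not included in these snippets; a minimal Keras sketch that is consistent with the parameter names above, as an assumption about the architecture rather than a copy of it:

import keras.layers as kl
import keras.models as kem

def make_rnn(num_features, num_targets, num_timesteps, num_units):
    # One LSTM layer returning the full sequence, then a per-timestep dense
    # layer; the sigmoid output matches targets scaled to (0, 1).
    model = kem.Sequential()
    model.add(kl.LSTM(num_units, return_sequences=True,
                      input_shape=(num_timesteps, num_features)))
    model.add(kl.TimeDistributed(kl.Dense(num_targets, activation='sigmoid')))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model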
import sklearn.preprocessing as preproc  # assumed import behind preproc.MinMaxScaler

Scaler = preproc.MinMaxScaler
normalizer_features = Scaler(copy=False)  # copy=False: scale the arrays in place
normalizer_features.fit_transform(features.reshape(num_sequences * num_timesteps, num_features))
normalizer_targets = Scaler(feature_range=(0, 1), copy=False)  # (0, 1) to match the sigmoid output
normalizer_targets.fit_transform(targets.reshape(num_sequences * num_timesteps, num_targets))
isample = 0
for time_stage_change in np.linspace(*bounds_stage_change_time, num_discrete_stage_changes):
    for _ in range(num_realizations_per_stage_change):
        _, production, stage = calc_two_stage_decline(p0, exp_stage_zero, exp_stage_one, time_max,
                                                      time_stage_change, num=num_timesteps)
        # One training sample per length of observed production history.
        for num_sample_points in range(1, num_timesteps):
            features[isample, :, ifeature_stage] = stage[:]
            features[isample, :num_sample_points, ifeature_production] = production[:num_sample_points]
            targets[isample, 1:, itarget_production] = production[1:]
            isample += 1
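calc_two_stage_decline is not shown in these snippets; a sketch of what it might do, assuming exponential decline whose exponent switches at time_stage_change, with a small amount of noise per realization (the noise level is an assumption):

import numpy as np

def calc_two_stage_decline(p0, exp_stage_zero, exp_stage_one, time_max,
                           time_stage_change, num=50, noise=0.02):
    # Exponential decline p0 * exp(-b * t); the decline exponent b switches
    # from exp_stage_zero to exp_stage_one at time_stage_change, and small
    # multiplicative noise makes each realization differ.
    time = np.linspace(0., time_max, num)
    stage = (time >= time_stage_change).astype(float)
    exponent = np.where(stage == 0., exp_stage_zero, exp_stage_one)
    production = p0 * np.exp(-exponent * time) * (1. + noise * np.random.randn(num))
    return time, production, stage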
p0 = 50.  # production rate at time zero
na = -p0  # numeric encoding of a not-available value
exp_stage_zero = 0.12  # exponent of production decline for stage zero
exp_stage_one = 0.1  # exponent of production decline for stage one
time_max = 55.
bounds_stage_change_time = (20., 40.)  # we'll generate training data with stage changes between these times
num_timesteps = 50  # number of production values per sequence
num_discrete_stage_changes = 5  # we'll generate profiles with this many different stage change times
num_realizations_per_stage_change = 10  # and for each of these times, this many realizations (random noise differs)
num_features = 2
ifeature_production = 0
ifeature_stage = 1
num_targets = 1
itarget_production = 0
num_units = 24  # LSTM units in the recurrent layer
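The allocation of the feature and target arrays is not shown either; given the generation loop above, which produces one sample per stage change time, realization and observed-history length, it would presumably be something like:

import numpy as np

# Hypothetical allocation consistent with the generation loop above.
num_sequences = num_discrete_stage_changes * num_realizations_per_stage_change * (num_timesteps - 1)
features = np.full((num_sequences, num_timesteps, num_features), na)
targets = np.full((num_sequences, num_timesteps, num_targets), na)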