Tej Sukhatme (Hephaestus12) · Berlin, Germany
import plotly.express as px

# Scatter plot with an OLS trendline, comparing model estimates against reported cases.
fig = px.scatter(dff, x='real', y='cases', color='real',
                 labels={'cases': 'Estimate', 'real': 'Influenza cases'},
                 opacity=0.7,
                 trendline='ols',
                 title='Correlation Graph')

# Line chart of weekly case counts, one trace per category (model vs. actual).
fig = px.line(dff, x='week', y='cases', color='category',
              title='How does the model compare to the actual values?',
              labels={'cases': 'Number of influenza cases', 'week': 'Date'})
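Both figures assume a long-format dataframe dff; a minimal sketch of the columns they rely on (the values here are made up for illustration):

import pandas as pd

# Hypothetical minimal `dff` with the columns the figures above expect:
# one row per week per series, model estimates in 'cases', ground truth in 'real'.
dff = pd.DataFrame({
    'week':     ['2019-01', '2019-02', '2019-01', '2019-02'],
    'category': ['estimate', 'estimate', 'actual', 'actual'],
    'cases':    [120, 150, 115, 160],
    'real':     [110, 155, 110, 155],
})

Note that trendline='ols' requires statsmodels to be installed alongside plotly.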
# Callback: redraw the histogram whenever the model, country or year selection changes.
@app.callback(
    Output("count_graph", "figure"),
    [
        Input("model_selector", "value"),
        Input("country_names", "value"),
        Input("year_selector", "value"),
    ],
)
def make_histogram(model, countries, years):
    ...
# Callback: rebuild the main graph when the selected years change.
@app.callback(
    Output("main_graph", "figure"),
    [Input("year_selector", "value")],
)
def make_main_figure(years):
    # Collect the per-key incidence estimates into a flat list.
    estimates = data.get_incidence()
    cases = []
    for key in estimates:
        cases.append(estimates[key])
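For context, a minimal sketch of how these callbacks could be wired into a Dash app. Only the component ids come from the callbacks above; the component types, option values, and year range are assumptions:

from dash import Dash, dcc, html

app = Dash(__name__)
app.layout = html.Div([
    dcc.Dropdown(id="model_selector", options=["GLM", "baseline"]),  # hypothetical options
    dcc.Dropdown(id="country_names", multi=True),
    dcc.RangeSlider(id="year_selector", min=2010, max=2020, value=[2010, 2020]),
    dcc.Graph(id="count_graph"),
    dcc.Graph(id="main_graph"),
])

if __name__ == "__main__":
    app.run_server(debug=True)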
// Generate train/test splits and wrap them in Shogun containers.
const auto& [Xtrain, ytrain] = generate_train_data();
const auto& [Xtest, ytest] = generate_test_data();
auto features_train = std::make_shared<DenseFeatures<float64_t>>(Xtrain);
auto labels_train = std::make_shared<RegressionLabels>(ytrain);
auto features_test = std::make_shared<DenseFeatures<float64_t>>(Xtest);

// GLM(distribution, alpha, lambda, learning_rate, max_iterations, tolerance, eta),
// mirroring the pyglmnet hyperparameters used below.
auto glm = std::make_shared<GLM>(POISSON, 0.5, 0.1, 2e-1, 1000, 1e-6, 2.0);
glm->set_labels(labels_train);
glm->train(features_train);

// Gradient checks against the cost function (w, bias and glm_cost come from the surrounding test).
SGVector<float64_t> grad_w = glm_cost->get_gradient_weights(
    Xtrain, ytrain, w, bias, 0.1, 0.5, true, 2.0, POISSON);
float64_t grad_bias = glm_cost->get_gradient_bias(
    Xtrain, ytrain, w, bias, true, 2.0, POISSON);
import numpy as np
import scipy.sparse
import pyglmnet
from pyglmnet import simulate_glm

# Reference gradient from pyglmnet's (private) _grad_L2loss, used to verify the port;
# distr, alpha, Tau, reg_lambda, eta and the data come from the surrounding setup.
grad = pyglmnet._grad_L2loss(distr, alpha, Tau, reg_lambda, Xtrain, ytrain,
                             eta, beta, fit_intercept=True)

# Random true intercept and a sparse random coefficient vector.
beta0 = np.random.normal(0.0, 1.0, 1)[0]
beta = scipy.sparse.rand(n_features, 1, 0.1, random_state=0)
beta = np.array(beta.todense()).reshape(n_features)

# Following this I used the simulate_glm() method:
Xtrain = np.random.normal(0.0, 1.0, [n_samples, n_features])
ytrain = simulate_glm('poisson', beta0, beta, Xtrain, sample=True, random_state=0)
Xtest = np.random.normal(0.0, 1.0, [n_samples, n_features])
ytest = simulate_glm('poisson', beta0, beta, Xtest, sample=True, random_state=1)
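For comparison, the simulated data can also be fit with pyglmnet's public estimator. A minimal sketch, using hyperparameters matching the Shogun GLM constructor above:

from pyglmnet import GLM

# Poisson GLM with elastic-net mixing alpha=0.5, penalty reg_lambda=0.1,
# learning rate 2e-1, up to 1000 iterations, tolerance 1e-6, clipping eta=2.0.
glm = GLM(distr='poisson', alpha=0.5, reg_lambda=0.1, learning_rate=2e-1,
          max_iter=1000, tol=1e-6, eta=2.0)
glm.fit(Xtrain, ytrain)
yhat = glm.predict(Xtest)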
// Prediction: compute z_i = x_i . w + bias for every sample, then apply the
// distribution's non-linearity to obtain the conditional intensity mu.
features->dense_dot_range(out.vector, 0, num, NULL, m_w.vector, m_w.vlen, bias);
auto result = m_cost_function->non_linearity(out, m_compute_bias, m_eta, distribution);
return std::make_shared<RegressionLabels>(result);
// Gradient of the loss with respect to the weights.
auto n_samples = y.vlen;
auto z = compute_z(X, w, bias);
auto mu = non_linearity(z, compute_bias, eta, distribution);
auto grad_mu = gradient_non_linearity(z, eta, distribution);
SGVector<float64_t> grad_w(w.vlen);
SGVector<float64_t> a;
// grad_w = ((grad_mu.T) * X - ((y * grad_mu / mu).T) * X).T
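In math form, the weight gradient sketched in that comment (up to the 1/n factor and the elastic-net penalty term, which the fragment does not show) is:

\nabla_{\mathbf{w}} \mathcal{L} = X^{\top} \nabla\mu - X^{\top}\!\left(\frac{y \odot \nabla\mu}{\mu}\right)

where \nabla\mu is the elementwise derivative of the non-linearity at z = Xw + b, and \odot and the division are elementwise. For the plain Poisson link \mu = e^{z}, \nabla\mu = \mu and this reduces to X^{\top}(\mu - y).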
// Clipped exponential non-linearity: mu_i = exp(min(z_i, eta)),
// capping the intensity at exp(eta) to avoid overflow for large z.
result = SGVector<float64_t>(z.vlen);
for (auto i : range(z.vlen))
{
	if (z[i] > eta)
		result[i] = std::exp(eta);
	else
		result[i] = std::exp(z[i]);
}
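The same non-linearity in NumPy, for reference (a sketch; the function name is mine):

import numpy as np

def poisson_non_linearity(z, eta):
    # mu_i = exp(z_i), clipped at exp(eta) for numerical stability.
    return np.exp(np.minimum(z, eta))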