Kemal Erdem (burnpiro)

burnpiro / a_train.py
Last active Jan 22, 2021
Tensorflow 2 custom dataset Sequence
import tensorflow as tf
from data.data_generator import DataGenerator
from config import cfg
## Create train dataset
train_datagen = DataGenerator(file_path=cfg.TRAIN.DATA_PATH, config_path=cfg.TRAIN.ANNOTATION_PATH)
## Create validation dataset
val_generator = DataGenerator(file_path=cfg.TEST.DATA_PATH, config_path=cfg.TEST.ANNOTATION_PATH, debug=False)
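The DataGenerator imported above is not shown in this preview. As a rough idea of what a tf.keras.utils.Sequence-based generator with this constructor signature could look like (the JSON annotation format, batch size, and placeholder tensor shapes below are assumptions, not the original implementation):

import json
import math
import numpy as np
import tensorflow as tf

class DataGenerator(tf.keras.utils.Sequence):
    """Sketch of a Sequence-based dataset; not the original gist code."""

    def __init__(self, file_path, config_path, batch_size=32, debug=False):
        self.file_path = file_path
        self.batch_size = batch_size
        self.debug = debug
        # Assumption: the annotation file is JSON and lists one entry per sample.
        with open(config_path) as f:
            self.samples = json.load(f)

    def __len__(self):
        # Number of batches per epoch.
        return math.ceil(len(self.samples) / self.batch_size)

    def __getitem__(self, idx):
        batch = self.samples[idx * self.batch_size:(idx + 1) * self.batch_size]
        # Placeholder loading logic; the real generator reads images and annotations here.
        x = np.zeros((len(batch), 224, 224, 3), dtype=np.float32)
        y = np.zeros((len(batch),), dtype=np.float32)
        return x, y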
burnpiro / tb_example.py
Created Oct 22, 2020
TensorBoard example for custom model
# Load the TensorBoard notebook extension
%load_ext tensorboard
# Clear out any prior log data. (optional)
!rm -rf logs
import datetime
import io
import itertools
import numpy as np
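The preview cuts off at the imports. The usual continuation of this setup, sketched here with an assumed log-directory naming scheme, creates a timestamped log directory, a summary writer for custom images, and the standard Keras TensorBoard callback:

import datetime
import tensorflow as tf

# Timestamped log directory so every run shows up separately in TensorBoard.
log_dir = 'logs/fit/' + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')

# Writer for custom summaries such as confusion-matrix images.
file_writer = tf.summary.create_file_writer(log_dir + '/cm')

# Standard callback that logs loss and metric curves during model.fit().
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)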
burnpiro / helpers_tb.py
Created Oct 22, 2020
List of helpers to generate images for tensorboard
import matplotlib.pyplot as plt

def plot_confusion_matrix(cm, class_names):
    """
    Returns a matplotlib figure containing the plotted confusion matrix.

    Args:
        cm (array, shape = [n, n]): a confusion matrix of integer classes
        class_names (array, shape = [n]): string names of the integer classes
    """
    figure = plt.figure(figsize=(8, 8))
    plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
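The preview ends inside plot_confusion_matrix. The helper that typically pairs with it turns the matplotlib figure into an image tensor that tf.summary.image can log; a sketch (the writer, tag, and step names are assumptions):

import io
import matplotlib.pyplot as plt
import tensorflow as tf

def plot_to_image(figure):
    """Converts a matplotlib figure to a batched PNG tensor for tf.summary.image."""
    buf = io.BytesIO()
    plt.savefig(buf, format='png')
    plt.close(figure)
    buf.seek(0)
    image = tf.image.decode_png(buf.getvalue(), channels=4)
    return tf.expand_dims(image, 0)  # add a batch dimension

# Example usage (assumed names):
# with file_writer.as_default():
#     tf.summary.image('Confusion Matrix', plot_to_image(figure), step=epoch)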
burnpiro / v8.md
Last active Oct 2, 2020
Basic V8 guide

Install V8 on Linux

Requirements

  • git

Installation

depot_tools

test.py
unnormalized_train_data = extract_data(path_to_train_file)
normalized_train_data, train_scale = preproc_data(unnormalized_train_data, norm_cols, scale_cols)
# Create and train the model
unnormalized_test_data = extract_data(path_to_test_file)
normalized_test_data, _ = preproc_data(unnormalized_test_data, norm_cols, scale_cols, train_scale)
test.py
if scale_cols:
    # Scale year and week number into the (0, 1) range
    new_data[scale_cols] = MinMaxScaler(feature_range=(0, 1)).fit(
        train_scale[scale_cols]).transform(new_data[scale_cols])
test.py
if norm_cols:
    # Normalize temperature and precipitation
    new_data[norm_cols] = StandardScaler().fit(
        train_scale[norm_cols]).transform(new_data[norm_cols])
preproc_data.py
import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from data_info import cols_to_norm, cols_to_scale
def preproc_data(data, norm_cols=cols_to_norm, scale_cols=cols_to_scale, train_scale=None):
    """
    :param data: Dataframe
    :param norm_cols: List<string>
    :param scale_cols: List<string>
    :param train_scale: Dataframe
    :return: Tuple(Dataframe, Dataframe)
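The docstring is cut off in this preview. Combining it with the test.py fragments above, a plausible reconstruction of the full function looks like this (the copy of the input frame and the fallback to fitting the scalers on the data itself are assumptions):

import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from data_info import cols_to_norm, cols_to_scale

def preproc_data(data, norm_cols=cols_to_norm, scale_cols=cols_to_scale, train_scale=None):
    """
    Normalize and scale the selected columns.

    :param data: Dataframe
    :param norm_cols: List<string>
    :param scale_cols: List<string>
    :param train_scale: Dataframe used to fit the scalers (defaults to data itself)
    :return: Tuple(Dataframe, Dataframe)
    """
    new_data = data.copy()
    if train_scale is None:
        train_scale = data
    if norm_cols:
        # Normalize temperature and precipitation
        new_data[norm_cols] = StandardScaler().fit(
            train_scale[norm_cols]).transform(new_data[norm_cols])
    if scale_cols:
        # Scale year and week number into the (0, 1) range
        new_data[scale_cols] = MinMaxScaler(feature_range=(0, 1)).fit(
            train_scale[scale_cols]).transform(new_data[scale_cols])
    return new_data, train_scale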
burnpiro / data_info.py
Created Jul 31, 2020
DengAI data info
LABEL_COLUMN = 'total_cases'
NUMERIC_COLUMNS = ['year',
                   'weekofyear',
                   'ndvi_ne',
                   'ndvi_nw',
                   'ndvi_se',
                   'ndvi_sw',
                   'precipitation_amt_mm',
                   'reanalysis_air_temp_k',
                   'reanalysis_avg_temp_k',
burnpiro / functionOptimisation.md
Created Aug 1, 2019
Spread VS JSON.parse performance in function optimisation

Spread vs JSON.parse speed when calling a simple function

const N = 100000;

function test(obj) {
  var result = obj.a + obj.b;
  return result;
}
function test2(obj) {
  var result = obj.a + obj.b;