This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# RGB palette (0-255 per channel) used to colorize the terrain heightmap.
# Labels are inferred from the variable names — water/land/sand/peaks/rock.
blue = [65, 105, 225]       # royal blue (water)
green = [34, 139, 34]       # forest green (land)
beach = [238, 214, 175]     # sand
snow = [255, 250, 250]      # snow white (peaks)
mountain = [139, 137, 137]  # grey (rock)
def add_color(world): | |
color_world = np.zeros(world.shape+(3,)) | |
for i in range(shape[0]): | |
for j in range(shape[1]): |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Running loss histories for the generator and the discriminator,
# appended to by the training loop that follows.
generator_losses = []
discriminator_losses = []
# for some number of rounds (epochs) | |
for epoch in range(n_epochs): | |
# track losses for this epoch | |
gen_loss_epoch = 0 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Multiply column `col1` by 5 — the same operation in pandas and PySpark.
# pandas: vectorized elementwise arithmetic on the Series
df[col1] = df[col1]*5
# PySpark: withColumn replaces the column with the derived expression
df = df.withColumn(col1, F.col(col1)*5)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Convert a 1/0 indicator column `col2` to booleans.
# pandas: np.where picks True where the condition holds, False elsewhere
df[col2] = np.where(df[col2] == 1, True, False)
# PySpark: when/otherwise is the conditional-expression equivalent
df = df.withColumn(col2, F.when(F.col(col2) == 1, True).otherwise(False))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Cast `col3` to numeric, but only when every row actually looks numeric.
# pandas
row_ct = df.shape[0]
# to_numeric(errors='coerce') turns non-numbers into NaN and count() drops NaN,
# so num_ct == row_ct means every value parsed cleanly as a number.
num_ct = pd.to_numeric(df[col3], errors='coerce').count()
# Fallback check via regex: optional sign, thousands groups ("1,234.5"),
# optional decimal part; the trailing (^$) alternative also accepts empty strings.
num_regex = r"^((-)?[0-9]+)(,[0-9]+)*(\.[0-9]+)?$|(^$)"
# re.match returns None on failure, so all() is False as soon as one row fails.
all_are_nums = all(df[col3].fillna('').astype(str).apply(lambda x: re.match(num_regex, x)))
if (num_ct == row_ct) or all_are_nums:
    df[col3] = pd.to_numeric(df[col3], errors='coerce')
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Strip characters that are not part of a number from `col3`, then cast.
# NOTE(review): in the raw string r"[^0-9\\.]" the "\\" is a literal backslash,
# so backslashes are *kept* along with digits and dots — r"[^0-9.]" was
# probably intended; confirm before changing, as both patterns are deployed below.
# pandas
df[col3] = df[col3].replace(regex=r"[^0-9\\.]", value="")
df[col3] = pd.to_numeric(df[col3], errors='coerce')
# PySpark: same cleanup, then cast the cleaned string to double
df = df.withColumn(col3, F.regexp_replace(F.col(col3), r"[^0-9\\.]", '').cast("double"))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Convert float-like values in `col4` to ints, mapping missing values to None.
# BUG FIX: the original tested `m in [None, float('nan'), np.nan, math.nan]`,
# but NaN != NaN, so only the np.nan singleton matched (by identity) and any
# other NaN float fell through to int(float(m)), which raises ValueError.
# An explicit isnan check catches every NaN.
df[col4] = df[col4].apply(
    lambda m: None if m is None or (isinstance(m, float) and math.isnan(m))
    else int(float(m)))
# PySpark
def floatint(x):
    """Parse x as a float, then truncate to int (handles strings like '3.0')."""
    return int(float(x))
# The udf passes NULLs through as None; the when/otherwise guard below is a
# second layer of NULL protection.
int_udf = F.udf(lambda m: None if m is None else floatint(m))
df = df.withColumn(col4, F.when(F.col(col4).isNotNull(), int_udf(F.col(col4))).otherwise(None))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Parse `col5` as timestamps; unparseable values become missing.
# pandas: errors='coerce' yields NaT for invalid dates
df[col5] = pd.to_datetime(df[col5], errors='coerce')
# PySpark
# Date-like pattern: 2-4 digit groups separated by . - / or \, optional
# HH:MM:SS time and millisecond suffix; the ^$ alternative accepts empty strings.
data_regex = r"\d{2,4}(\.|\-|\/|\\)+\d{2,4}(\.|\-|\/|\\)+\d{2,4}(\s)*(\d{2}\:\d{2}\:\d{2})?(\.\d{3})?|^$"
# NOTE(review): regexp_replace(...).isNotNull() is true for *every* non-null
# value regardless of the regex — F.col(col5).rlike(data_regex) was likely
# intended as the guard; confirm before changing.
df = df.withColumn(col5, F.when(F.regexp_replace(F.col(col5), data_regex, '').isNotNull(),
                                F.to_timestamp(F.col(col5), 'yyyy/MM/dd')).otherwise(None))
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import noise
import numpy as np
# NOTE(review): scipy.misc.toimage was removed in SciPy >= 1.2 — replace with
# PIL.Image.fromarray when upgrading.
from scipy.misc import toimage

# Perlin-noise generation parameters (standard meanings for the `noise` library;
# confirm against noise.pnoise2 docs).
shape = (1024, 1024)  # output grid dimensions
scale = 100.0         # zoom factor: larger values give smoother features
octaves = 6           # number of noise layers combined
persistence = 0.5     # amplitude falloff per octave
lacunarity = 2.0      # frequency growth per octave
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Standard library
import os

# Third-party: numeric / plotting
import numpy as np
import matplotlib.pyplot as plt

# Third-party: PyTorch
import torch
import torch.nn as nn
import torch.optim as optim
# NOTE(review): torch.autograd.Variable has been deprecated since torch 0.4;
# plain tensors carry autograd now — kept for compatibility with existing code.
from torch.autograd import Variable
from torch.utils import data
from torchvision.datasets import ImageFolder
from torchvision import transforms, datasets
NewerOlder