CLICK ME
Yes, even hidden code blocks!
# Minimal demo statement: write a greeting to stdout.
greeting = "hello world!"
print(greeting)
import inspect | |
import ast | |
from textwrap import dedent | |
import tensorflow as tf | |
def escape_op_name(name): | |
""" | |
It has to match with "^[A-Za-z0-9.][A-Za-z0-9_.\\-/]*$" | |
""" |
# Short "h"-prefixed aliases for the common `hdfs dfs` subcommands,
# e.g. `hcat /path/file` instead of `hdfs dfs -cat /path/file`.
# (Fixed: removed trailing ` | |` table-extraction artifacts that made
# each line invalid shell — `| |` pipes into an empty command.)
alias happendToFile='hdfs dfs -appendToFile'
alias hcat='hdfs dfs -cat'
alias hchecksum='hdfs dfs -checksum'
alias hchgrp='hdfs dfs -chgrp'
alias hchmod='hdfs dfs -chmod'
alias hchown='hdfs dfs -chown'
alias hcopyFromLocal='hdfs dfs -copyFromLocal'
alias hcopyToLocal='hdfs dfs -copyToLocal'
alias hcount='hdfs dfs -count'
alias hcp='hdfs dfs -cp'
Today, a lot of data is geolocated (meaning it has a position in space); this is known as GIS data.
We frequently need to perform operations on such data, such as aggregations, and many optimisations exist for doing so.
# Example for my blog post at: | |
# http://danijar.com/introduction-to-recurrent-networks-in-tensorflow/ | |
import functools | |
import sets | |
import tensorflow as tf | |
def lazy_property(function): | |
attribute = '_' + function.__name__ |
# Example for my blog post at: | |
# https://danijar.com/introduction-to-recurrent-networks-in-tensorflow/ | |
import functools | |
import sets | |
import tensorflow as tf | |
def lazy_property(function): | |
attribute = '_' + function.__name__ |
# Working example for my blog post at: | |
# https://danijar.github.io/structuring-your-tensorflow-models | |
import functools | |
import tensorflow as tf | |
from tensorflow.examples.tutorials.mnist import input_data | |
def doublewrap(function): | |
""" | |
A decorator decorator, allowing to use the decorator to be used without |
#http://blog.echen.me/2011/07/18/introduction-to-restricted-boltzmann-machines/ | |
#https://www.tensorflow.org/versions/r0.7/api_docs/python/constant_op.html#random_uniform | |
import tensorflow as tf | |
import numpy as np | |
import input_data | |
import Image | |
from util import tile_raster_images |
# Tiny example of a 3-layer neural network with dropout in the 2nd hidden layer.
# Output layer is linear with an L2 cost (regression model);
# hidden-layer activation is tanh.
# (Fixed: removed trailing ` | |` table-extraction artifacts that made the
# assignments syntactically invalid Python; fixed "nerual" typo.)
import numpy as np

# Training/model hyperparameters.
n_epochs = 100   # number of training passes over the data
n_samples = 100  # number of training examples
n_in = 10        # input dimensionality
n_hidden = 5     # hidden layer size