Skip to content

Instantly share code, notes, and snippets.

@vene
Created November 22, 2017 23:25
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save vene/575906b9a5abf9457e6da94a895718c9 to your computer and use it in GitHub Desktop.
Save vene/575906b9a5abf9457e6da94a895718c9 to your computer and use it in GitHub Desktop.
test cpu-only node with multi-device dynet
# Build the multi-device test program against a CUDA build of dynet.
# Override DYNET_PATH / EIGEN_PATH / MKL_PATH on the command line or
# environment as needed, e.g. `make DYNET_PATH=/opt/dynet`.
DYNET_PATH ?= /home/vlad/code/dynet
EIGEN_PATH ?= /home/vlad/code/eigen
CC = g++
DEBUG = -g
INCLUDES = -I$(DYNET_PATH) -I$(EIGEN_PATH)
LIBS = -L$(DYNET_PATH)/build-cuda/dynet/
CFLAGS = -O3 -Wall -Wno-sign-compare -Wno-int-in-bool-context -c -fmessage-length=0 $(INCLUDES) -DEIGEN_FAST_MATH -fPIC -fno-finite-math-only -Wno-missing-braces -std=c++11 -funroll-loops
LFLAGS = $(LIBS) -ldynet

ifdef MKL_PATH
# NOTE: these must use `+=`. The original `VAR = $(VAR) ...` form is an
# infinite self-reference for recursively-expanded variables and GNU Make
# aborts with "Recursive variable 'INCLUDES' references itself".
INCLUDES += -I$(MKL_PATH)/include
LIBS += -L$(MKL_PATH)/lib/intel64
# (dropped the stray trailing ';' after -lmkl_core -- it would have been
# passed verbatim to the linker as a bogus input file)
LFLAGS += -lmkl_intel_lp64 -lmkl_sequential -lmkl_core
CFLAGS += -DEIGEN_USE_MKL_ALL
endif

all: multidev

# Link step. $(OBJS) is not defined in this file; it expands to empty
# unless provided externally -- TODO confirm that is intentional.
multidev: $(OBJS) multidev.o
	$(CC) $(OBJS) multidev.o $(LFLAGS) -o multidev

# CFLAGS contains -c, so this produces multidev.o without -o.
multidev.o : multidev.cc
	$(CC) $(CFLAGS) multidev.cc

clean:
	rm -f *.o *~ multidev
#include <dynet/dynet.h>
#include <dynet/expr.h>
#include <iostream>
using namespace std;
using namespace dynet;
int main(int argc, char** argv)
{
dynet::initialize(argc, argv);
Device* cpu_device = get_device_manager()->get_global_device("CPU");
Device* gpu_device = get_device_manager()->get_global_device("GPU:0");
const unsigned DIM = 5;
ParameterCollection m;
Parameter p_W = m.add_parameters({DIM, DIM}, gpu_device);
Parameter p_o = m.add_parameters({DIM}, gpu_device);
ComputationGraph cg;
auto W = parameter(cg, p_W);
auto o = parameter(cg, p_o);
auto x = dynet::random_normal(cg, {DIM});
auto y = W * x;
y = to_device(y, cpu_device);
auto z = sparsemax(y);
z = to_device(z, gpu_device);
auto l = dot_product(o, z);
cg.forward(l);
cout << "z: " << endl << z.value() << endl;
cout << "l: " << l.value() << endl;
cg.backward(l);
cout << "grad W" << endl << W.gradient() << endl;
return 0;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment