Skip to content

Instantly share code, notes, and snippets.

@karlnapf
Last active July 17, 2018 08:39
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save karlnapf/95a9c72a642d61ec268a39407f8761b2 to your computer and use it in GitHub Desktop.
// second try: move the dispatching into subclasses but re-use code via mixins
CMachine::train(CFeatures* f)
{
...
// this is to not force ourselves to deploy this shogun wide at one go
if (supports_dense_train_dispatching() && f->get_feature_class()==C_DENSE)
dispatched_dense_train(f);
// could do a big if then else here that says: i cannot train this machine with the provided feature type
// eg user passes string features but algorithm doesnt support that. Would need to fully move to new system for that though
...
// default call uses existing system of untemplated train_machine calls
train_machine()
...
}
// both of those (they are protected) to be overloaded by mixin
virtual CMachine::dispatched_dense_train(CFeatures*); // throws error
virtual bool CMachine::supports_dense_train_dispatching() { return false }
// mixin class to add template parameter dispatcher code for CDenseFeatures to any class.
// Need one for every feature type (dense,string, etc)
template <class T>
CDenseTrainMixIn : public T
{
bool CMachine::supports_dense_train_dispatching() { return true; }
void dispatched_dense_train(CFeatures* f)
{
// note that this only dispatches the template type, so all those calls are for a single method
// forgetting to implement it results in a compile error
switch(f->get_feature_type)
case FT_DENSEREAL:
T::train_machine_dense<float64_t>(f->as<CDenseFeatures<float64_t>());
break;
case FT_SHORTREAL:
T::train_machine_dense<float32_t>(f->as<CDenseFeatures<float32_t>());
break;
...
default:
SG_ERROR("Algorithm does not support training with %s)
}
}
CLARS : public CDenseTrainMixIn<CLinearMachine>
{
// compile/link error if this method is not implemented by LARS
template <class X>
void train_machine_dense(CDenseFeatures<X> f)
{
// the developer in here has compile time typed features, i.e. no more dispatching, can use typed linalg calls etc
}
}
// First attempt (doesnt work so ignore)
CMachine::train(CFeatures* f)
{
switch(f->get_feature_type)
{
case FT_DENSEREAL:
// this method throws an error by default, but can be overloaded by subclasses
train_machine_dense<float64_t>(f->as<CDenseFeatures<float64_t>());
break;
...
}
}
//Subclasses just implement a general typed version of train, i.e.
template <class T_FEATURES>
CPerceptron::train_machine_dense(CDenseFeatures<T_FEATURES>* f)
{
}
//what we dont want is explicit typed implementations, as then we cannot distinguish e.g. float64 and float32. I.e.
CPerceptron::train_machine_dense(CDenseFeatures<float64_t>* f)
{
}
// Problem: Cannot have templated virtual base class methods. Doh!
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment