PLearn 0.1
PLearn::LogAddVariable Class Reference
output = log(exp(input1)+exp(input2)), but it is computed in such a way as to preserve precision.
#include <LogAddVariable.h>
Public Member Functions
    LogAddVariable()
        Default constructor.
    LogAddVariable(Variable *input1, Variable *input2, const string &the_vector_logadd="none", bool call_build_=true)
        Convenience constructor.
    virtual string classname() const
    virtual OptionList & getOptionList() const
    virtual OptionMap & getOptionMap() const
    virtual RemoteMethodMap & getRemoteMethodMap() const
    virtual LogAddVariable * deepCopy(CopiesMap &copies) const
    virtual void build()
        Post-constructor.
    virtual void makeDeepCopyFromShallowCopy(CopiesMap &copies)
        Does the necessary operations to transform a shallow copy (this) into a deep copy by deep-copying all the members that need to be.
    virtual void recomputeSize(int &l, int &w) const
        Recomputes the length l and width w that this variable should have, according to its parent variables.
    virtual void fprop()
        Compute output given input.
    virtual void bprop()
    virtual void symbolicBprop()
        Compute a piece of new Var graph that represents the symbolic derivative of this Var.

Static Public Member Functions
    static string _classname_()
    static OptionList & _getOptionList_()
    static RemoteMethodMap & _getRemoteMethodMap_()
    static Object * _new_instance_for_typemap_()
    static bool _isa_(const Object *o)
    static void _static_initialize_()
    static const PPath & declaringFile()

Public Attributes
    string vector_logadd

Static Public Attributes
    static StaticInitializer _static_initializer_

Static Protected Member Functions
    static void declareOptions(OptionList &ol)
        Declare options (data fields) for the class.

Protected Attributes
    int vector_logadd_id
        Integer coding for 'vector_logadd': 0 <-> 'none'.
    Vec work
        Temporary work vector.
    Vec work_ptr
        Temporary work vector whose content must not be modified: it can only be used to point to other data in memory.

Private Types
    typedef BinaryVariable inherited

Private Member Functions
    void build_()
        This does the actual building.
Detailed Description

output = log(exp(input1)+exp(input2)), but it is computed in such a way as to preserve precision.
Definition at line 60 of file LogAddVariable.h.
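The precision issue with the naive formula is that exp(input1) or exp(input2) can overflow (or underflow to zero) in double precision even when the final logarithm is perfectly representable. The usual remedy is to factor out the larger argument, as in this minimal standalone sketch (stable_logadd is a hypothetical helper name, not the actual PLearn::logadd implementation):

#include <algorithm>
#include <cmath>
#include <cstdio>

// Numerically stable log(exp(a) + exp(b)): factor out the larger argument
// so that the remaining exponential is at most 1 and cannot overflow.
double stable_logadd(double a, double b)
{
    double hi = std::max(a, b);
    double lo = std::min(a, b);
    if (hi == -INFINITY)                 // both terms are exp(-inf) == 0
        return -INFINITY;
    return hi + std::log1p(std::exp(lo - hi));
}

int main()
{
    // The naive formula overflows: exp(1000) is +inf in double precision.
    double naive  = std::log(std::exp(1000.0) + std::exp(999.0));   // inf
    double stable = stable_logadd(1000.0, 999.0);                    // ~1000.313
    std::printf("naive = %g, stable = %g\n", naive, stable);
    return 0;
}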
typedef BinaryVariable PLearn::LogAddVariable::inherited [private]
Reimplemented from PLearn::BinaryVariable.
Definition at line 62 of file LogAddVariable.h.
PLearn::LogAddVariable::LogAddVariable()
Default constructor.
Definition at line 73 of file LogAddVariable.cc.
: vector_logadd("none"), vector_logadd_id(0) {}
PLearn::LogAddVariable::LogAddVariable(Variable *input1, Variable *input2, const string &the_vector_logadd = "none", bool call_build_ = true)
Convenience constructor.
Definition at line 78 of file LogAddVariable.cc.
References build_().
    : inherited(input1, input2,
                vl == "none" || vl == "per_row"    ? input1->length() : 1,
                vl == "none" || vl == "per_column" ? input1->width()  : 1,
                call_build_),
      vector_logadd(vl),
      vector_logadd_id(0)
{
    if (call_build_)
        build_();
}
string PLearn::LogAddVariable::_classname_() [static]
Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
OptionList & PLearn::LogAddVariable::_getOptionList_() [static]
Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
RemoteMethodMap & PLearn::LogAddVariable::_getRemoteMethodMap_() [static]
Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
bool PLearn::LogAddVariable::_isa_(const Object *o) [static]

Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
Object * PLearn::LogAddVariable::_new_instance_for_typemap_() [static]
Reimplemented from PLearn::Object.
Definition at line 68 of file LogAddVariable.cc.
StaticInitializer LogAddVariable::_static_initializer_ & PLearn::LogAddVariable::_static_initialize_() [static]
Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
void PLearn::LogAddVariable::bprop() [virtual]
Implements PLearn::Variable.
Definition at line 209 of file LogAddVariable.cc.
References PLearn::apply(), PLearn::Variable::gradient, i, PLearn::BinaryVariable::input1, PLearn::BinaryVariable::input2, PLearn::Variable::length(), PLearn::multiplyAcc(), n, PLearn::Variable::nelems(), PLearn::TVec< T >::resize(), PLearn::safeexp(), PLearn::Var::subMat(), PLearn::TVec< T >::toMat(), PLearn::Variable::value, vector_logadd_id, PLearn::Variable::width(), and work.
{
    if (!vector_logadd_id) {
        // TODO Note that these computations are not efficient at all.
        Vec grad1(nelems());
        grad1 = input1->value - value;
        apply(grad1, grad1, safeexp);
        input1->gradient += grad1 % gradient;

        Vec grad2(nelems());
        grad2 = input2->value - value;
        apply(grad2, grad2, safeexp);
        input2->gradient += grad2 % gradient;
    } else if (vector_logadd_id > 0) {
        int n = input2 ? int(round(input2->value[0])) : width();
        work.resize(n);
        for (int i = 0; i < length(); i++) {
            work << input1->matValue.subMat(i, 0, 1, n);
            work -= value[i];
            apply(work, work, safeexp);
            multiplyAcc(input1->matGradient.subMat(i, 0, 1, n).toVec(),
                        work, gradient[i]);
        }
    } else {
        int n = input2 ? int(round(input2->value[0])) : length();
        work.resize(n);
        for (int i = 0; i < width(); i++) {
            work << input1->matValue.subMat(0, i, n, 1);
            work -= value[i];
            apply(work, work, safeexp);
            work *= gradient[i];
            input1->matGradient.subMat(0, i, n, 1) += work.toMat(n, 1);
        }
    }
}
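All three branches use the same identity: for y = log(sum_j exp(x_j)), the partial derivative with respect to x_k is exp(x_k - y), i.e. the softmax weight of x_k, so each input receives the incoming gradient scaled by that weight (this is what grad1 = input1->value - value followed by safeexp computes above). A minimal standalone illustration of the rule, not using the PLearn API:

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
    std::vector<double> x = {1.0, 2.0, 3.0};

    // logadd over x, computed stably by factoring out the maximum.
    double hi = *std::max_element(x.begin(), x.end());
    double s = 0;
    for (double xi : x) s += std::exp(xi - hi);
    double y = hi + std::log(s);

    // Chain rule: dL/dx_k = dL/dy * exp(x_k - y), the softmax weight of x_k.
    double upstream = 0.5;   // hypothetical incoming gradient dL/dy
    for (double xi : x)
        std::printf("x = %g   dL/dx = %g\n", xi, upstream * std::exp(xi - y));
    return 0;
}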
void PLearn::LogAddVariable::build() [virtual]
Post-constructor.
The normal implementation should simply call inherited::build(), then this class's build_(). This method should be callable again at later times, after modifying some option fields to change the "architecture" of the object.
Reimplemented from PLearn::BinaryVariable.
Definition at line 114 of file LogAddVariable.cc.
References PLearn::BinaryVariable::build(), and build_().
{ inherited::build(); build_(); }
void PLearn::LogAddVariable::build_() [private]
This does the actual building.
Reimplemented from PLearn::BinaryVariable.
Definition at line 123 of file LogAddVariable.cc.
References PLearn::BinaryVariable::build(), PLearn::BinaryVariable::input1, PLearn::BinaryVariable::input2, PLearn::Var::length(), PLERROR, vector_logadd, vector_logadd_id, and PLearn::Var::width().
Referenced by build(), and LogAddVariable().
{
    // Transform the string 'vector_logadd' into an integer for faster
    // computations.
    if (vector_logadd == "none")
        vector_logadd_id = 0;
    else if (vector_logadd == "per_row")
        vector_logadd_id = 1;
    else if (vector_logadd == "per_column")
        vector_logadd_id = -1;
    else
        PLERROR("In LogAddVariable::build_ - Invalid value for "
                "'vector_logadd': %s", vector_logadd.c_str());

    if (!vector_logadd_id && input1 && input2) {
        if (input1->length() != input2->length() ||
            input1->width()  != input2->width())
            PLERROR("In LogAddVariable::build_ - input1 and input2 must "
                    "have the same size");
    }

    // Need to rebuild since correct sizes depend on 'vector_logadd_id'.
    inherited::build();
}
string PLearn::LogAddVariable::classname() const [virtual]
Reimplemented from PLearn::Object.
Definition at line 68 of file LogAddVariable.cc.
void PLearn::LogAddVariable::declareOptions(OptionList &ol) [static, protected]
Declare options (data fields) for the class.
Redefine this in subclasses: call declareOption(...) for each option, and then call inherited::declareOptions(options). Please call the inherited method AT THE END to get the options listed in a consistent order (from most recently defined to least recently defined).
static void MyDerivedClass::declareOptions(OptionList& ol)
{
    declareOption(ol, "inputsize", &MyObject::inputsize_,
                  OptionBase::buildoption,
                  "The size of the input; it must be provided");
    declareOption(ol, "weights", &MyObject::weights,
                  OptionBase::learntoption,
                  "The learned model weights");
    inherited::declareOptions(ol);
}
Parameters:
    ol: List of options that is progressively being constructed for the current class.
Reimplemented from PLearn::BinaryVariable.
Definition at line 97 of file LogAddVariable.cc.
References PLearn::OptionBase::buildoption, PLearn::declareOption(), PLearn::BinaryVariable::declareOptions(), and vector_logadd.
{
    declareOption(ol, "vector_logadd", &LogAddVariable::vector_logadd,
        OptionBase::buildoption,
        "Must be one of:\n"
        " - 'none'      : element-wize logadd over the two input matrices\n"
        " - 'per_column': vector logadd on each column of input1, using\n"
        "                 the first 'n' rows as given by input2\n"
        " - 'per_row'   : vector logadd on each row of input1, using the\n"
        "                 first 'n' columns as given by input2.");

    inherited::declareOptions(ol);
}
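To make the three modes concrete: 'per_row' reduces each row of a matrix to a single value, 'per_column' reduces each column, and 'none' combines two same-sized matrices element by element. A small standalone sketch of the first two reductions (plain C++, not the PLearn API; the logsumexp helper is only for illustration):

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Stable log(sum of exp) over a sequence of values (illustration only).
static double logsumexp(const std::vector<double>& v)
{
    double hi = *std::max_element(v.begin(), v.end());
    double s = 0;
    for (double x : v) s += std::exp(x - hi);
    return hi + std::log(s);
}

int main()
{
    // A 2x3 matrix standing in for input1.
    std::vector<std::vector<double>> m = {{0.1, 0.2, 0.3},
                                          {1.0, 2.0, 3.0}};

    // 'per_row': one logadd per row -> a 2x1 result.
    for (size_t i = 0; i < m.size(); ++i)
        std::printf("per_row[%zu]    = %g\n", i, logsumexp(m[i]));

    // 'per_column': one logadd per column -> a 1x3 result.
    for (size_t j = 0; j < m[0].size(); ++j)
        std::printf("per_column[%zu] = %g\n", j,
                    logsumexp({m[0][j], m[1][j]}));
    return 0;
}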
static const PPath & PLearn::LogAddVariable::declaringFile() [inline, static]
LogAddVariable * PLearn::LogAddVariable::deepCopy(CopiesMap &copies) const [virtual]
Reimplemented from PLearn::BinaryVariable.
Definition at line 68 of file LogAddVariable.cc.
void PLearn::LogAddVariable::fprop() [virtual]
Compute output given input.
Implements PLearn::Variable.
Definition at line 174 of file LogAddVariable.cc.
References PLearn::apply(), PLearn::Var::column(), i, PLearn::BinaryVariable::input1, PLearn::BinaryVariable::input2, PLearn::Variable::length(), PLearn::logadd(), n, PLearn::TVec< T >::resize(), PLearn::Var::subMat(), PLearn::TVec< T >::subVec(), PLearn::Variable::value, vector_logadd_id, PLearn::Variable::width(), work, and work_ptr.
{
    if (!vector_logadd_id) {
        // Ugly hack to make it compile with ICC.
#ifdef __INTEL_COMPILER
        PLearn::apply(input1->value, input2->value, value, logadd_for_icc);
#else
        PLearn::apply(input1->value, input2->value, value, logadd);
#endif
    } else if (vector_logadd_id > 0) {
        int n = input2 ? int(round(input2->value[0])) : width();
        for (int i = 0; i < length(); i++) {
            work_ptr = input1->matValue(i);
            if (input2)
                work_ptr = work_ptr.subVec(0, n);
            value[i] = logadd(work_ptr);
        }
    } else {
        int n = input2 ? int(round(input2->value[0])) : length();
        work.resize(n);
        for (int i = 0; i < width(); i++) {
            if (input2)
                work << input1->matValue.subMat(0, i, n, 1);
            else
                work << input1->matValue.column(i);
            value[i] = logadd(work);
        }
    }
}
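When input2 is present in the 'per_row' or 'per_column' modes, only the first n entries of each row (or column) take part in the logadd, mirroring the subVec(0, n) call above. A standalone sketch of that behaviour (not PLearn code; logsumexp_first_n is a hypothetical helper and n is assumed to come from elsewhere):

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Stable logadd restricted to the first n entries of a row.
static double logsumexp_first_n(const std::vector<double>& row, int n)
{
    double hi = *std::max_element(row.begin(), row.begin() + n);
    double s = 0;
    for (int j = 0; j < n; ++j)
        s += std::exp(row[j] - hi);
    return hi + std::log(s);
}

int main()
{
    std::vector<double> row = {1.0, 2.0, 3.0, 100.0, 200.0};
    // With n = 3, the large trailing entries are ignored entirely.
    std::printf("logadd over first 3 entries = %g\n",
                logsumexp_first_n(row, 3));
    return 0;
}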
OptionList & PLearn::LogAddVariable::getOptionList() const [virtual]
Reimplemented from PLearn::Object.
Definition at line 68 of file LogAddVariable.cc.
OptionMap & PLearn::LogAddVariable::getOptionMap() const [virtual]
Reimplemented from PLearn::Object.
Definition at line 68 of file LogAddVariable.cc.
RemoteMethodMap & PLearn::LogAddVariable::getRemoteMethodMap() const [virtual]
Reimplemented from PLearn::Object.
Definition at line 68 of file LogAddVariable.cc.
void PLearn::LogAddVariable::makeDeepCopyFromShallowCopy(CopiesMap &copies) [virtual]
Does the necessary operations to transform a shallow copy (this) into a deep copy by deep-copying all the members that need to be.
This needs to be overridden by every class that adds "complex" data members to the class, such as Vec, Mat, PP<Something>, etc. Typical implementation:
void CLASS_OF_THIS::makeDeepCopyFromShallowCopy(CopiesMap& copies)
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(complex_data_member1, copies);
    deepCopyField(complex_data_member2, copies);
    ...
}
Parameters:
    copies: A map used by the deep-copy mechanism to keep track of already-copied objects.
Reimplemented from PLearn::BinaryVariable.
Definition at line 151 of file LogAddVariable.cc.
References PLearn::deepCopyField(), PLearn::BinaryVariable::makeDeepCopyFromShallowCopy(), work, and work_ptr.
{
    inherited::makeDeepCopyFromShallowCopy(copies);
    deepCopyField(work, copies);
    deepCopyField(work_ptr, copies);
}
void PLearn::LogAddVariable::recomputeSize(int &l, int &w) const [virtual]

Recomputes the length l and width w that this variable should have, according to its parent variables.
This is used, for example, by sizeprop(). The default version simply returns the current dimensions, so make sure to overload it in subclasses if this is not appropriate.
Reimplemented from PLearn::Variable.
Definition at line 160 of file LogAddVariable.cc.
References PLearn::BinaryVariable::input1, PLearn::Var::length(), vector_logadd_id, and PLearn::Var::width().
{
    if (input1) {
        l = vector_logadd_id >= 0 ? input1->length() : 1;
        w = vector_logadd_id <= 0 ? input1->width()  : 1;
    } else
        l = w = 0;
}
void PLearn::LogAddVariable::symbolicBprop() [virtual]
Compute a piece of new Var graph that represents the symbolic derivative of this Var.
Reimplemented from PLearn::Variable.
Definition at line 250 of file LogAddVariable.cc.
References PLearn::exp(), PLearn::Variable::g, PLearn::BinaryVariable::input1, PLearn::BinaryVariable::input2, PLERROR, and vector_logadd_id.
{
    if (!vector_logadd_id) {
        input1->accg(g * (exp(input1) / (exp(input1) + exp(input2))));
        input2->accg(g * (exp(input2) / (exp(input1) + exp(input2))));
    } else {
        PLERROR("In LogAddVariable::symbolicBprop - Not implemented");
    }
}
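Note that exp(input1)/(exp(input1)+exp(input2)) is algebraically the logistic sigmoid of (input1 - input2), which is the numerically safer way to evaluate the same weight. A quick standalone check of the equality (not part of PLearn):

#include <cmath>
#include <cstdio>

int main()
{
    double a = 2.0, b = -1.0;
    double ratio   = std::exp(a) / (std::exp(a) + std::exp(b));
    double sigmoid = 1.0 / (1.0 + std::exp(b - a));   // sigmoid(a - b)
    std::printf("ratio = %.12f, sigmoid(a-b) = %.12f\n", ratio, sigmoid);
    return 0;
}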
StaticInitializer PLearn::LogAddVariable::_static_initializer_ [static]

Reimplemented from PLearn::BinaryVariable.
Definition at line 76 of file LogAddVariable.h.
string PLearn::LogAddVariable::vector_logadd

Definition at line 66 of file LogAddVariable.h.
Referenced by build_(), and declareOptions().
int PLearn::LogAddVariable::vector_logadd_id [protected]
Integer coding for 'vector_logadd': 0 <-> 'none', +1 <-> 'per_row', -1 <-> 'per_column'.
Definition at line 93 of file LogAddVariable.h.
Referenced by bprop(), build_(), fprop(), recomputeSize(), and symbolicBprop().
Vec PLearn::LogAddVariable::work [protected]
Temporary work vector.
Definition at line 96 of file LogAddVariable.h.
Referenced by bprop(), fprop(), and makeDeepCopyFromShallowCopy().
Vec PLearn::LogAddVariable::work_ptr [protected]
Temporary work vector whose content must not be modified: it can only be used to point to other data in memory.
Definition at line 100 of file LogAddVariable.h.
Referenced by fprop(), and makeDeepCopyFromShallowCopy().