Skip to content

Commit

Permalink
doc
Browse files Browse the repository at this point in the history
  • Loading branch information
Hanjun-Dai committed Dec 20, 2016
1 parent 944c194 commit 34c59c0
Show file tree
Hide file tree
Showing 21 changed files with 3,142 additions and 19 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
build/
.vscode/
html/
*~
*.swp
make_common
Expand Down
2,303 changes: 2,303 additions & 0 deletions Doxyfile

Large diffs are not rendered by default.

23 changes: 23 additions & 0 deletions include/nn/arg_max.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,12 @@
namespace gnn
{

/**
* @brief Operator for argument maximum.
*
* @tparam mode { CPU/GPU }
* @tparam Dtype { float/double/int }
*/
template<typename mode, typename Dtype>
class ArgMax : public Factor
{
Expand All @@ -20,19 +26,36 @@ class ArgMax : public Factor
return "ArgMax";
}

/**
* return an int vector
*/
using OutType = std::shared_ptr< DTensorVar<mode, int> >;

/**
* @brief Creates an out variable.
*
* @return { return an int vector }
*/
OutType CreateOutVar()
{
auto out_name = fmt::sprintf("%s:out_0", this->name);
return std::make_shared< DTensorVar<mode, int> >(out_name);
}

/**
* @brief constructor
*
* @param[in] _name The name
* @param[in] _axis keep which axis?
*/
ArgMax(std::string _name, uint _axis = 0);

virtual void Forward(std::vector< std::shared_ptr<Variable> >& operands,
std::vector< std::shared_ptr<Variable> >& outputs) override;

/**
* the axis to be kept
*/
uint axis;
};

Expand Down
26 changes: 26 additions & 0 deletions include/nn/cross_entropy.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,12 @@ void CalcCrossEntropy(DTensor<CPU, Dtype>& prob, SpTensor<CPU, Dtype>& label, DT
template<typename Dtype>
void CalcCrossEntropy(DTensor<GPU, Dtype>& prob, SpTensor<GPU, Dtype>& label, DTensor<CPU, Dtype>& out);

/**
* @brief Operator for cross entropy
*
* @tparam mode { CPU/GPU }
* @tparam Dtype { float/double }
*/
template<typename mode, typename Dtype>
class CrossEntropy : public Factor
{
Expand All @@ -28,17 +34,37 @@ class CrossEntropy : public Factor

using OutType = std::shared_ptr< DTensorVar<mode, Dtype> >;

/**
* @brief Creates an out variable.
*
* @return { return a vector with same Dtype as prediction }
*/
OutType CreateOutVar()
{
auto out_name = fmt::sprintf("%s:out_0", this->name);
return std::make_shared< DTensorVar<mode, Dtype> >(out_name);
}

/**
* @brief constructor
*
* @param[in] _name The name
* @param[in] _need_softmax Whether need to do softmax before calculating cross entropy
* @param[in] _properr Whether to propagate error
*/
CrossEntropy(std::string _name, bool _need_softmax, PropErr _properr = PropErr::T);

virtual void Forward(std::vector< std::shared_ptr<Variable> >& operands,
std::vector< std::shared_ptr<Variable> >& outputs) override;

/**
* whether need to do softmax for the input (whether the input is logits)
*/
bool need_softmax;

/**
* temporary variable used for calculating probability
*/
DTensor<mode, Dtype> probs;
};

Expand Down
29 changes: 29 additions & 0 deletions include/nn/factor.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,47 @@
namespace gnn
{

/**
 * @brief Abstract base class of operators. Since we represent the computation graph as a
 *        factor graph, each Factor models the relation (operation) between its input
 *        and output variables.
 */
class Factor
{
public:
/**
 * @brief Constructor
 *
 * @param[in] _name The name (identifier) of this factor
 * @param[in] _properr Whether to propagate error (gradients) during the backward stage
 */
Factor(std::string _name, PropErr _properr = PropErr::T) : name(_name), properr(_properr) {}

/**
 * @brief Forward function: consumes the operand variables and fills the output variables.
 *        The default body is the NOT_IMPLEMENTED macro; concrete operators must override.
 *
 * @param operands The input arguments (variables) to this operator
 * @param outputs The output variables produced by this operator
 */
virtual void Forward(std::vector< std::shared_ptr<Variable> >& operands,
std::vector< std::shared_ptr<Variable> >& outputs) NOT_IMPLEMENTED

/**
 * @brief Backward function; default body is the NOT_IMPLEMENTED macro.
 *        NOTE(review): presumably computes gradients with respect to the operands —
 *        confirm against the derived-class implementations.
 *
 * @param operands The input arguments (variables) to this operator
 * @param outputs The output variables produced by this operator
 */
virtual void Backward(std::vector< std::shared_ptr<Variable> >& operands,
std::vector< std::shared_ptr<Variable> >& outputs) NOT_IMPLEMENTED

/**
 * the name (identifier) of this operator
 */
std::string name;

/**
 * whether to propagate error during the backward stage
 */
PropErr properr;
};

Expand Down
128 changes: 120 additions & 8 deletions include/nn/factor_graph.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,39 @@ namespace gnn

class Variable;

/**
* @brief the computation graph; responsible for representing the factor graph, as well as the execution
*/
class FactorGraph
{
public:
typedef std::shared_ptr<Variable> VarPtr;
typedef std::vector<std::shared_ptr<Variable> > VarList;
typedef std::vector<std::shared_ptr<Factor> > FactorList;

/**
* @brief constructor
*/
FactorGraph();

/**
* @brief Adds a variable to this computation graph
*
* @param[in] var The variable to be added
* @param[in] need_feed Whether this is an actual variable or a placeholder variable
*/
void AddVar(VarPtr var, bool need_feed = true);

/**
* @brief Adds a factor (operator) to the graph
*
* @param[in] factor The factor
* @param[in] vars_in The variables (inputs) to the factor
* @param[in] var_out The variable (output) produced by the factor
*
* @tparam FactorPtr { the factor class type }
* @tparam VarPtrList { for the variable class }
*/
template<typename FactorPtr, typename VarPtrList>
void AddFactor(FactorPtr factor, VarPtrList vars_in, VarPtr var_out)
{
Expand Down Expand Up @@ -57,33 +79,123 @@ class FactorGraph
factor_list.push_back(factor);
}

/**
* @brief feed forward function
*
* @param[in] targets The targets which the user wants to fetch
* @param[in] feed_dict The feed dictionary; used to set the placeholders
* @param[in] n_thread # threads used in this function
*
* @return { The targets required by user}
*/
VarList FeedForward(std::initializer_list< VarPtr > targets,
std::map<std::string, void*> feed_dict,
uint n_thread = 1);

/**
* @brief back propagation function; used to calculate the gradient with respect to each variable
*
* @param[in] targets The top variables that contributes to the objective
* (typically only loss should be provided here)
* @param[in] n_thread # threads used in this function
*/
void BackPropagate(std::initializer_list< VarPtr > targets, uint n_thread = 1);

void SequentialForward(std::initializer_list< VarPtr > targets,
std::map<std::string, void*> feed_dict);
void DependencyParse(std::initializer_list< VarPtr > targets);
/**
* @brief Variable index in this graph
*
* @param[in] var The shared_ptr to variable
*
* @return { the integer index }
*/
size_t VarIdx(VarPtr var);

/**
* @brief Variable index in this graph
*
* @param[in] var_name The variable name
*
* @return { the integer index }
*/
size_t VarIdx(std::string var_name);

/**
* @brief Factor index in this graph
*
* @param[in] fac The shared_ptr to factor
*
* @return { the integer index }
*/
size_t FacIdx(std::shared_ptr<Factor> fac);

/**
* the map: string name -> (variable index, variable ptr)
*/
std::map<std::string, std::pair< size_t, VarPtr> > var_dict;

/**
* the map: string name -> (factor index, factor ptr)
*/
std::map<std::string, std::pair< size_t, std::shared_ptr<Factor> > > factor_dict;

/**
* variable list; used for integer indexing
*/
std::vector< VarPtr > var_list;

/**
* factor list; used for integer indexing
*/
std::vector< std::shared_ptr< Factor > > factor_list;
/**
* the in/out edges of a factor: factor_name -> (in variables, out variables)
*/
std::map<std::string, std::pair< VarList, VarList > > factorEdges;
/**
* the in/out edges of a variable: variable_name -> (factor(s) that produce this var, factors that use this var)
*/
std::map<std::string, std::pair< FactorList, FactorList > > varEdges;

protected:
/**
* @brief The single threaded feed forward function
*
* @param[in] targets The targets which the user wants to fetch
* @param[in] feed_dict The feed dictionary; used to set the placeholders
*/
void SequentialForward(std::initializer_list< VarPtr > targets,
std::map<std::string, void*> feed_dict);

std::map<std::string, VarPtr> ready_dict;
std::map<std::string, std::pair< size_t, VarPtr> > var_dict;
std::map<std::string, std::pair< size_t, std::shared_ptr<Factor> > > factor_dict;
/**
* @brief Parse the dependency to see which variables are required
*
* @param[in] targets The targets which the user wants to fetch
*/
void DependencyParse(std::initializer_list< VarPtr > targets);

std::vector< VarPtr > var_list;
std::vector< std::shared_ptr< Factor > > factor_list;
/**
* whether the variable (indexed by name) is ready
*/
std::map<std::string, VarPtr> ready_dict;

/**
* whether the variable (indexed by integer) is ready
*/
std::vector<bool> isReady;

/**
* whether the variable (indexed by integer) is required by user
*/
std::vector<bool> isRequired;

/**
* number of pending variables each factor has; when it reaches zero, the factor can be executed
*/
std::vector<size_t> n_pending;

/**
* queue data structure used for topo_sort/BFS
*/
std::queue<std::string> q;
};

Expand Down
20 changes: 20 additions & 0 deletions include/nn/in_top_k.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,12 @@ void IsInTopK(DTensor<CPU, Dtype>& pred, DTensor<CPU, int>& label, DTensor<CPU,
template<typename Dtype>
void IsInTopK(DTensor<GPU, Dtype>& pred, DTensor<GPU, int>& label, DTensor<CPU, int>& out, int k);

/**
* @brief Operator: whether the true label is in top-k of prediction
*
* @tparam mode { CPU/GPU }
* @tparam Dtype { prediction ele_type (float/double) }
*/
template<typename mode, typename Dtype>
class InTopK : public Factor
{
Expand All @@ -26,17 +32,31 @@ class InTopK : public Factor

using OutType = std::shared_ptr< DTensorVar<mode, int> >;

/**
* @brief Creates an out variable.
*
* @return an integer tensor (a 0/1 vector)
*/
OutType CreateOutVar()
{
auto out_name = fmt::sprintf("%s:out_0", this->name);
return std::make_shared< DTensorVar<mode, int> >(out_name);
}

/**
* @brief constructor
*
* @param[in] _name The name
* @param[in] _topK The top K
*/
InTopK(std::string _name, int _topK = 1);

virtual void Forward(std::vector< std::shared_ptr<Variable> >& operands,
std::vector< std::shared_ptr<Variable> >& outputs) override;

/**
* top K (by default 1, which corresponds to accuracy)
*/
int topK;
};

Expand Down
Loading

0 comments on commit 34c59c0

Please sign in to comment.