Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 22 additions & 5 deletions core/src/main/java/net/librec/recommender/AbstractRecommender.java
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import net.librec.math.structure.MatrixEntry;
import net.librec.math.structure.SparseMatrix;
import net.librec.recommender.item.*;
//import net.librec.util.ModelDataUtil;
import net.librec.util.ReflectionUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
Expand Down Expand Up @@ -186,7 +185,7 @@ protected void setup() throws LibrecException {
Collections.sort(ratingScale);
maxRate = Collections.max(trainMatrix.getValueSet());
minRate = Collections.min(trainMatrix.getValueSet());
globalMean = trainMatrix.sum() / numRates;
globalMean = trainMatrix.mean();

int[] numDroppedItemsArray = new int[numUsers]; // for AUCEvaluator
int maxNumTestItemsByUser = 0; //for idcg
Expand Down Expand Up @@ -380,7 +379,7 @@ protected void cleanup() throws LibrecException {
*/
@Override
public void loadModel(String filePath) {
    // Intentional no-op: model persistence is not implemented for this
    // recommender (the former ModelDataUtil-based implementation was removed).
    // NOTE(review): callers passing filePath will silently get no model loaded;
    // consider throwing UnsupportedOperationException if loading is required.
}

/**
Expand All @@ -390,7 +389,7 @@ public void loadModel(String filePath) {
*/
@Override
public void saveModel(String filePath) {
    // Intentional no-op: model persistence is not implemented for this
    // recommender (the former ModelDataUtil-based implementation was removed).
    // NOTE(review): nothing is written to filePath; consider throwing
    // UnsupportedOperationException if saving is required.
}

/**
Expand Down Expand Up @@ -460,6 +459,24 @@ public List<RecommendedItem> getRecommendedList() {
* @throws LibrecException if error occurs
*/
protected boolean isConverged(int iter) throws LibrecException {
    // Change in loss since the previous iteration (positive when loss decreased).
    float delta_loss = (float) (lastLoss - loss);

    // Print per-iteration debug info when verbose mode is enabled.
    if (verbose) {
        String recName = getClass().getSimpleName();
        String info = recName + " iter " + iter + ": loss = " + loss + ", delta_loss = " + delta_loss;
        LOG.info(info);
    }

    // A NaN or infinite loss means training diverged (e.g. learning rate too
    // large); abort rather than continue iterating on garbage values.
    if (Double.isNaN(loss) || Double.isInfinite(loss)) {
        throw new LibrecException("Loss = NaN or Infinity: current settings does not fit the recommender! Change the settings and try again!");
    }

    // Converged when the loss magnitude falls below a fixed tolerance.
    boolean converged = Math.abs(loss) < 1e-5;
    lastLoss = loss;

    return converged;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,6 @@ public abstract class MatrixFactorizationRecommender extends AbstractRecommender
*/
protected float regItem;

/**
* global mean
*/
protected double globalMean;

/**
* setup
* init member method
Expand All @@ -68,7 +63,7 @@ public abstract class MatrixFactorizationRecommender extends AbstractRecommender
*/
protected void setup() throws LibrecException {
super.setup();
numIterations = conf.getInt("rec.iterator.maximum");
numIterations = conf.getInt("rec.iterator.maximum",100);
learnRate = conf.getFloat("rec.iterator.learnrate", 0.01f);
maxLearnRate = conf.getFloat("rec.iterator.learnrate.maximum", 1000.0f);

Expand All @@ -81,7 +76,6 @@ protected void setup() throws LibrecException {

userFactors = new DenseMatrix(numUsers, numFactors);
itemFactors = new DenseMatrix(numItems, numFactors);
globalMean = trainMatrix.mean();

initMean = 0.0f;
initStd = 0.1f;
Expand All @@ -103,38 +97,6 @@ protected double predict(int userIdx, int itemIdx) throws LibrecException {
return DenseMatrix.rowMult(userFactors, userIdx, itemFactors, itemIdx);
}

/**
* Post each iteration, we do things:
* <ol>
* <li>print debug information</li>
* <li>check if converged</li>
* <li>if not, adjust learning rate</li>
* </ol>
*
* @param iter current iteration
* @return boolean: true if it is converged; false otherwise
*/
protected boolean isConverged(int iter) throws LibrecException{
float delta_loss = (float) (lastLoss - loss);

// print out debug info
if (verbose) {
String recName = getClass().getSimpleName().toString();
String info = recName + " iter " + iter + ": loss = " + loss + ", delta_loss = " + delta_loss;
LOG.info(info);
}

if (Double.isNaN(loss) || Double.isInfinite(loss)) {
// LOG.error("Loss = NaN or Infinity: current settings does not fit the recommender! Change the settings and try again!");
throw new LibrecException("Loss = NaN or Infinity: current settings does not fit the recommender! Change the settings and try again!");
}

// check if converged
boolean converged = Math.abs(loss) < 1e-5;
lastLoss = loss;

return converged;
}

/**
* Update current learning rate after each epoch <br>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ public class LDARecommender extends ProbabilisticGraphicalRecommender {
* Dirichlet hyper-parameters of topic-item distribution, typical value is 0.01
*/
protected float initBeta;

/**
* entry[k, i]: number of tokens assigned to topic k, given item i.
*/
Expand Down Expand Up @@ -140,9 +141,9 @@ protected void setup() throws LibrecException {
for (MatrixEntry matrixEntry : trainMatrix) {
int userIdx = matrixEntry.row();
int itemIdx = matrixEntry.column();
int num = (int) (matrixEntry.get());
int num = (int) (matrixEntry.get()); // problem 1 : the for cycle is not necessary
for(int numIdx = 0; numIdx < num; numIdx++) {
int topicIdx = (int) (Math.random() * numTopics); // 0 ~ k-1
int topicIdx = (int) (Math.random() * numTopics); // 0 ~ k-1 // problem 2 : the random

// assign a topic t to pair (u, i)
topicAssignments.add(topicIdx);
Expand Down Expand Up @@ -171,7 +172,7 @@ protected void eStep() {
int itemIdx = matrixEntry.column();

int num = (int) (matrixEntry.get());
for (int numIdx = 0; numIdx < num; numIdx++) {
for (int numIdx = 0; numIdx < num; numIdx++) { // problem 1 again
int topicIdx = topicAssignments.get(topicAssignmentsIdx); // topic

userTopicNumbers.add(userIdx, topicIdx, -1);
Expand Down Expand Up @@ -220,7 +221,7 @@ protected void mStep() {

topicAlpha = alpha.get(topicIdx);
double numerator = 0, denominator = 0;
for (int itemIdx = 0; itemIdx < numUsers; itemIdx++) {
for (int itemIdx = 0; itemIdx < numUsers; itemIdx++) { // problem 2 : numUsers should be numItems ? or item index should be user index?
numerator += digamma(userTopicNumbers.get(itemIdx, topicIdx) + topicAlpha) - digamma(topicAlpha);
denominator += digamma(userTokenNumbers.get(itemIdx) + sumAlpha) - digamma(sumAlpha);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import net.librec.math.structure.DenseVector;
import net.librec.math.structure.MatrixEntry;
import net.librec.math.structure.SparseVector;
import net.librec.recommender.AbstractRecommender;
import net.librec.recommender.MatrixFactorizationRecommender;

/**
Expand All @@ -31,26 +32,42 @@
* @author guoguibing and Keqiang Wang
*/
@ModelData({"isRating", "nmf", "transUserFactors", "transItemFactors"})
public class NMFRecommender extends MatrixFactorizationRecommender {
public class NMFRecommender extends AbstractRecommender {
/**
* userFactors and itemFactors matrix transpose
*/
DenseMatrix transUserFactors;
DenseMatrix transItemFactors;

/**
* the number of latent factors;
*/
protected int numFactors;

/**
* the number of iterations
*/
protected int numIterations;

@Override
protected void setup() throws LibrecException {
    super.setup();

    // Number of latent factors and training iterations, with defaults so the
    // recommender runs even when the configuration omits these keys.
    numFactors = conf.getInt("rec.factor.number", 10);
    numIterations = conf.getInt("rec.iterator.maximum", 100);

    // Factor matrices are stored transposed (numFactors x numUsers/numItems),
    // matching how trainModel reads factor rows during the updates.
    transUserFactors = new DenseMatrix(numFactors, numUsers);
    transItemFactors = new DenseMatrix(numFactors, numItems);
    // Small positive constant init: NMF's multiplicative update rule requires
    // non-negative starting values (zeros would stay zero forever).
    transUserFactors.init(0.01);
    transItemFactors.init(0.01);
}

@Override
protected void trainModel() throws LibrecException {
for (int iter = 0; iter <= numIterations; ++iter) {
// update userFactors by fixing itemFactors
for (int userIdx = 0; userIdx < numRates; userIdx++) {
for (int userIdx = 0; userIdx < numUsers; userIdx++) {
SparseVector itemRatingsVector = trainMatrix.row(userIdx);

if (itemRatingsVector.getCount() > 0) {
Expand All @@ -65,7 +82,8 @@ protected void trainModel() throws LibrecException {
double realValue = factorItemsVector.inner(itemRatingsVector);
double estmValue = factorItemsVector.inner(itemPredictsVector) + 1e-9;

transUserFactors.set(factorIdx, userIdx, transUserFactors.get(factorIdx, userIdx) * (realValue / estmValue));
transUserFactors.set(factorIdx, userIdx, transUserFactors.get(factorIdx, userIdx)
* (realValue / estmValue));
}
}
}
Expand All @@ -78,15 +96,16 @@ protected void trainModel() throws LibrecException {
SparseVector userPredictsVector = new SparseVector(numUsers, userRatingsVector.size());

for (int userIdx : userRatingsVector.getIndex()) {
userPredictsVector.set(userIdx, predict(userIdx, itemIdx));
userPredictsVector.append(userIdx, predict(userIdx, itemIdx));
}

for (int factorIdx = 0; factorIdx < numFactors; factorIdx++) {
DenseVector factorUsersVector = transUserFactors.row(factorIdx, false);
double realValue = factorUsersVector.inner(userRatingsVector);
double estmValue = factorUsersVector.inner(userPredictsVector) + 1e-9;

transItemFactors.set(factorIdx, itemIdx, transItemFactors.get(factorIdx, itemIdx) * (realValue / estmValue));
transItemFactors.set(factorIdx, itemIdx, transItemFactors.get(factorIdx, itemIdx)
* (realValue / estmValue));
}
}
}
Expand All @@ -109,7 +128,6 @@ protected void trainModel() throws LibrecException {
if (isConverged(iter) && earlyStop) {
break;
}
updateLRate(iter);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
/**
* EFM Recommender
* Zhang Y, Lai G, Zhang M, et al. Explicit factor models for explainable recommendation based on phrase-level sentiment analysis[C]
* //Proceedings of the 37th international ACM SIGIR conference on Research & development in information retrieval. ACM, 2014: 83-92.
* {@code Proceedings of the 37th international ACM SIGIR conference on Research & development in information retrieval. ACM, 2014: 83-92}.
*
* @author ChenXu
*/
Expand All @@ -53,7 +53,6 @@ public class EFMRecommender extends BiasedMFRecommender {
protected DenseMatrix userFeatureAttention;
protected DenseMatrix itemFeatureQuality;
protected DenseMatrix rating;
protected SparseMatrix trainData,testData;
protected double lambdaX;
protected double lambdaY;
protected double lambdaU;
Expand Down Expand Up @@ -90,7 +89,6 @@ protected void setup() throws LibrecException {
numberOfItems = 0;
String user = "";
String item = "";
int r = 0;
String line = null;

try {
Expand Down Expand Up @@ -134,14 +132,14 @@ protected void setup() throws LibrecException {
} catch (IOException e) {
e.printStackTrace();
}


// Create V,U1,H1,U2,H2
featureMatrix = new DenseMatrix(numberOfFeatures, featureFactor);
userFactors = new DenseMatrix(numberOfUsers, numFactors);
itemFactors = new DenseMatrix(numberOfItems, numFactors);


featureMatrix.init(initMean, initStd);
userFeatureMatrix = userFactors.getSubMatrix(0, userFactors.numRows() - 1, 0, featureFactor - 1);
userHiddenMatrix = userFactors.getSubMatrix(0, userFactors.numRows() - 1, featureFactor, userFactors.numColumns() - 1);
Expand All @@ -152,7 +150,7 @@ protected void setup() throws LibrecException {
userFeatureAttention.init(0);
itemFeatureQuality = new DenseMatrix(itemFactors.numRows(), numberOfFeatures);
itemFeatureQuality.init(0);


// compute UserFeatureAttention
double[] featureValues = new double[numberOfFeatures];
Expand Down Expand Up @@ -203,7 +201,7 @@ protected void trainModel() throws LibrecException {
for (int j = 0; j < featureFactor; j++) {
double updateValue = ((userFeatureAttention.transpose().mult(userFeatureMatrix).scale(lambdaX)).add(itemFeatureQuality.transpose().mult(itemFeatureMatrix).scale(lambdaX))).get(i, j);
updateValue /= featureMatrix.mult((userFeatureMatrix.transpose().mult(userFeatureMatrix).scale(lambdaX)).add(itemFeatureMatrix.transpose().mult(itemFeatureMatrix).scale(lambdaY))
.add(DenseMatrix.eye(featureFactor).scale(lambdaV))).get(i, j);
.add(DenseMatrix.eye(featureFactor).scale(lambdaV))).get(i, j);
updateValue = Math.sqrt(updateValue);
featureMatrix.set(i, j, featureMatrix.get(i, j) * updateValue);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,6 @@ protected void setup() throws LibrecException {
numberOfItems = 0;
String user = "";
String item = "";
int r = 0;
String line = null;
Table<Integer, Integer, String> res = HashBasedTable.create();
Table<Integer, Integer, Double> ratings = HashBasedTable.create();
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
rec.recommender.class=mostpopular
rec.recommender.isranking=true
4 changes: 0 additions & 4 deletions core/src/main/resources/rec/cf/ranking/wrmf-test.properties
Original file line number Diff line number Diff line change
@@ -1,12 +1,8 @@
rec.recommender.class=wrmf
rec.iterator.learnrate=0.01
rec.iterator.learnrate.maximum=0.01
rec.iterator.maximum=20
rec.user.regularization=0.01
rec.item.regularization=0.01
rec.factor.number=10
rec.learnrate.bolddriver=false
rec.learnrate.decay=1.0
rec.recommender.isranking=true
rec.recommender.ranking.topn=10

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
rec.recommender.class=biasedmf
rec.iterator.learnrate=0.01
rec.iterator.learnrate.maximum=0.01
rec.iterator.maximum=1
rec.iterator.maximum=10
rec.user.regularization=0.01
rec.item.regularization=0.01
rec.bias.regularization=0.01
Expand Down
8 changes: 4 additions & 4 deletions core/src/main/resources/rec/cf/rating/bpmf-test.properties
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
rec.recommender.class=bpmf
rec.iterator.learnrate=0.01
rec.iterator.learnrate.maximum=0.01
# rec.iterator.learnrate=0.01
# rec.iterator.learnrate.maximum=0.01
rec.iterator.maximum=100
rec.user.regularization=0.01
rec.item.regularization=0.01
rec.factor.number=10
rec.learnrate.bolddriver=false
rec.learnrate.decay=1.0
# rec.learnrate.bolddriver=false
# rec.learnrate.decay=1.0
4 changes: 0 additions & 4 deletions core/src/main/resources/rec/cf/rating/mfals-test.properties
Original file line number Diff line number Diff line change
@@ -1,9 +1,5 @@
rec.recommender.class=mfals
rec.iterator.learnrate=0.01
rec.iterator.learnrate.maximum=0.01
rec.iterator.maximum=100
rec.user.regularization=0.01
rec.item.regularization=0.01
rec.factor.number=10
rec.learnrate.bolddriver=false
rec.learnrate.decay=1.0
8 changes: 2 additions & 6 deletions core/src/main/resources/rec/cf/rating/nmf-test.properties
Original file line number Diff line number Diff line change
@@ -1,9 +1,5 @@
rec.recommender.class=nmf
rec.iterator.learnrate=0.01
rec.iterator.learnrate.maximum=0.01
rec.iterator.maximum=100
rec.user.regularization=0.01
rec.item.regularization=0.01
rec.factor.number=10
rec.iterator.maximum=10
rec.factor.number=100
rec.learnrate.bolddriver=false
rec.learnrate.decay=1.0
1 change: 0 additions & 1 deletion core/src/main/resources/rec/content/hft-test.properties
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ rec.item.regularization=0.01
rec.factor.number=10
rec.learnrate.bolddriver=false
rec.learnrate.decay=1.0
rec.eval.enable = 1
rec.recommender.lambda.user=0.05
rec.recommender.lambda.item=0.05
rec.bias.regularization = 0.01
Expand Down
Loading