Use of org.apache.mahout.cf.taste.model.Preference in the project facebook-recommender-demo by ManuelB.
Example from the class AnimalFoodRecommender, method initDataModel.
/**
 * Builds the in-memory Mahout data model from the animal/food
 * preference matrix: one {@link PreferenceArray} per animal (user),
 * containing an entry for every food (item) that has a rating.
 */
public void initDataModel() {
    // Mahout's GenericDataModel wants a primitive-long-keyed map of user -> preferences.
    FastByIDMap<PreferenceArray> preferenceMap = new FastByIDMap<PreferenceArray>();
    for (int row = 0; row < animals.size(); row++) {
        long userId = id2thing.toLongID(animals.get(row));
        List<Preference> prefsForUser = new ArrayList<Preference>();
        for (int col = 0; col < foods.size(); col++) {
            // a null cell means this animal expressed no preference for this food
            if (preferences[row][col] != null) {
                prefsForUser.add(
                    new GenericPreference(userId, id2thing.toLongID(foods.get(col)), preferences[row][col]));
            }
        }
        preferenceMap.put(userId, new GenericUserPreferenceArray(prefsForUser));
    }
    model = new GenericDataModel(preferenceMap);
}
Use of org.apache.mahout.cf.taste.model.Preference in the project facebook-recommender-demo by ManuelB.
Example from the class FacebookRecommender, method initRecommender.
/**
 * Initializes the recommender: loads the CSV data file from the
 * classpath, parses each (person, like) row into Mahout preference
 * objects, and builds an item-based boolean-preference recommender
 * on top of the resulting data model.
 */
@PostConstruct
public void initRecommender() {
    try {
        // locate the data file that is packaged inside the WAR
        URL url = getClass().getClassLoader().getResource(DATA_FILE_NAME);
        if (url == null) {
            // guard: url.toURI() would otherwise throw a NullPointerException
            log.severe(DATA_FILE_NAME + " was not found on the classpath");
            return;
        }
        // create a file out of the resource
        File data = new File(url.toURI());
        // collects, per user id, the list of that user's "likes"
        Map<Long, List<Preference>> preferencesOfUsers = new HashMap<Long, List<Preference>>();
        // try-with-resources so the underlying stream is closed even when
        // parsing fails (the original code leaked the FileInputStream)
        try (InputStreamReader reader = new InputStreamReader(
                new FileInputStream(data), java.nio.charset.StandardCharsets.UTF_8)) {
            CSVParser parser = new CSVParser(reader);
            // consume the header line; it is only logged, never used
            String[] header = parser.getLine();
            if (header != null && header.length > 1) {
                // should output person name and like name
                log.fine(header[0] + " " + header[1]);
            }
            String[] line;
            // go through every data line
            while ((line = parser.getLine()) != null) {
                String person = line[0];
                String likeName = line[1];
                // remaining columns (category, id, created_time) are ignored
                // create a long from the person name and store the mapping
                long userLong = thing2long.toLongID(person);
                thing2long.storeMapping(userLong, person);
                // create a long from the like name and store the mapping
                long itemLong = thing2long.toLongID(likeName);
                thing2long.storeMapping(itemLong, likeName);
                // fetch this user's preference list, lazily creating it
                List<Preference> userPrefList = preferencesOfUsers.get(userLong);
                if (userPrefList == null) {
                    userPrefList = new ArrayList<Preference>();
                    preferencesOfUsers.put(userLong, userPrefList);
                }
                // boolean preference: a "like" is always recorded with value 1
                userPrefList.add(new GenericPreference(userLong, itemLong, 1));
                log.fine("Adding " + person + "(" + userLong + ") to " + likeName + "(" + itemLong + ")");
            }
        }
        // convert to the primitive-long-keyed map Mahout expects
        FastByIDMap<PreferenceArray> preferencesOfUsersFastMap = new FastByIDMap<PreferenceArray>();
        for (Entry<Long, List<Preference>> entry : preferencesOfUsers.entrySet()) {
            preferencesOfUsersFastMap.put(entry.getKey(), new GenericUserPreferenceArray(entry.getValue()));
        }
        // create a data model
        dataModel = new GenericDataModel(preferencesOfUsersFastMap);
        // log-likelihood similarity works well for boolean (like / no-like) data
        recommender = new GenericBooleanPrefItemBasedRecommender(dataModel, new LogLikelihoodSimilarity(dataModel));
    } catch (URISyntaxException e) {
        log.log(Level.SEVERE, "Problem with the file URL", e);
    } catch (FileNotFoundException e) {
        log.log(Level.SEVERE, DATA_FILE_NAME + " was not found", e);
    } catch (IOException e) {
        log.log(Level.SEVERE, "Error during reading line of file", e);
    }
}
Aggregations