use of org.haiku.haikudepotserver.dataobjects.auto._User in project haikudepotserver by haiku.
the class PkgJobApiImpl method queuePkgCategoryCoverageImportSpreadsheetJob.
@Override
public QueuePkgCategoryCoverageImportSpreadsheetJobResult queuePkgCategoryCoverageImportSpreadsheetJob(QueuePkgCategoryCoverageImportSpreadsheetJobRequest request) {
    Preconditions.checkArgument(null != request, "the request must be supplied");
    Preconditions.checkArgument(!Strings.isNullOrEmpty(request.inputDataGuid), "the input data must be identified by guid");

    final ObjectContext context = serverRuntime.newContext();
    Optional<User> user = tryObtainAuthenticatedUser(context);

    if (!permissionEvaluator.hasPermission(
            SecurityContextHolder.getContext().getAuthentication(),
            null,
            Permission.BULK_PKGCATEGORYCOVERAGEIMPORTSPREADSHEET)) {
        throw new AccessDeniedException("attempt to import package categories, but was not authorized");
    }

    // check that the supplied data is actually present before queuing the job.
    jobService.tryGetData(request.inputDataGuid)
            .orElseThrow(() -> new ObjectNotFoundException(JobData.class.getSimpleName(), request.inputDataGuid));

    // set up the job specification and submit it.
    PkgCategoryCoverageImportSpreadsheetJobSpecification spec = new PkgCategoryCoverageImportSpreadsheetJobSpecification();
    spec.setOwnerUserNickname(user.map(_User::getNickname).orElse(null));
    spec.setInputDataGuid(request.inputDataGuid);

    return new QueuePkgCategoryCoverageImportSpreadsheetJobResult(jobService.submit(spec, JobSnapshot.COALESCE_STATUSES_NONE));
}
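For context, a client of this API would first supply the spreadsheet payload to the job service and then pass the returned GUID in the request. A minimal sketch of such a call site (the pkgJobApi and storedDataGuid names are assumptions for illustration, not part of the project):

// Hypothetical call site: the spreadsheet bytes were stored earlier and are
// identified by storedDataGuid.
QueuePkgCategoryCoverageImportSpreadsheetJobRequest request = new QueuePkgCategoryCoverageImportSpreadsheetJobRequest();
request.inputDataGuid = storedDataGuid;
QueuePkgCategoryCoverageImportSpreadsheetJobResult result = pkgJobApi.queuePkgCategoryCoverageImportSpreadsheetJob(request);
// the result carries the GUID of the queued job for later status polling.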
use of org.haiku.haikudepotserver.dataobjects.auto._User in project IR_Base by Linda-sunshine.
the class MultiThreadedLMAnalyzer method findFriends.
public void findFriends(String filename) {
    _User ui, uj;
    // Detect all co-purchase pairs among the users.
    for (int i = 0; i < m_users.size(); i++) {
        ui = m_users.get(i);
        for (int j = i + 1; j < m_users.size(); j++) {
            uj = m_users.get(j);
            if (hasCoPurchase(ui, uj)) {
                ui.addAmazonFriend(uj.getUserID());
                uj.addAmazonFriend(ui.getUserID());
            }
        }
    }
    // Write each user's friend list to the file and report the average list size.
    // try-with-resources ensures the writer is closed even if writing fails.
    try (PrintWriter writer = new PrintWriter(new File(filename))) {
        double avg = 0;
        for (_User u : m_users) {
            avg += u.getAmazonFriends().size();
            writer.write(u.getUserID() + "\t");
            for (String frd : u.getAmazonFriends())
                writer.write(frd + "\t");
            writer.write("\n");
        }
        System.out.println("[Info] Avg friends: " + avg / m_users.size());
    } catch (IOException e) {
        e.printStackTrace();
    }
}
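The hasCoPurchase helper is not shown in this snippet. One plausible reading (an assumption, not the project's actual code) is that two users are co-purchasers when their review sets share at least one item; the getItemID() accessor below is likewise assumed:

// Assumed helper (not in the original snippet): two users "co-purchase"
// when they have reviewed at least one common item.
protected boolean hasCoPurchase(_User ui, _User uj) {
    HashSet<String> items = new HashSet<String>(); // java.util.HashSet
    for (_Review r : ui.getReviews())
        items.add(r.getItemID());
    for (_Review r : uj.getReviews())
        if (items.contains(r.getItemID()))
            return true;
    return false;
}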
use of org.haiku.haikudepotserver.dataobjects.auto._User in project IR_Base by Linda-sunshine.
the class CLRWithHDP method loadUsers.
@Override
public void loadUsers(ArrayList<_User> userList) {
    m_userList = new ArrayList<_AdaptStruct>();
    // Wrap each user in an HDP-specific adaptation structure.
    for (_User user : userList)
        m_userList.add(new _HDPAdaptStruct(user));
    // Personalized weights share the dimensionality of the global weights.
    m_pWeights = new double[m_gWeights.length];
}
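A caller would typically hand an analyzer's user list straight to the model; a minimal wiring sketch, assuming both objects were constructed elsewhere and that the analyzer exposes its users through a getUsers() accessor (an assumption):

// Hypothetical wiring: adapt the analyzer's users for the HDP model.
// model (a CLRWithHDP) and analyzer are assumed to exist already.
model.loadUsers(analyzer.getUsers());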
use of org.haiku.haikudepotserver.dataobjects.auto._User in project IR_Base by Linda-sunshine.
the class MultiThreadedLMAnalyzer method getStat.
public void getStat() {
    ArrayList<Integer> medians = new ArrayList<Integer>();
    double pos = 0, total = 0;
    for (_User u : m_users) {
        medians.add(u.getReviewSize());
        for (_Review r : u.getReviews()) {
            if (r.getYLabel() == 1)
                pos++;
            total++;
        }
    }
    Collections.sort(medians);
    double median;
    if (medians.size() % 2 == 0)
        // Average the two middle values; divide by 2.0 so the .5 is not
        // truncated by integer division.
        median = (medians.get(medians.size() / 2) + medians.get(medians.size() / 2 - 1)) / 2.0;
    else
        median = medians.get(medians.size() / 2);
    System.out.println("median: " + median);
    System.out.println("pos: " + pos);
    System.out.println("total: " + total);
    System.out.println("pos ratio: " + pos / total);
}
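The 2.0 divisor in the even-sized case matters: with int operands, a plain / 2 truncates before the result is widened to double. A standalone illustration (not project code):

// Integer-division pitfall on two middle review counts 5 and 8:
int lo = 5, hi = 8;
double truncated = (lo + hi) / 2;  // 6.0 -- the .5 is lost before widening
double correct = (lo + hi) / 2.0;  // 6.5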
use of org.haiku.haikudepotserver.dataobjects.auto._User in project IR_Base by Linda-sunshine.
the class MultiThreadedLMAnalyzer method estimateGlobalLM.
// Estimate a global language model.
// We traverse all review documents instead of using the global TF.
public double[] estimateGlobalLM() {
    double[] lm = new double[getLMFeatureSize()];
    double sum = 0;
    // Accumulate language-model feature counts over every review.
    for (_User u : m_users) {
        for (_Review r : u.getReviews()) {
            for (_SparseFeature fv : r.getLMSparse()) {
                lm[fv.getIndex()] += fv.getValue();
                sum += fv.getValue();
            }
        }
    }
    // Normalize to probabilities; floor zero entries so no feature has
    // zero probability.
    for (int i = 0; i < lm.length; i++) {
        lm[i] /= sum;
        if (lm[i] == 0)
            lm[i] = 0.0001;
    }
    return lm;
}
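Flooring zero entries at 0.0001 avoids zero probabilities but leaves the vector summing to slightly more than one. An alternative for the normalization step (an assumption, not what the project does) is add-delta smoothing over the raw counts, which stays properly normalized:

// Hypothetical variant of the normalization step: add-delta (Laplace)
// smoothing, where lm holds the raw counts and sum is their total.
double delta = 0.0001;
for (int i = 0; i < lm.length; i++)
    lm[i] = (lm[i] + delta) / (sum + delta * lm.length);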