Usage example of com.google.samples.apps.iosched.server.schedule.model.JsonDataSources in the iosched project by Google:
class APIExtractor, method run.
/**
 * Gathers the extra and vendor-API data sources, extracts the session data
 * from them, and serializes the result as indented JSON onto the given stream.
 *
 * @param optionalOutput destination stream for the extracted JSON; the stream
 *        is flushed but not closed, so the caller retains ownership
 * @param extractUnpublished forwarded to the vendor API input so unpublished
 *        entities can be included in the extraction
 * @throws IOException if fetching a data source or writing the output fails
 */
public void run(OutputStream optionalOutput, boolean extractUnpublished) throws IOException {
    // Start with the extra (static) input sources.
    JsonDataSources dataSources = new ExtraInput().fetchAllDataSources();

    // Merge in the dynamic vendor API sources, honoring the unpublished flag.
    VendorDynamicInput vendorApi = new VendorDynamicInput();
    vendorApi.setExtractUnpublished(extractUnpublished);
    dataSources.putAll(vendorApi.fetchAllDataSources());

    // Extract the session data from the combined sources (no obfuscation here).
    JsonObject extracted = new DataExtractor(false).extractFromDataSources(dataSources);

    // Serialize onto the caller's stream as indented JSON; flush without
    // closing so the caller keeps control of the underlying OutputStream.
    Writer streamWriter = Channels.newWriter(Channels.newChannel(optionalOutput), "UTF-8");
    JsonWriter jsonWriter = new JsonWriter(streamWriter);
    jsonWriter.setIndent(" ");
    new Gson().toJson(extracted, jsonWriter);
    jsonWriter.flush();
}
Usage example of com.google.samples.apps.iosched.server.schedule.model.JsonDataSources in the iosched project by Google:
class APIUpdater, method run.
/**
 * Fetches the extra and vendor static data sources, extracts the session data,
 * and — unless the data is unchanged since the previous run — publishes it.
 * With a non-null {@code optionalOutput} the JSON is written to that stream
 * instead of Cloud Storage, and manifests/run logs are not updated.
 *
 * @param force run the update even when the new data hash matches the last run
 * @param obfuscate forwarded to {@code DataExtractor} — presumably controls
 *        obfuscation of the extracted data; TODO confirm against DataExtractor
 * @param optionalOutput when non-null, receives the extracted JSON (flushed,
 *        not closed); when null, results go to Cloud Storage
 * @throws IOException if fetching, extracting, or writing fails
 */
public void run(boolean force, boolean obfuscate, OutputStream optionalOutput) throws IOException {
// Route remote file fetches through Cloud Storage for the rest of this run.
RemoteFilesEntityFetcherFactory.setBuilder(new RemoteFilesEntityFetcherFactory.FetcherBuilder() {
String[] filenames;
@Override
public RemoteFilesEntityFetcherFactory.FetcherBuilder setSourceFiles(String... filenames) {
this.filenames = filenames;
return this;
}
@Override
public EntityFetcher build() {
return new CloudStorageRemoteFilesEntityFetcher(filenames);
}
});
UpdateRunLogger logger = new UpdateRunLogger();
CloudFileManager fileManager = new CloudFileManager();
// Each phase below is bracketed by startTimer/stopTimer so the run log
// records per-phase durations under the given label.
logger.startTimer();
JsonDataSources sources = new ExtraInput().fetchAllDataSources();
logger.stopTimer("fetchExtraAPI");
logger.startTimer();
sources.putAll(new VendorStaticInput().fetchAllDataSources());
logger.stopTimer("fetchVendorStaticAPI");
logger.startTimer();
JsonObject newData = new DataExtractor(obfuscate).extractFromDataSources(sources);
logger.stopTimer("extractOurData");
logger.startTimer();
// NOTE(review): "calulateHash" typo is in CloudFileManager's API, not fixable here.
byte[] newHash = CloudFileManager.calulateHash(newData);
logger.stopTimer("calculateHash");
// compare current Vendor API log with the one from previous run:
logger.startTimer();
if (!force && isUpToDate(newHash, logger)) {
// Nothing changed since the last run: record a no-op and bail out.
// NOTE(review): this early return skips stopTimer("compareHash") — confirm
// UpdateRunLogger tolerates a started-but-never-stopped timer.
logger.logNoopRun();
return;
}
logger.stopTimer("compareHash");
logger.startTimer();
ManifestData dataProduction = extractManifestData(fileManager.readProductionManifest(), null);
//ManifestData dataStaging = extractManifestData(fileManager.readStagingManifest(), dataProduction);
logger.stopTimer("readManifest");
JsonWriter optionalOutputWriter = null;
logger.startTimer();
// Upload a new version of the sessions file
if (optionalOutput != null) {
// send data to the outputstream
Writer writer = Channels.newWriter(Channels.newChannel(optionalOutput), "UTF-8");
optionalOutputWriter = new JsonWriter(writer);
optionalOutputWriter.setIndent(" ");
new Gson().toJson(newData, optionalOutputWriter);
// Flush only — the caller owns optionalOutput and is responsible for closing it.
optionalOutputWriter.flush();
} else {
// save data to the CloudStorage
fileManager.createOrUpdate(dataProduction.sessionsFilename, newData, false);
}
logger.stopTimer("uploadNewSessionsFile");
// Check data consistency
logger.startTimer();
DataCheck checker = new DataCheck(fileManager);
CheckResult result = checker.check(sources, newData, dataProduction);
if (!result.failures.isEmpty()) {
// Failures are reported rather than thrown, so the update still completes.
reportDataCheckFailures(result, optionalOutput);
}
logger.stopTimer("runDataCheck");
if (optionalOutput == null) {
// Only update manifest and log if saving to persistent storage
logger.startTimer();
// Create new manifests
JsonObject newProductionManifest = new JsonObject();
newProductionManifest.add("format", new JsonPrimitive(Config.MANIFEST_FORMAT_VERSION));
newProductionManifest.add("data_files", dataProduction.dataFiles);
JsonObject newStagingManifest = new JsonObject();
newStagingManifest.add("format", new JsonPrimitive(Config.MANIFEST_FORMAT_VERSION));
// newStagingManifest.add("data_files", dataStaging.dataFiles);
// save manifests to the CloudStorage
fileManager.createOrUpdateProductionManifest(newProductionManifest);
fileManager.createOrUpdateStagingManifest(newStagingManifest);
try {
// notify production GCM server:
new GCMPing().notifyGCMServer(Config.GCM_URL, Config.GCM_API_KEY);
} catch (Throwable t) {
// A GCM ping failure must not abort the update; log it and carry on.
Logger.getLogger(APIUpdater.class.getName()).log(Level.SEVERE, "Error while pinging GCM server", t);
}
logger.stopTimer("uploadManifest");
logger.logUpdateRun(dataProduction.majorVersion, dataProduction.minorVersion, dataProduction.sessionsFilename, newHash, newData, force);
}
}
Usage example of com.google.samples.apps.iosched.server.schedule.model.JsonDataSources in the iosched project by Google:
class DataSourceInput, method fetchAllDataSources.
/**
 * Fetches the data for every enum constant of this input's type and collects
 * each result as a {@code JsonDataSource} in a single {@code JsonDataSources}.
 *
 * @return the aggregated data sources, one per entity type
 * @throws IOException if fetching any entity type fails
 */
public JsonDataSources fetchAllDataSources() throws IOException {
    JsonDataSources aggregated = new JsonDataSources();
    for (EnumType entityType : getType().getEnumConstants()) {
        JsonArray fetched = fetch(entityType);
        // Guarded so the message string is only built when INFO logging is on.
        if (LOG.isLoggable(Level.INFO)) {
            LOG.info("result for " + entityType + ": entities=" + fetched.size());
        }
        aggregated.addSource(new JsonDataSource(entityType, fetched));
    }
    return aggregated;
}
Usage example of com.google.samples.apps.iosched.server.schedule.model.JsonDataSources in the iosched project by Google:
class CMSUpdateServlet, method process.
/**
 * Pulls every entity from the vendor CMS API and either renders the resulting
 * JSON into the HTTP response ({@code showOnly}) or persists it to Cloud
 * Storage, builds a notification e-mail, and redirects to a confirmation page.
 *
 * @param resp servlet response used for the JSON output or the redirect
 * @param showOnly when true, only display the extracted data; nothing is stored
 * @throws IOException if fetching the vendor data or writing output fails
 */
private void process(HttpServletResponse resp, boolean showOnly) throws IOException {
// everything ok, let's update
StringBuilder summary = new StringBuilder();
JsonObject contents = new JsonObject();
JsonDataSources sources = new VendorDynamicInput().fetchAllDataSources();
// Rebuild each source as a JsonArray keyed by entity name, and keep a
// per-entity count that goes into the e-mail summary below.
for (String entity : sources) {
JsonArray array = new JsonArray();
JsonDataSource source = sources.getSource(entity);
for (JsonObject obj : source) {
array.add(obj);
}
summary.append(entity).append(": ").append(source.size()).append("\n");
contents.add(entity, array);
}
if (showOnly) {
// Show generated contents to the output
resp.setContentType("application/json");
Writer writer = Channels.newWriter(Channels.newChannel(resp.getOutputStream()), "UTF-8");
JsonWriter outputWriter = new JsonWriter(writer);
outputWriter.setIndent(" ");
new Gson().toJson(contents, outputWriter);
// Flush only; the servlet container owns and closes the response stream.
outputWriter.flush();
} else {
// Write file to cloud storage
CloudFileManager fileManager = new CloudFileManager();
fileManager.createOrUpdate("__raw_session_data.json", contents, true);
// send email
Message message = new Message();
message.setSender(Config.EMAIL_FROM);
message.setSubject("[iosched-data-update] Manual sync from CMS");
message.setTextBody("Hey,\n\n" + "(this message is autogenerated)\n" + "This is a heads up that " + userService.getCurrentUser().getEmail() + " has just updated the IOSched 2015 data from the Vendor CMS.\n\n" + "Here is a brief status of what has been extracted from the Vendor API:\n" + summary + "\n\n" + "If you want to check the most current data that will soon be sync'ed to the IOSched Android app, " + "check this link: http://storage.googleapis.com/iosched-updater-dev.appspot.com/__raw_session_data.json\n" + "This data will remain unchanged until someone with proper privileges updates it again on https://iosched-updater-dev.appspot.com/cmsupdate\n\n" + "Thanks!\n\n" + "A robot on behalf of the IOSched team!\n\n" + "PS: you are receiving this either because you are an admin of the IOSched project or " + "because you are in a hard-coded list of I/O organizers. If you don't want to " + "receive it anymore, pay me a beer and ask kindly.");
// TODO(arthurthompson): Reimplement mailing, it currently fails due to invalid sender.
//MailServiceFactory.getMailService().sendToAdmins(message);
resp.sendRedirect("/admin/schedule/updateok.html");
}
}
Usage example of com.google.samples.apps.iosched.server.schedule.model.JsonDataSources in the iosched project by Google:
class ExtraInputTest, method testFetch.
/**
 * Verifies that ExtraInput exposes the expected tag data sources and that
 * each contains the expected number of entities.
 */
@Test
public void testFetch() throws IOException {
    JsonDataSources fetched = new ExtraInput().fetchAllDataSources();
    JsonDataSource tagConf = fetched.getSource(MainTypes.tag_conf.name());
    JsonDataSource tagCategoryMapping = fetched.getSource(MainTypes.tag_category_mapping.name());
    assertNotNull(tagConf);
    assertNotNull(tagCategoryMapping);
    assertEquals(13, tagConf.size());
    assertEquals(3, tagCategoryMapping.size());
}
End of aggregated usage examples.