Use of java.util.logging.FileHandler in project MyPet by xXKeyleXx.
The class MyPetLogger, method setupDebugLogger:
protected boolean setupDebugLogger() {
    // Reuse an already registered debug handler; it is identified by its overridden toString().
    if (getHandlers().length > 0) {
        for (Handler h : getHandlers()) {
            if (h.toString().equals("MyPet-Debug-Logger-FileHandler")) {
                if (Configuration.Log.LEVEL.equalsIgnoreCase("OFF")) {
                    removeHandler(h);
                    h.close();
                    return false;
                }
                debugLogFileHandler = (FileHandler) h;
                return true;
            }
        }
    }
    if (Configuration.Log.LEVEL.equalsIgnoreCase("OFF")) {
        return false;
    }
    if (debugLogFileHandler != null) {
        addHandler(debugLogFileHandler);
        return true;
    }
    try {
        File logsFolder = new File(MyPetApi.getPlugin().getDataFolder(), "logs");
        logsFolder.mkdirs();
        File logFile = new File(logsFolder, File.separator + "MyPet.log");
        // Append mode, so the debug log survives plugin reloads.
        FileHandler fileHandler = new FileHandler(logFile.getAbsolutePath(), true) {

            @Override
            public String toString() {
                return "MyPet-Debug-Logger-FileHandler";
            }
        };
        Level level;
        try {
            level = Level.parse(Configuration.Log.LEVEL);
        } catch (IllegalArgumentException e) {
            // Fall back to OFF when the configured level string is invalid.
            level = Level.OFF;
            this.warning(e.getMessage());
        }
        fileHandler.setLevel(level);
        System.out.println("Level: " + level);
        fileHandler.setFormatter(new LogFormat());
        addHandler(fileHandler);
        debugLogFileHandler = fileHandler;
        return true;
    } catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
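The MyPet snippet identifies its debug handler by overriding toString(), which works but is easy to break. Below is a minimal, self-contained sketch of the same attach/detach idea under assumed names: DebugFileHandler, setupDebugLogger's parameters, and the "logs/debug.log" path are illustrative, not MyPet's actual API. Using a dedicated subclass lets an instanceof check find the handler again instead of comparing strings.

import java.io.File;
import java.io.IOException;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

// Hypothetical marker subclass: its type alone identifies the debug handler.
class DebugFileHandler extends FileHandler {
    DebugFileHandler(String pattern) throws IOException {
        super(pattern, true); // append to the existing log file
    }
}

public class DebugLogSetup {

    /** Attaches or detaches the debug file handler depending on the configured level. */
    static boolean setupDebugLogger(Logger logger, String configuredLevel, File dataFolder) throws IOException {
        // Detach an existing debug handler when logging is switched off.
        for (Handler h : logger.getHandlers()) {
            if (h instanceof DebugFileHandler) {
                if ("OFF".equalsIgnoreCase(configuredLevel)) {
                    logger.removeHandler(h);
                    h.close();
                    return false;
                }
                return true; // already attached
            }
        }
        if ("OFF".equalsIgnoreCase(configuredLevel)) {
            return false;
        }
        File logsFolder = new File(dataFolder, "logs");
        logsFolder.mkdirs();
        DebugFileHandler handler = new DebugFileHandler(new File(logsFolder, "debug.log").getAbsolutePath());
        Level level;
        try {
            level = Level.parse(configuredLevel);
        } catch (IllegalArgumentException e) {
            level = Level.OFF; // invalid level string: disable the handler
        }
        handler.setLevel(level);
        handler.setFormatter(new SimpleFormatter());
        logger.addHandler(handler);
        return true;
    }
}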
Use of java.util.logging.FileHandler in project OsmAnd-tools by osmandapp.
The class IndexBatchCreator, method generateIndex:
protected void generateIndex(File file, String rName, RegionSpecificData rdata, Set<String> alreadyGeneratedFiles) {
    try {
        // be independent of previous results
        RTree.clearCache();
        String regionName = file.getName();
        log.warn("-------------------------------------------");
        log.warn("----------- Generate " + file.getName() + "\n\n\n");
        int i = file.getName().indexOf('.');
        if (i > -1) {
            regionName = Algorithms.capitalizeFirstLetterAndLowercase(file.getName().substring(0, i));
        }
        if (Algorithms.isEmpty(rName)) {
            rName = regionName;
        } else {
            rName = Algorithms.capitalizeFirstLetterAndLowercase(rName);
        }
        DBDialect osmDb = this.osmDbDialect;
        if (file.length() / 1024 / 1024 > INMEM_LIMIT && osmDb == DBDialect.SQLITE_IN_MEMORY) {
            log.warn("Switching SQLITE in memory dialect to SQLITE");
            osmDb = DBDialect.SQLITE;
        }
        IndexCreator indexCreator = new IndexCreator(workDir);
        boolean worldMaps = rName.toLowerCase().contains("world");
        if (srtmDir != null && (rdata == null || rdata.indexSRTM) && !worldMaps) {
            indexCreator.setSRTMData(srtmDir);
        }
        indexCreator.setDialects(osmDb, osmDb);
        final boolean indAddr = indexAddress && (rdata == null || rdata.indexAddress);
        final boolean indPoi = indexPOI && (rdata == null || rdata.indexPOI);
        final boolean indTransport = indexTransport && (rdata == null || rdata.indexTransport);
        final boolean indMap = indexMap && (rdata == null || rdata.indexMap);
        final boolean indRouting = indexRouting && (rdata == null || rdata.indexRouting);
        if (!indAddr && !indPoi && !indTransport && !indMap && !indRouting) {
            log.warn("! Skip country because nothing to index !");
            file.delete();
            return;
        }
        indexCreator.setIndexAddress(indAddr);
        indexCreator.setIndexPOI(indPoi);
        indexCreator.setIndexTransport(indTransport);
        indexCreator.setIndexMap(indMap);
        indexCreator.setIndexRouting(indRouting);
        indexCreator.setLastModifiedDate(file.lastModified());
        indexCreator.setRegionName(rName);
        if (rdata != null && rdata.cityAdminLevel != null) {
            indexCreator.setCityAdminLevel(rdata.cityAdminLevel);
        }
        if (zoomWaySmoothness != null) {
            indexCreator.setZoomWaySmoothness(zoomWaySmoothness);
        }
        String mapFileName = regionName + "_" + IndexConstants.BINARY_MAP_VERSION + IndexConstants.BINARY_MAP_INDEX_EXT;
        indexCreator.setMapFileName(mapFileName);
        try {
            alreadyGeneratedFiles.add(file.getName());
            Log warningsAboutMapData = null;
            File logFileName = new File(workDir, mapFileName + GEN_LOG_EXT);
            FileHandler fh = null;
            // configure log path: write header lines, then attach a FileHandler for the generation log
            try {
                FileOutputStream fout = new FileOutputStream(logFileName);
                fout.write((new Date() + "\n").getBytes());
                fout.write((MapCreatorVersion.APP_MAP_CREATOR_FULL_NAME + "\n").getBytes());
                fout.close();
                fh = new FileHandler(logFileName.getAbsolutePath(), 10 * 1000 * 1000, 1, true);
                fh.setFormatter(new SimpleFormatter());
                fh.setLevel(Level.ALL);
                Jdk14Logger jdk14Logger = new Jdk14Logger("tempLogger");
                jdk14Logger.getLogger().setLevel(Level.ALL);
                jdk14Logger.getLogger().setUseParentHandlers(false);
                jdk14Logger.getLogger().addHandler(fh);
                warningsAboutMapData = jdk14Logger;
            } catch (SecurityException e1) {
                e1.printStackTrace();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
            if (fh != null) {
                // mirror everything from the root logger into the generation log
                LogManager.getLogManager().getLogger("").addHandler(fh);
            }
            try {
                indexCreator.generateIndexes(file, new ConsoleProgressImplementation(1), null, mapZooms,
                        new MapRenderingTypesEncoder(renderingTypesFile, file.getName()), warningsAboutMapData);
            } finally {
                // always close the handler and detach it from the root logger
                if (fh != null) {
                    fh.close();
                    LogManager.getLogManager().getLogger("").removeHandler(fh);
                }
            }
            File generated = new File(workDir, mapFileName);
            File dest = new File(indexDirFiles, generated.getName());
            if (!generated.renameTo(dest)) {
                // rename can fail across file systems; fall back to a stream copy
                FileOutputStream fout = new FileOutputStream(dest);
                FileInputStream fin = new FileInputStream(generated);
                Algorithms.streamCopy(fin, fout);
                fin.close();
                fout.close();
            }
            File copyLog = new File(indexDirFiles, logFileName.getName());
            FileOutputStream fout = new FileOutputStream(copyLog);
            FileInputStream fin = new FileInputStream(logFileName);
            Algorithms.streamCopy(fin, fout);
            fin.close();
            fout.close();
            // logFileName.renameTo(new File(indexDirFiles, logFileName.getName()));
        } catch (Exception e) {
            log.error("Exception generating indexes for " + file.getName(), e); // $NON-NLS-1$
        }
    } catch (OutOfMemoryError e) {
        System.gc();
        log.error("OutOfMemory", e);
    }
    System.gc();
}
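The logging core of this method is to attach a size-limited FileHandler to the root logger for the duration of one generation run and to detach and close it in a finally block. A stripped-down sketch of just that pattern follows; runWithTaskLog, the log path, and the "worker" logger name are assumptions for illustration, not OsmAnd code.

import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class PerTaskLog {

    /** Runs a task while mirroring root-logger output into a dedicated log file. */
    static void runWithTaskLog(String logPath, Runnable task) throws Exception {
        // 10 MB limit, a single file, append mode.
        FileHandler fh = new FileHandler(logPath, 10 * 1000 * 1000, 1, true);
        fh.setFormatter(new SimpleFormatter());
        fh.setLevel(Level.ALL);
        Logger root = LogManager.getLogManager().getLogger("");
        root.addHandler(fh);
        try {
            task.run();
        } finally {
            // Always detach and close, otherwise the handler and its .lck lock file leak.
            fh.close();
            root.removeHandler(fh);
        }
    }

    public static void main(String[] args) throws Exception {
        runWithTaskLog("task-gen.log", () -> Logger.getLogger("worker").info("generating index..."));
    }
}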
Use of java.util.logging.FileHandler in project jgnash by ccavanaugh.
The class ImportQifAction, method importQif:
private static void importQif() {
    final ResourceBundle rb = ResourceUtils.getBundle();
    final Preferences pref = Preferences.userNodeForPackage(ImportQifAction.class);
    final Logger logger = Logger.getLogger("qifimport");
    if (debug) {
        try {
            Handler fh = new FileHandler("%h/jgnash%g.log");
            fh.setFormatter(new SimpleFormatter());
            logger.addHandler(fh);
            logger.setLevel(Level.FINEST);
        } catch (IOException ioe) {
            logger.log(Level.SEVERE, "Could not install file handler", ioe);
        }
    }
    final Engine engine = EngineFactory.getEngine(EngineFactory.DEFAULT);
    Objects.requireNonNull(engine);
    if (engine.getRootAccount() == null) {
        StaticUIMethods.displayError(rb.getString("Message.Error.CreateBasicAccounts"));
        return;
    }
    final JFileChooser chooser = new JFileChooser(pref.get(QIFDIR, null));
    chooser.setMultiSelectionEnabled(false);
    chooser.addChoosableFileFilter(new FileNameExtensionFilter("Qif Files (*.qif)", "qif"));
    if (chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
        pref.put(QIFDIR, chooser.getCurrentDirectory().getAbsolutePath());
        boolean fullFile = QifUtils.isFullFile(chooser.getSelectedFile());
        if (fullFile) {
            // prompt for date format
            final DateFormat dateFormat = getQIFDateFormat();
            class ImportFile extends SwingWorker<Void, Void> {

                @Override
                protected Void doInBackground() throws Exception {
                    UIApplication.getFrame().displayWaitMessage(rb.getString("Message.ImportWait"));
                    QifImport imp = new QifImport();
                    try {
                        imp.doFullParse(chooser.getSelectedFile(), dateFormat);
                    } catch (NoAccountException e) {
                        logger.log(Level.SEVERE, "Mistook partial qif file as a full qif file", e);
                    }
                    imp.dumpStats();
                    imp.doFullImport();
                    if (imp.getDuplicateCount() > 0) {
                        String message = imp.getDuplicateCount() + " duplicate transactions were found";
                        logger.info(message);
                    }
                    return null;
                }

                @Override
                protected void done() {
                    UIApplication.getFrame().stopWaitMessage();
                }
            }
            new ImportFile().execute();
        } else {
            final QifImport imp = new QifImport();
            if (!imp.doPartialParse(chooser.getSelectedFile())) {
                StaticUIMethods.displayError(rb.getString("Message.Error.ParseTransactions"));
                return;
            }
            imp.dumpStats();
            if (imp.getParser().accountList.isEmpty()) {
                StaticUIMethods.displayError(rb.getString("Message.Error.ParseTransactions"));
                return;
            }
            PartialDialog dlg = new PartialDialog(imp.getParser());
            DialogUtils.addBoundsListener(dlg);
            dlg.setVisible(true);
            if (dlg.isWizardValid()) {
                imp.doPartialImport(dlg.getAccount());
                if (imp.getDuplicateCount() > 0) {
                    if (YesNoDialog.showYesNoDialog(UIApplication.getFrame(),
                            new MultiLineLabel(TextResource.getString("DupeTransImport.txt")),
                            rb.getString("Title.DuplicateTransactionsFound"), YesNoDialog.WARNING_MESSAGE)) {
                        Transaction[] t = imp.getDuplicates();
                        for (Transaction element : t) {
                            engine.addTransaction(element);
                        }
                    }
                }
            }
        }
    }
}
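The pattern string "%h/jgnash%g.log" relies on FileHandler's placeholder substitutions: %h expands to the user's home directory (the user.home system property), %g to the generation number used when rotating files, %t to the system temporary directory, and %u to a unique number that resolves conflicts between processes. A small standalone sketch of a rotating handler built on those placeholders; the file and logger names are illustrative only.

import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class PatternDemo {
    public static void main(String[] args) throws Exception {
        // Keep up to 3 rotated files of at most 1 MB each in the user's home directory:
        // %h -> user.home, %g -> generation number (0, 1, 2, ...).
        FileHandler fh = new FileHandler("%h/demo%g.log", 1_000_000, 3, true);
        fh.setFormatter(new SimpleFormatter());
        Logger logger = Logger.getLogger("demo");
        logger.addHandler(fh);
        logger.info("hello from the rotating file handler");
        fh.close();
    }
}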
Use of java.util.logging.FileHandler in project freeplane by freeplane.
The class LogUtils, method createLogger:
public static void createLogger() {
    if (loggerCreated) {
        return;
    }
    loggerCreated = true;
    FileHandler mFileHandler = null;
    final Logger parentLogger = Logger.getAnonymousLogger().getParent();
    // Remove the default console handlers; a handler using StdFormatter is added below instead.
    final Handler[] handlers = parentLogger.getHandlers();
    for (int i = 0; i < handlers.length; i++) {
        final Handler handler = handlers[i];
        if (handler instanceof ConsoleHandler) {
            parentLogger.removeHandler(handler);
        }
    }
    try {
        final String logDirectoryPath = getLogDirectory();
        final File logDirectory = new File(logDirectoryPath);
        logDirectory.mkdirs();
        if (logDirectory.isDirectory()) {
            // Rotating log files: up to 5 files of roughly 1.4 MB each, not appended across runs.
            final String pathPattern = logDirectoryPath + File.separatorChar + "log";
            mFileHandler = new FileHandler(pathPattern, 1400000, 5, false);
            mFileHandler.setFormatter(new StdFormatter());
            parentLogger.addHandler(mFileHandler);
        }
        final ConsoleHandler stdConsoleHandler = new ConsoleHandler();
        stdConsoleHandler.setFormatter(new StdFormatter());
        if (System.getProperty("java.util.logging.config.file", null) == null) {
            // mFileHandler stays null if the log directory could not be created.
            if (mFileHandler != null) {
                mFileHandler.setLevel(Level.INFO);
            }
            stdConsoleHandler.setLevel(Level.INFO);
        }
        parentLogger.addHandler(stdConsoleHandler);
        // Redirect System.out and System.err through the logging system.
        LoggingOutputStream los;
        Logger logger = Logger.getLogger(StdFormatter.STDOUT.getName());
        los = new LoggingOutputStream(logger, StdFormatter.STDOUT, MAX_LOG_SIZE);
        System.setOut(new PrintStream(los, true));
        logger = Logger.getLogger(StdFormatter.STDERR.getName());
        los = new LoggingOutputStream(logger, StdFormatter.STDERR, MAX_LOG_SIZE);
        System.setErr(new PrintStream(los, true));
    } catch (final Exception e) {
        LogUtils.warn("Error creating logging File Handler", e);
    }
}
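LoggingOutputStream and StdFormatter are Freeplane-internal classes. The following is a hedged sketch of the underlying idea, redirecting System.out and System.err into java.util.logging through a simple line-buffering OutputStream; LineLoggingOutputStream is a hypothetical stand-in, not Freeplane's implementation.

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical stand-in: collects bytes until a newline, then forwards the line to a Logger.
class LineLoggingOutputStream extends OutputStream {
    private final Logger logger;
    private final Level level;
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    LineLoggingOutputStream(Logger logger, Level level) {
        this.logger = logger;
        this.level = level;
    }

    @Override
    public void write(int b) {
        if (b == '\r') {
            return; // ignore carriage returns from Windows line endings
        }
        if (b == '\n') {
            logger.log(level, buffer.toString());
            buffer.reset();
        } else {
            buffer.write(b);
        }
    }
}

public class RedirectStdStreams {
    public static void main(String[] args) {
        System.setOut(new PrintStream(new LineLoggingOutputStream(Logger.getLogger("STDOUT"), Level.INFO), true));
        System.setErr(new PrintStream(new LineLoggingOutputStream(Logger.getLogger("STDERR"), Level.WARNING), true));
        System.out.println("this line now goes through java.util.logging");
    }
}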
Use of java.util.logging.FileHandler in project dataverse by IQSS.
The class DatasetServiceBean, method exportAllDatasets:
public void exportAllDatasets(boolean forceReExport) {
    Integer countAll = 0;
    Integer countSuccess = 0;
    Integer countError = 0;
    String logTimestamp = logFormatter.format(new Date());
    Logger exportLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp);
    String logFileName = "../logs" + File.separator + "export_" + logTimestamp + ".log";
    FileHandler fileHandler;
    boolean fileHandlerSuceeded;
    try {
        fileHandler = new FileHandler(logFileName);
        exportLogger.setUseParentHandlers(false);
        fileHandlerSuceeded = true;
    } catch (IOException | SecurityException ex) {
        Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.SEVERE, null, ex);
        return;
    }
    if (fileHandlerSuceeded) {
        exportLogger.addHandler(fileHandler);
    } else {
        exportLogger = logger;
    }
    exportLogger.info("Starting an export all job");
    for (Long datasetId : findAllLocalDatasetIds()) {
        // Potentially, there's a godzillion datasets in this Dataverse.
        // This is why we go through the list of ids here, and instantiate
        // only one dataset at a time.
        Dataset dataset = this.find(datasetId);
        if (dataset != null) {
            // it is indeed an accurate test.
            if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) {
                // can't trust dataset.getPublicationDate(), no.
                // we know this dataset has a non-null released version! Maybe not - SEK 8/19 (We do now! :)
                Date publicationDate = dataset.getReleasedVersion().getReleaseTime();
                if (forceReExport || (publicationDate != null && (dataset.getLastExportTime() == null || dataset.getLastExportTime().before(publicationDate)))) {
                    countAll++;
                    try {
                        recordService.exportAllFormatsInNewTransaction(dataset);
                        exportLogger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId());
                        countSuccess++;
                    } catch (Exception ex) {
                        exportLogger.info("Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId() + "; " + ex.getMessage());
                        countError++;
                    }
                }
            }
        }
    }
    exportLogger.info("Datasets processed: " + countAll.toString());
    exportLogger.info("Datasets exported successfully: " + countSuccess.toString());
    exportLogger.info("Datasets failures: " + countError.toString());
    exportLogger.info("Finished export-all job.");
    if (fileHandlerSuceeded) {
        fileHandler.close();
    }
}
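The export job above creates a dedicated logger, disables propagation to the parent handlers so job output stays out of the server log, and closes the FileHandler once the job finishes so the file is flushed and the .lck lock file is released. A compact sketch of that lifecycle, with an illustrative logger name and file path rather than Dataverse's actual values:

import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;

public class ExportJobLog {
    public static void main(String[] args) throws Exception {
        // Illustrative names; the real bean derives the logger name and path from a timestamp.
        Logger jobLogger = Logger.getLogger("export.job.demo");
        jobLogger.setUseParentHandlers(false); // keep job output out of the parent handlers
        FileHandler fileHandler = new FileHandler("export_demo.log");
        fileHandler.setFormatter(new SimpleFormatter());
        jobLogger.addHandler(fileHandler);
        try {
            jobLogger.info("Starting an export all job");
            // ... per-item work would go here ...
            jobLogger.info("Finished export-all job.");
        } finally {
            // Close to flush the log file and release the .lck lock file.
            fileHandler.close();
            jobLogger.removeHandler(fileHandler);
        }
    }
}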