Use of com.axelor.data.csv.CSVInput in the project axelor-open-suite by Axelor.
Example from the class DataImportServiceImpl, method process.
// Builds one CSVInput per FileTab whose name matches a sheet of the uploaded
// file: writes the tab's rows (header + data) into a temp CSV file under
// dataDir and records per-tab values in importContext for the later import run.
// NOTE(review): relies on instance fields (csvInput, ifList, dataDir,
// importContext) that are reset per tab by initializeVariables().
private List<CSVInput> process(DataReaderService reader, AdvancedImport advancedImport) throws AxelorException, IOException, ClassNotFoundException {
String[] sheets = reader.getSheetNames();
// isConfig: the field-mapping configuration is embedded in the file itself.
boolean isConfig = advancedImport.getIsConfigInFile();
// Leading lines to skip when the configuration is NOT embedded in the file.
int linesToIgnore = advancedImport.getNbOfFirstLineIgnore();
// isTabConfig: an extra tab-configuration block precedes the data rows.
boolean isTabConfig = advancedImport.getIsFileTabConfigAdded();
List<CSVInput> inputList = new ArrayList<CSVInput>();
// Tabs are processed in the order established by the validator service.
validatorService.sortFileTabList(advancedImport.getFileTabList());
for (FileTab fileTab : advancedImport.getFileTabList()) {
// Skip tabs that have no matching sheet in the uploaded file.
if (!Arrays.stream(sheets).anyMatch(sheet -> sheet.equals(fileTab.getName()))) {
continue;
}
// Resets per-tab instance state (csvInput, ifList, ...).
this.initializeVariables();
String fileName = createDataFileName(fileTab);
csvInput = this.createCSVInput(fileTab, fileName);
ifList = new ArrayList<String>();
// CSV_SEPRATOR [sic]: separator constant declared elsewhere in this class.
try (CSVWriter csvWriter = new CSVWriter(new FileWriter(new File(dataDir, fileName)), CSV_SEPRATOR)) {
int totalLines = reader.getTotalLines(fileTab.getName());
if (totalLines == 0) {
// Empty sheet: skip the tab entirely (the writer is still closed by
// try-with-resources; inputList.add below is not reached).
continue;
}
Mapper mapper = advancedImportService.getMapper(fileTab.getMetaModel().getFullName());
List<String[]> allLines = new ArrayList<String[]>();
// Header row position: row 1 when config is embedded, otherwise right
// after the ignored leading lines.
int startIndex = isConfig ? 1 : linesToIgnore;
String[] row = reader.read(fileTab.getName(), startIndex, 0);
String[] headers = this.createHeader(row, fileTab, isConfig, mapper);
allLines.add(headers);
int tabConfigRowCount = 0;
if (isTabConfig) {
String[] objectRow = reader.read(fileTab.getName(), 0, 0);
tabConfigRowCount = advancedImportService.getTabConfigRowCount(fileTab.getName(), reader, totalLines, objectRow);
}
// First data row: past the embedded config block plus 3 fixed rows, or
// past the header line / ignored lines otherwise.
startIndex = isConfig ? tabConfigRowCount + 3 : fileTab.getAdvancedImport().getIsHeader() ? linesToIgnore + 1 : linesToIgnore;
for (int line = startIndex; line < totalLines; line++) {
// row.length caps the number of columns read per data row.
String[] dataRow = reader.read(fileTab.getName(), line, row.length);
// Skip missing or completely blank rows.
if (dataRow == null || Arrays.stream(dataRow).allMatch(StringUtils::isBlank)) {
continue;
}
String[] data = this.createData(dataRow, fileTab, isConfig, mapper);
allLines.add(data);
}
csvWriter.writeAll(allLines);
csvWriter.flush();
}
inputList.add(csvInput);
// Stash per-tab values keyed by tab id, consumed later by the import run.
importContext.put("ifConditions" + fileTab.getId(), ifList);
importContext.put("jsonContextValues" + fileTab.getId(), createJsonContext(fileTab));
importContext.put("actionsToApply" + fileTab.getId(), fileTab.getActions());
XStream stream = XStreamUtils.createXStream();
stream.processAnnotations(CSVConfig.class);
LOG.debug("CSV Config created :" + "\n" + stream.toXML(csvInput));
}
return inputList;
}
Use of com.axelor.data.csv.CSVInput in the project axelor-open-suite by Axelor.
Example from the class DataBackupCreateService, method writeCSVData.
/**
 * Exports all records of the given MetaModel through the supplied CSV writer
 * and returns the CSVInput describing how to re-import that file.
 *
 * <p>The header row is derived from the exportable properties of the model and
 * written once, before the first data row. Records are fetched in pages of
 * {@code dataBackup.getFetchLimit()} to bound memory usage.
 *
 * @param metaModel model whose records are exported
 * @param csvWriter destination writer (owned and closed by the caller)
 * @param dataBackup backup configuration (fetch limit, date/import-id options)
 * @param totalRecord total number of records to export
 * @param subClasses subclass names to include, may be null
 * @param dirPath working directory used for binary field extraction
 * @return the CSVInput for the generated file (never null)
 */
protected CSVInput writeCSVData(MetaModel metaModel, CSVWriter csvWriter, DataBackup dataBackup, long totalRecord, List<String> subClasses, String dirPath) {
  CSVInput csvInput = new CSVInput();
  boolean headerFlag = true;
  List<String> dataArr;
  List<String> headerArr = new ArrayList<>();
  List<Model> dataList;
  try {
    Mapper metaModelMapper = Mapper.of(Class.forName(metaModel.getFullName()));
    Property[] properties = metaModelMapper.getProperties();
    Integer fetchLimit = dataBackup.getFetchLimit();
    boolean isRelativeDate = dataBackup.getIsRelativeDate();
    boolean updateImportId = dataBackup.getUpdateImportId();
    csvInput.setFileName(metaModel.getName() + ".csv");
    csvInput.setTypeName(metaModel.getFullName());
    csvInput.setBindings(new ArrayList<>());
    // Page through the records; NOTE(review): assumes fetchLimit > 0 — a zero
    // limit would make this loop never advance. Confirm upstream validation.
    for (int i = 0; i < totalRecord; i += fetchLimit) {
      dataList = getMetaModelDataList(metaModel, i, fetchLimit, subClasses);
      if (dataList != null && !dataList.isEmpty()) {
        for (Object dataObject : dataList) {
          dataArr = new ArrayList<>();
          for (Property property : properties) {
            if (isPropertyExportable(property)) {
              // Header cells are collected only while processing the first record.
              if (headerFlag) {
                String headerStr = getMetaModelHeader(property, csvInput, isRelativeDate);
                headerArr.add(headerStr);
              }
              dataArr.add(getMetaModelData(metaModel.getName(), metaModelMapper, property, dataObject, dirPath, isRelativeDate, updateImportId));
            }
          }
          if (headerFlag) {
            if (byteArrFieldFlag) {
              // Byte-array fields require a dedicated import callable.
              csvInput.setCallable("com.axelor.apps.base.service.app.DataBackupRestoreService:importObjectWithByteArray");
              byteArrFieldFlag = false;
            }
            csvWriter.writeNext(headerArr.toArray(new String[0]), true);
            headerFlag = false;
          }
          csvWriter.writeNext(dataArr.toArray(new String[0]), true);
        }
      }
    }
    // Choose the duplicate-detection query used on re-import.
    if (AutoImportModelMap.containsKey(csvInput.getTypeName())) {
      csvInput.setSearch(AutoImportModelMap.get(csvInput.getTypeName()).toString());
    } else if (Class.forName(metaModel.getFullName()).getSuperclass() == App.class) {
      csvInput.setSearch("self.code = :code");
    } else {
      csvInput.setSearch("self.importId = :importId");
    }
  } catch (ClassNotFoundException e) {
    // Fix: the original swallowed this silently, hiding a missing model class.
    // Trace it the same way the create() method of this class does.
    TraceBackService.trace(e, DataBackupService.class.getName());
  }
  return csvInput;
}
Use of com.axelor.data.csv.CSVInput in the project axelor-open-suite by Axelor.
Example from the class AppServiceImpl, method refreshApp.
// Creates a missing App record for every app configuration model (App* models
// with no existing row): builds a CSVConfig in a temp directory from each
// model's bundled data-init CSV and icon, then runs a CSVImporter over it.
@Override
public void refreshApp() throws IOException {
File dataDir = Files.createTempDirectory(null).toFile();
File imgDir = new File(dataDir, "img");
imgDir.mkdir();
CSVConfig csvConfig = new CSVConfig();
csvConfig.setInputs(new ArrayList<>());
// All App* models in the App package, excluding the base App model itself.
List<MetaModel> metaModels = metaModelRepo.all().filter("self.name != 'App' and self.name like 'App%' and self.packageName = ?1", App.class.getPackage().getName()).fetch();
// Targets of App's one-to-one app* fields: the actual app config model names.
final List<String> appFieldTargetList = Stream.of(JPA.fields(App.class)).filter(p -> p.getType() == PropertyType.ONE_TO_ONE).filter(p -> p.getName().startsWith("app")).map(Property::getTarget).map(Class::getName).collect(Collectors.toList());
log.debug("Total app models: {}", metaModels.size());
for (MetaModel metaModel : metaModels) {
// Only process models that are referenced from App via an app* field.
if (!appFieldTargetList.contains(metaModel.getFullName())) {
log.debug("Not a App class : {}", metaModel.getName());
continue;
}
Class<?> klass;
try {
klass = Class.forName(metaModel.getFullName());
} catch (ClassNotFoundException e) {
// Model class not on the classpath (module not installed): skip it.
continue;
}
Object obj = null;
// Existence probe: fetch at most one id for this model.
Query query = JPA.em().createQuery("SELECT id FROM " + metaModel.getName());
try {
obj = query.setMaxResults(1).getSingleResult();
} catch (Exception ex) {
// Best-effort: NoResultException (or any failure) is treated as "no
// record exists"; intentionally ignored.
}
if (obj != null) {
// A record already exists for this app model; nothing to create.
continue;
}
log.debug("App without app record: {}", metaModel.getName());
// Conventional resource names, e.g. AppSale -> base_appSale.csv / app-sale.png.
String csvName = "base_" + inflector.camelize(klass.getSimpleName(), true) + ".csv";
String pngName = inflector.dasherize(klass.getSimpleName()) + ".png";
// First input: creates the App row itself from the same CSV.
CSVInput input = new CSVInput();
input.setFileName(csvName);
input.setTypeName(App.class.getName());
input.setCallable("com.axelor.csv.script.ImportApp:importApp");
input.setSearch("self.code =:code");
input.setSeparator(';');
csvConfig.getInputs().add(input);
// Second input: creates the app config row, bound to the App by code.
CSVInput appInput = new CSVInput();
appInput.setFileName(csvName);
appInput.setTypeName(klass.getName());
appInput.setSearch("self.app.code =:code");
appInput.setSeparator(';');
CSVBind appBind = new CSVBind();
appBind.setColumn("code");
appBind.setField("app");
appBind.setSearch("self.code = :code");
// NOTE(review): assumes CSVInput initializes its bindings list — no
// setBindings(...) call precedes this add; confirm getBindings() is non-null.
appInput.getBindings().add(appBind);
csvConfig.getInputs().add(appInput);
// NOTE(review): getResourceAsStream may return null if the CSV/PNG is not
// bundled — presumably copyStream tolerates a null stream; verify.
InputStream stream = klass.getResourceAsStream("/data-init/input/" + csvName);
copyStream(stream, new File(dataDir, csvName));
stream = klass.getResourceAsStream("/data-init/input/img/" + pngName);
copyStream(stream, new File(imgDir, pngName));
}
// Run the import only if at least one missing app was found.
if (!csvConfig.getInputs().isEmpty()) {
CSVImporter importer = new CSVImporter(csvConfig, dataDir.getAbsolutePath());
importer.run();
}
}
Use of com.axelor.data.csv.CSVInput in the project axelor-open-suite by Axelor.
Example from the class DataBackupCreateService, method create.
/**
 * Generates a CSV file for each exported MetaModel plus a single config file,
 * zips them, and attaches the archive (or an error log) to the DataBackup.
 *
 * <p>When {@code checkAllErrorFirst} is set, a dry run with fetch limit 1 is
 * performed first and the real export only happens if it found no errors.
 *
 * @param dataBackup backup configuration; may be re-fetched after a rollback
 * @return the (possibly re-fetched) DataBackup with result files attached
 * @throws IOException if the temp working directory cannot be created
 */
public DataBackup create(DataBackup dataBackup) throws IOException {
  File tempDir = Files.createTempDirectory(null).toFile();
  String tempDirectoryPath = tempDir.getAbsolutePath();
  int fetchLimit = dataBackup.getFetchLimit();
  int errorsCount = 0;
  fileNameList = new ArrayList<>();
  List<MetaModel> metaModelList = getMetaModels();
  LinkedList<CSVInput> simpleCsvs = new LinkedList<>();
  LinkedList<CSVInput> refernceCsvs = new LinkedList<>();
  LinkedList<CSVInput> notNullReferenceCsvs = new LinkedList<>();
  Map<String, List<String>> subClassesMap = getSubClassesMap();
  if (dataBackup.getCheckAllErrorFirst()) {
    // Dry run with fetchLimit 1 to surface all errors before the real export.
    dataBackup.setFetchLimit(1);
    errorsCount = checkErrors(dataBackup, metaModelList, tempDirectoryPath, subClassesMap);
    dataBackup.setFetchLimit(fetchLimit);
    fileNameList.clear();
  }
  if (errorsCount == 0) {
    for (MetaModel metaModel : metaModelList) {
      try {
        List<String> subClasses = subClassesMap.get(metaModel.getFullName());
        long totalRecord = getMetaModelDataCount(metaModel, subClasses);
        if (totalRecord > 0) {
          LOG.debug("Exporting Model : " + metaModel.getFullName());
          notNullReferenceFlag = false;
          referenceFlag = false;
          CSVInput csvInput;
          // Fix: try-with-resources closes the writer even if writeCSVData
          // throws (the original leaked it on failure).
          try (CSVWriter csvWriter = new CSVWriter(new FileWriter(new File(tempDirectoryPath, metaModel.getName() + ".csv")), SEPARATOR, QUOTE_CHAR)) {
            csvInput = writeCSVData(metaModel, csvWriter, dataBackup, totalRecord, subClasses, tempDirectoryPath);
          }
          if (notNullReferenceFlag) {
            notNullReferenceCsvs.add(csvInput);
          } else if (referenceFlag) {
            refernceCsvs.add(csvInput);
            // Extra "simple" pass for this file so referenced records exist
            // before the reference pass re-imports them.
            CSVInput temcsv = new CSVInput();
            temcsv.setFileName(csvInput.getFileName());
            temcsv.setTypeName(csvInput.getTypeName());
            if (dataBackup.getIsRelativeDate()) {
              temcsv.setBindings(new ArrayList<>());
              getCsvInputForDateorDateTime(metaModel, temcsv);
            }
            // Same duplicate-detection query selection as writeCSVData, kept
            // as independent ifs to preserve the original precedence.
            if (AutoImportModelMap.containsKey(csvInput.getTypeName())) {
              temcsv.setSearch(AutoImportModelMap.get(csvInput.getTypeName()).toString());
            }
            if (Class.forName(metaModel.getFullName()).getSuperclass() == App.class) {
              temcsv.setSearch("self.code = :code");
            }
            if (!AutoImportModelMap.containsKey(csvInput.getTypeName()) && !((Class.forName(metaModel.getFullName()).getSuperclass()).equals(App.class))) {
              temcsv.setSearch("self.importId = :importId");
            }
            simpleCsvs.add(temcsv);
          } else {
            simpleCsvs.add(csvInput);
          }
          fileNameList.add(metaModel.getName() + ".csv");
        }
      } catch (ClassNotFoundException | IOException e) {
        TraceBackService.trace(e, DataBackupService.class.getName());
      } catch (Exception e) {
        // Export of this model failed: roll back, record the error, continue
        // with a fresh transaction and a re-fetched DataBackup.
        JPA.em().getTransaction().rollback();
        if (!dataBackup.getCheckAllErrorFirst()) {
          sb.append("\nError occured while processing model : " + metaModel.getFullName() + "\n");
          sb.append(e.getMessage() + "\n");
        }
        JPA.em().getTransaction().begin();
        dataBackup = Beans.get(DataBackupRepository.class).find(dataBackup.getId());
        errorsCount++;
      }
    }
    CSVConfig csvConfig = new CSVConfig();
    csvConfig.setInputs(simpleCsvs);
    // Import order: simple inputs, then not-null references, then plain
    // references. Fix: the original added notNullReferenceCsvs twice, which
    // duplicated those inputs in the generated config file.
    csvConfig.getInputs().addAll(notNullReferenceCsvs);
    csvConfig.getInputs().addAll(refernceCsvs);
    generateConfig(tempDirectoryPath, csvConfig);
    fileNameList.add(DataBackupServiceImpl.CONFIG_FILE_NAME);
  }
  try {
    if (!Strings.isNullOrEmpty(sb.toString())) {
      // Fix: "ss" is second-of-minute; the original "SS" was fraction-of-second.
      DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
      String logFileName = "DataBackupLog_" + LocalDateTime.now().format(formatter) + ".log";
      File file = new File(tempDir.getAbsolutePath(), logFileName);
      // Fix: try-with-resources closes the writer even on failure; the
      // original also had a dead "file != null" check (never null here).
      try (PrintWriter pw = new PrintWriter(file)) {
        pw.write(sb.toString());
      }
      dataBackup.setLogMetaFile(metaFiles.upload(file));
    }
    if (errorsCount == 0) {
      File zippedFile = generateZIP(tempDirectoryPath, fileNameList);
      dataBackup.setBackupMetaFile(metaFiles.upload(zippedFile));
    } else {
      dataBackup.setStatusSelect(DataBackupRepository.DATA_BACKUP_STATUS_ERROR);
    }
  } catch (IOException e) {
    // Fix: trace instead of printStackTrace, consistent with the catch above.
    TraceBackService.trace(e, DataBackupService.class.getName());
  }
  return dataBackup;
}
Use of com.axelor.data.csv.CSVInput in the project axelor-open-suite by Axelor.
Example from the class DataImportServiceImpl, method importData.
/**
 * Entry point for an advanced import: converts the uploaded file into CSV
 * inputs, runs the actual import, and returns the resulting log file.
 *
 * @param advancedImport configuration holding the file, separator and options
 * @return the import log as a MetaFile
 */
@Override
public MetaFile importData(AdvancedImport advancedImport) throws IOException, AxelorException, ClassNotFoundException {
  // Reset per-run state before anything else.
  adapterMap = new HashMap<>();
  importContext = new HashMap<>();
  language = advancedImport.getLanguageSelect();
  dataDir = Files.createTempDir();
  // Pick a reader matching the uploaded file's extension and open the file.
  MetaFile importFile = advancedImport.getImportFile();
  String fileExtension = Files.getFileExtension(importFile.getFileName());
  DataReaderService dataReader = dataReaderFactory.getDataReader(fileExtension);
  dataReader.initialize(importFile, advancedImport.getFileSeparator());
  // Convert each matching sheet/tab into a CSVInput, handle attachments,
  // then run the real import over the generated inputs.
  List<CSVInput> csvInputs = this.process(dataReader, advancedImport);
  if (advancedImport.getAttachment() != null) {
    this.processAttachments(advancedImport.getAttachment());
  }
  MetaFile importLog = this.importData(csvInputs);
  // Clean up the temp working directory before returning the log.
  FileUtils.forceDelete(dataDir);
  return importLog;
}
Aggregations