
Example 46 with InMemoryJarfile

Use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.

From the class RealVoltDB, method outputProcedures, which extracts the user classes from a catalog jar and saves them to a new jar file.

private int outputProcedures(Configuration config) {
    File outputFile = new File(config.m_getOutput);
    if (outputFile.exists() && !config.m_forceGetCreate) {
        consoleLog.fatal("Failed to save classes, file already exists: " + config.m_getOutput);
        return -1;
    }
    try {
        InMemoryJarfile catalogJar = CatalogUtil.loadInMemoryJarFile(MiscUtils.fileToBytes(new File(config.m_pathToCatalog)));
        InMemoryJarfile filteredJar = CatalogUtil.getCatalogJarWithoutDefaultArtifacts(catalogJar);
        filteredJar.writeToFile(outputFile);
        consoleLog.info("Classes saved in " + outputFile.getPath());
    } catch (IOException e) {
        consoleLog.fatal("Failed to read classes " + config.m_pathToCatalog + " : " + e.getMessage());
        return -1;
    }
    return 0;
}
Also used: InMemoryJarfile (org.voltdb.utils.InMemoryJarfile), IOException (java.io.IOException), VoltFile (org.voltdb.utils.VoltFile), File (java.io.File)
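
The same calls can be exercised outside of RealVoltDB. Below is a minimal standalone sketch (not part of the project) that loads a catalog jar, strips the auto-generated artifacts, and writes the remaining user classes to a new jar. The file paths are placeholders, and CatalogUtil and MiscUtils are assumed to live in org.voltdb.utils like the other utilities shown here.

import java.io.File;
import java.io.IOException;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.MiscUtils;

public class ExtractClassesSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder paths; substitute a real catalog jar and output location.
        File catalogPath = new File("/tmp/catalog.jar");
        File outputFile = new File("/tmp/classes.jar");
        // Load the catalog jar fully into memory.
        InMemoryJarfile catalogJar =
                CatalogUtil.loadInMemoryJarFile(MiscUtils.fileToBytes(catalogPath));
        // Keep only the user-supplied classes, dropping default catalog artifacts.
        InMemoryJarfile filteredJar =
                CatalogUtil.getCatalogJarWithoutDefaultArtifacts(catalogJar);
        filteredJar.writeToFile(outputFile);
        System.out.println("Classes saved in " + outputFile.getPath());
    }
}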

Example 47 with InMemoryJarfile

Use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.

From the class RestoreAgent, method checkSnapshotIsComplete, which validates a snapshot's table files, digest, and catalog, and builds a SnapshotInfo for it if everything checks out.

private SnapshotInfo checkSnapshotIsComplete(Long key, Snapshot s) {
    int partitionCount = -1;
    for (TableFiles tf : s.m_tableFiles.values()) {
        // Check if the snapshot is complete
        if (tf.m_completed.stream().anyMatch(b -> !b)) {
            m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because it was not completed.");
            return null;
        }
        // Replicated table doesn't check partition count
        if (tf.m_isReplicated) {
            continue;
        }
        // Everyone has to agree on the total partition count
        for (int count : tf.m_totalPartitionCounts) {
            if (partitionCount == -1) {
                partitionCount = count;
            } else if (count != partitionCount) {
                m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because it had the wrong partition count ").append(count).append(", expecting ").append(partitionCount);
                return null;
            }
        }
    }
    if (s.m_digests.isEmpty()) {
        m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because it had no valid digest file.");
        return null;
    }
    File digest = s.m_digests.get(0);
    Long catalog_crc = null;
    Map<Integer, Long> pidToTxnMap = new TreeMap<Integer, Long>();
    Set<String> digestTableNames = new HashSet<String>();
    // Create a valid but meaningless InstanceId to support pre-instanceId checking versions
    InstanceId instanceId = new InstanceId(0, 0);
    int newParitionCount = -1;
    try {
        JSONObject digest_detail = SnapshotUtil.CRCCheck(digest, LOG);
        if (digest_detail == null)
            throw new IOException();
        catalog_crc = digest_detail.getLong("catalogCRC");
        if (digest_detail.has("partitionTransactionIds")) {
            JSONObject pidToTxnId = digest_detail.getJSONObject("partitionTransactionIds");
            Iterator<String> it = pidToTxnId.keys();
            while (it.hasNext()) {
                String pidkey = it.next();
                Long txnidval = pidToTxnId.getLong(pidkey);
                pidToTxnMap.put(Integer.valueOf(pidkey), txnidval);
            }
        }
        if (digest_detail.has("instanceId")) {
            instanceId = new InstanceId(digest_detail.getJSONObject("instanceId"));
        }
        if (digest_detail.has("newPartitionCount")) {
            newParitionCount = digest_detail.getInt("newPartitionCount");
        }
        if (digest_detail.has("tables")) {
            JSONArray tableObj = digest_detail.getJSONArray("tables");
            for (int i = 0; i < tableObj.length(); i++) {
                digestTableNames.add(tableObj.getString(i));
            }
        }
    } catch (IOException ioe) {
        m_snapshotErrLogStr.append("\nUnable to read digest file: ").append(digest.getAbsolutePath()).append(" due to: ").append(ioe.getMessage());
        return null;
    } catch (JSONException je) {
        m_snapshotErrLogStr.append("\nUnable to extract catalog CRC from digest: ").append(digest.getAbsolutePath()).append(" due to: ").append(je.getMessage());
        return null;
    }
    if (s.m_catalogFile == null) {
        m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because it had no catalog.");
        return null;
    }
    try {
        byte[] bytes = MiscUtils.fileToBytes(s.m_catalogFile);
        InMemoryJarfile jarfile = CatalogUtil.loadInMemoryJarFile(bytes);
        if (jarfile.getCRC() != catalog_crc) {
            m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because catalog CRC did not match digest.");
            return null;
        }
        // Make sure this is not a partial snapshot.
        // Compare digestTableNames with all normal table names in catalog file.
        // A normal table is one that's NOT a materialized view, nor an export table.
        Set<String> catalogNormalTableNames = CatalogUtil.getNormalTableNamesFromInMemoryJar(jarfile);
        if (!catalogNormalTableNames.equals(digestTableNames)) {
            m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because this is a partial snapshot.");
            return null;
        }
    } catch (IOException ioe) {
        m_snapshotErrLogStr.append("\nRejected snapshot ").append(s.getNonce()).append(" because catalog file could not be validated");
        return null;
    }
    SnapshotInfo info = new SnapshotInfo(key, digest.getParent(), SnapshotUtil.parseNonceFromDigestFilename(digest.getName()), partitionCount, newParitionCount, catalog_crc, m_hostId, instanceId, digestTableNames, s.m_stype);
    // populate table to partition map.
    for (Entry<String, TableFiles> te : s.m_tableFiles.entrySet()) {
        TableFiles tableFile = te.getValue();
        HashSet<Integer> ids = new HashSet<Integer>();
        for (Set<Integer> idSet : tableFile.m_validPartitionIds) {
            ids.addAll(idSet);
        }
        if (!tableFile.m_isReplicated) {
            info.partitions.put(te.getKey(), ids);
        }
        // keep track of tables for which we've seen files while we're here
        info.fileTables.add(te.getKey());
    }
    info.setPidToTxnIdMap(pidToTxnMap);
    return info;
}
Also used: InstanceId (org.voltcore.utils.InstanceId), JSONArray (org.json_voltpatches.JSONArray), TableFiles (org.voltdb.sysprocs.saverestore.SnapshotUtil.TableFiles), JSONException (org.json_voltpatches.JSONException), IOException (java.io.IOException), TreeMap (java.util.TreeMap), JSONObject (org.json_voltpatches.JSONObject), InMemoryJarfile (org.voltdb.utils.InMemoryJarfile), File (java.io.File), HashSet (java.util.HashSet)
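
The core of that validation is the cross-check between the CRC recorded in the snapshot digest and the CRC of the catalog jar itself. The sketch below isolates that step, reusing SnapshotUtil.CRCCheck and InMemoryJarfile.getCRC() as they appear above; the VoltLogger import, the logger name, and the exact set of checked exceptions are assumptions.

import java.io.File;
import java.io.IOException;
import org.json_voltpatches.JSONException;
import org.json_voltpatches.JSONObject;
import org.voltcore.logging.VoltLogger;
import org.voltdb.sysprocs.saverestore.SnapshotUtil;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.MiscUtils;

public class DigestCrcCheckSketch {
    private static final VoltLogger LOG = new VoltLogger("SNAPSHOT");

    // Returns true only when the digest passes its own CRC check and the
    // catalog jar's CRC matches the value recorded in the digest.
    static boolean catalogMatchesDigest(File digestFile, File catalogFile)
            throws IOException, JSONException {
        JSONObject digestDetail = SnapshotUtil.CRCCheck(digestFile, LOG);
        if (digestDetail == null) {
            return false; // the digest file itself is unreadable or corrupt
        }
        long expectedCrc = digestDetail.getLong("catalogCRC");
        InMemoryJarfile jarfile =
                CatalogUtil.loadInMemoryJarFile(MiscUtils.fileToBytes(catalogFile));
        return jarfile.getCRC() == expectedCrc;
    }
}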

Example 48 with InMemoryJarfile

Use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.

From the class RealVoltDB, method outputSchema, which extracts the auto-generated DDL from a catalog jar and writes it to a schema file.

private int outputSchema(Configuration config) {
    if ((new File(config.m_getOutput)).exists() && !config.m_forceGetCreate) {
        consoleLog.fatal("Failed to save schema file, file already exists: " + config.m_getOutput);
        return -1;
    }
    try {
        InMemoryJarfile catalogJar = CatalogUtil.loadInMemoryJarFile(MiscUtils.fileToBytes(new File(config.m_pathToCatalog)));
        String ddl = CatalogUtil.getAutoGenDDLFromJar(catalogJar);
        try (FileOutputStream fos = new FileOutputStream(config.m_getOutput.trim())) {
            fos.write(ddl.getBytes());
        } catch (IOException e) {
            consoleLog.fatal("Failed to write schema to " + config.m_getOutput + " : " + e.getMessage());
            return -1;
        }
        consoleLog.info("Schema saved in " + config.m_getOutput.trim());
    } catch (IOException e) {
        consoleLog.fatal("Failed to load the catalog jar from " + config.m_pathToCatalog + " : " + e.getMessage());
        return -1;
    }
    return 0;
}
Also used: InMemoryJarfile (org.voltdb.utils.InMemoryJarfile), FileOutputStream (java.io.FileOutputStream), IOException (java.io.IOException), VoltFile (org.voltdb.utils.VoltFile), File (java.io.File)
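
A trimmed-down sketch of the same extraction, returning the auto-generated DDL as a string instead of writing it to disk. The surrounding class and method names are hypothetical; the CatalogUtil calls are the ones used above.

import java.io.File;
import java.io.IOException;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.MiscUtils;

public class ExtractDdlSketch {
    // Load a catalog jar and return the canonical DDL that was generated for it.
    static String ddlFromCatalogJar(File catalogJarPath) throws IOException {
        InMemoryJarfile jar =
                CatalogUtil.loadInMemoryJarFile(MiscUtils.fileToBytes(catalogJarPath));
        return CatalogUtil.getAutoGenDDLFromJar(jar);
    }
}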

Example 49 with InMemoryJarfile

Use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.

From the class CatalogUpgradeTools, method dorkDowngradeVersion, which replaces the first line of the buildinfo entry in a catalog jar with the supplied build string and writes the result to a new jar.

public static void dorkDowngradeVersion(String srcJar, String dstJar, String buildstring) throws Exception {
    InMemoryJarfile memCatalog = CatalogUpgradeTools.loadFromPath(srcJar);
    String[] bi = getBuildInfoLines(memCatalog);
    bi[0] = buildstring;
    memCatalog.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, StringUtils.join(bi, '\n').getBytes());
    memCatalog.writeToFile(new File(dstJar));
}
Also used: InMemoryJarfile (org.voltdb.utils.InMemoryJarfile), File (java.io.File)
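
The getBuildInfoLines helper is not shown in this listing, but the put() call above implies that InMemoryJarfile exposes its entries as raw bytes keyed by name. The sketch below is one plausible reimplementation of that helper under those assumptions; it is not the project's actual code.

import java.nio.charset.StandardCharsets;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.InMemoryJarfile;

public class BuildInfoSketch {
    // Pull the buildinfo entry out of the in-memory jar and split it into lines,
    // so callers such as dorkDowngradeVersion can rewrite individual lines.
    static String[] readBuildInfoLines(InMemoryJarfile memCatalog) {
        byte[] raw = memCatalog.get(CatalogUtil.CATALOG_BUILDINFO_FILENAME);
        return new String(raw, StandardCharsets.UTF_8).split("\n");
    }
}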

Example 50 with InMemoryJarfile

Use of org.voltdb.utils.InMemoryJarfile in project voltdb by VoltDB.

From the class CatalogUpgradeTools, method dorkJar, which applies dorkVersion and, optionally, dorkDDL to a catalog jar and writes the modified jar to a new path.

public static void dorkJar(String srcJar, String dstJar, String statement) throws IOException {
    InMemoryJarfile memCatalog = CatalogUpgradeTools.loadFromPath(srcJar);
    dorkVersion(memCatalog);
    if (statement != null) {
        dorkDDL(memCatalog, statement);
    }
    memCatalog.writeToFile(new File(dstJar));
}
Also used: InMemoryJarfile (org.voltdb.utils.InMemoryJarfile), File (java.io.File)
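
For completeness, here is a hypothetical caller for the two helpers above. The jar paths, the build string, and the injected DDL statement are placeholders, and the sketch is assumed to live in the same package as CatalogUpgradeTools so no import of it is needed.

public class CatalogDorkSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder paths; point these at a real source catalog jar and an output path.
        String srcJar = "/tmp/catalog-src.jar";
        String dstJar = "/tmp/catalog-dorked.jar";
        // Rewrite the first buildinfo line so the catalog reports a different build.
        CatalogUpgradeTools.dorkDowngradeVersion(srcJar, dstJar, "0.0.0.0 placeholder_build_string");
        // Alter the version and, in the same pass, apply an extra DDL statement.
        CatalogUpgradeTools.dorkJar(srcJar, dstJar, "CREATE TABLE dorked (id INTEGER NOT NULL);");
    }
}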

Aggregations

InMemoryJarfile (org.voltdb.utils.InMemoryJarfile): 52
VoltCompiler (org.voltdb.compiler.VoltCompiler): 26
File (java.io.File): 20
ProcCallException (org.voltdb.client.ProcCallException): 19
ClientResponse (org.voltdb.client.ClientResponse): 18
Test (org.junit.Test): 15
Configuration (org.voltdb.VoltDB.Configuration): 15
IOException (java.io.IOException): 14
VoltProjectBuilder (org.voltdb.compiler.VoltProjectBuilder): 12
VoltDB (org.voltdb.VoltDB): 9
Catalog (org.voltdb.catalog.Catalog): 6
VoltTable (org.voltdb.VoltTable): 5
VoltFile (org.voltdb.utils.VoltFile): 5
ArrayList (java.util.ArrayList): 4
JSONObject (org.json_voltpatches.JSONObject): 3
CatalogContext (org.voltdb.CatalogContext): 3
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
JAXBException (javax.xml.bind.JAXBException): 2