Search in sources :

Example 41 with SimpleDateFormat

use of java.text.SimpleDateFormat in project kafka by apache.

The configure method of the class TimestampRouter.

/**
 * Reads this transform's configuration: the topic format string and the
 * timestamp pattern used to build a per-thread {@link SimpleDateFormat}.
 *
 * @param props raw configuration properties supplied by the Connect runtime
 */
@Override
public void configure(Map<String, ?> props) {
    final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
    topicFormat = config.getString(ConfigName.TOPIC_FORMAT);
    final String timestampFormatStr = config.getString(ConfigName.TIMESTAMP_FORMAT);
    // SimpleDateFormat is not thread-safe, so every thread gets its own
    // lazily-created instance. ThreadLocal.withInitial (Java 8) replaces the
    // more verbose anonymous-subclass form with identical semantics.
    timestampFormat = ThreadLocal.withInitial(() -> {
        final SimpleDateFormat fmt = new SimpleDateFormat(timestampFormatStr);
        // Format in UTC so the rendered timestamp does not depend on the
        // worker's default time zone.
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        return fmt;
    });
}
Also used : SimpleConfig(org.apache.kafka.connect.transforms.util.SimpleConfig) SimpleDateFormat(java.text.SimpleDateFormat)

Example 42 with SimpleDateFormat

use of java.text.SimpleDateFormat in project hive by apache.

The literal method of the class ASTBuilder.

/**
 * Converts a Calcite {@code RexLiteral} into the corresponding Hive parser
 * AST literal node.
 *
 * <p>The first switch only performs null detection: date/time/interval/binary
 * literals expose their null through {@code getValue()}, while numeric,
 * character, and boolean literals expose it through {@code getValue3()}.
 * Both groups rely on case fall-through to share a single null check.
 * The second switch renders the non-null value as the token text Hive's
 * parser expects (including type-suffix qualifiers such as Y/S/L/D/BD).
 *
 * @param literal              the Calcite literal to translate
 * @param useTypeQualInLiteral whether to append integral type suffixes
 *                             (Y, S, L) for tinyint/smallint/bigint
 * @return an {@code ASTNode} carrying the literal's token type and text
 */
public static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) {
    Object val = null;
    int type = 0;
    SqlTypeName sqlType = literal.getType().getSqlTypeName();
    // Null handling, phase 1: these types report null via getValue().
    switch(sqlType) {
        case BINARY:
        case DATE:
        case TIME:
        case TIMESTAMP:
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_MONTH:
        case INTERVAL_SECOND:
        case INTERVAL_YEAR:
        case INTERVAL_YEAR_MONTH:
            if (literal.getValue() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
            break;
        // Null handling, phase 2: these types report null via getValue3().
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case DOUBLE:
        case DECIMAL:
        case FLOAT:
        case REAL:
        case VARCHAR:
        case CHAR:
        case BOOLEAN:
            if (literal.getValue3() == null) {
                return ASTBuilder.construct(HiveParser.TOK_NULL, "TOK_NULL").node();
            }
    }
    // Value rendering: pick the Hive token type and format the token text.
    switch(sqlType) {
        case TINYINT:
            // "Y" suffix marks a tinyint literal in HiveQL.
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "Y";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case SMALLINT:
            // "S" suffix marks a smallint literal in HiveQL.
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "S";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case INTEGER:
            val = literal.getValue3();
            type = HiveParser.IntegralLiteral;
            break;
        case BIGINT:
            // "L" suffix marks a bigint literal in HiveQL.
            if (useTypeQualInLiteral) {
                val = literal.getValue3() + "L";
            } else {
                val = literal.getValue3();
            }
            type = HiveParser.IntegralLiteral;
            break;
        case DOUBLE:
            val = literal.getValue3() + "D";
            type = HiveParser.NumberLiteral;
            break;
        case DECIMAL:
            val = literal.getValue3() + "BD";
            type = HiveParser.NumberLiteral;
            break;
        case FLOAT:
        case REAL:
            val = literal.getValue3();
            type = HiveParser.Number;
            break;
        case VARCHAR:
        case CHAR:
            // Escape embedded quotes/specials, then wrap in single quotes.
            val = literal.getValue3();
            String escapedVal = BaseSemanticAnalyzer.escapeSQLString(String.valueOf(val));
            type = HiveParser.StringLiteral;
            val = "'" + escapedVal + "'";
            break;
        case BOOLEAN:
            val = literal.getValue3();
            type = ((Boolean) val).booleanValue() ? HiveParser.KW_TRUE : HiveParser.KW_FALSE;
            break;
        case DATE:
            {
                // getValue() returns a Calendar here; render as 'yyyy-MM-dd'.
                // The SimpleDateFormat is method-local, so the usual
                // thread-safety caveat does not apply.
                val = literal.getValue();
                type = HiveParser.TOK_DATELITERAL;
                DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
                val = df.format(((Calendar) val).getTime());
                val = "'" + val + "'";
            }
            break;
        case TIME:
        case TIMESTAMP:
            {
                // Rendered with millisecond precision, quoted for the parser.
                val = literal.getValue();
                type = HiveParser.TOK_TIMESTAMPLITERAL;
                DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
                val = df.format(((Calendar) val).getTime());
                val = "'" + val + "'";
            }
            break;
        case INTERVAL_YEAR:
        case INTERVAL_MONTH:
        case INTERVAL_YEAR_MONTH:
            {
                // Calcite stores year-month intervals as a month count.
                type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
                BigDecimal monthsBd = (BigDecimal) literal.getValue();
                HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
                val = "'" + intervalYearMonth.toString() + "'";
            }
            break;
        case INTERVAL_DAY:
        case INTERVAL_DAY_HOUR:
        case INTERVAL_DAY_MINUTE:
        case INTERVAL_DAY_SECOND:
        case INTERVAL_HOUR:
        case INTERVAL_HOUR_MINUTE:
        case INTERVAL_HOUR_SECOND:
        case INTERVAL_MINUTE:
        case INTERVAL_MINUTE_SECOND:
        case INTERVAL_SECOND:
            {
                type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
                BigDecimal millisBd = (BigDecimal) literal.getValue();
                // Calcite literal is in millis, convert to seconds
                BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
                HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
                val = "'" + intervalDayTime.toString() + "'";
            }
            break;
        case NULL:
            type = HiveParser.TOK_NULL;
            break;
        //binary type should not be seen.
        case BINARY:
        default:
            throw new RuntimeException("Unsupported Type: " + sqlType);
    }
    return (ASTNode) ParseDriver.adaptor.create(type, String.valueOf(val));
}
Also used : HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) SimpleDateFormat(java.text.SimpleDateFormat) DateFormat(java.text.DateFormat) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SimpleDateFormat(java.text.SimpleDateFormat) BigDecimal(java.math.BigDecimal) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 43 with SimpleDateFormat

use of java.text.SimpleDateFormat in project hive by apache.

The export_meta_data method of the class MetaDataExportListener.

/**
 * Exports the dropped table's metadata to a timestamped directory under the
 * configured export location (falling back to the filesystem home directory
 * when no location is configured), optionally moving it to the trash
 * afterwards.
 *
 * @param tableEvent the pre-drop event carrying the table being dropped
 * @throws MetaException if the export directory cannot be created or the
 *         metadata dump fails
 */
private void export_meta_data(PreDropTableEvent tableEvent) throws MetaException {
    Table tbl = tableEvent.getTable();
    String name = tbl.getTableName();
    org.apache.hadoop.hive.ql.metadata.Table mTbl = new org.apache.hadoop.hive.ql.metadata.Table(tbl);
    HMSHandler handler = tableEvent.getHandler();
    HiveConf hiveconf = handler.getHiveConf();
    Warehouse wh = new Warehouse(hiveconf);
    Path tblPath = new Path(tbl.getSd().getLocation());
    FileSystem fs = wh.getFs(tblPath);
    // Timestamp suffix keeps repeated exports of the same table distinct.
    Date now = new Date();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
    String dateString = sdf.format(now);
    String exportPathString = hiveconf.getVar(HiveConf.ConfVars.METADATA_EXPORT_LOCATION);
    boolean moveMetadataToTrash = hiveconf.getBoolVar(HiveConf.ConfVars.MOVE_EXPORTED_METADATA_TO_TRASH);
    // Fall back to the home directory when the export location is unset.
    // The original only checked for the empty string; a null config value
    // would previously reach new Path(null) and throw an NPE.
    final Path exportPath;
    if (exportPathString == null || exportPathString.isEmpty()) {
        exportPath = fs.getHomeDirectory();
    } else {
        exportPath = new Path(exportPathString);
    }
    Path metaPath = new Path(exportPath, name + "." + dateString);
    LOG.info("Exporting the metadata of table " + tbl.toString() + " to path " + metaPath.toString());
    try {
        fs.mkdirs(metaPath);
    } catch (IOException e) {
        throw new MetaException(e.getMessage());
    }
    Path outFile = new Path(metaPath, name + EximUtil.METADATA_NAME);
    try {
        SessionState.getConsole().printInfo("Beginning metadata export");
        EximUtil.createExportDump(fs, outFile, mTbl, null, null);
        if (moveMetadataToTrash) {
            wh.deleteDir(metaPath, true);
        }
    } catch (IOException | SemanticException e) {
        // Both failure modes are reported identically; multi-catch replaces
        // the two duplicated catch blocks.
        throw new MetaException(e.getMessage());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) Table(org.apache.hadoop.hive.metastore.api.Table) IOException(java.io.IOException) Date(java.util.Date) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) SimpleDateFormat(java.text.SimpleDateFormat) HMSHandler(org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 44 with SimpleDateFormat

use of java.text.SimpleDateFormat in project hive by apache.

The getReport method of the class SparkJobMonitor.

/**
 * Builds a one-line, tab-separated progress report covering every Spark
 * stage in {@code progressMap}, prefixed with the current timestamp, and
 * publishes the overall completion fraction to the active session (if any).
 *
 * @param progressMap per-stage progress keyed by stage id
 * @return the formatted report line
 */
private String getReport(Map<String, SparkStageProgress> progressMap) {
    final SimpleDateFormat timestampFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
    final StringBuilder report = new StringBuilder(timestampFmt.format(new Date())).append("\t");
    // Aggregate task counts across all stages while emitting one field per stage.
    int totalTasks = 0;
    int completedTasks = 0;
    // TreeSet gives a stable, sorted stage order in the report.
    for (String stageId : new TreeSet<String>(progressMap.keySet())) {
        final SparkStageProgress progress = progressMap.get(stageId);
        final int complete = progress.getSucceededTaskCount();
        final int total = progress.getTotalTaskCount();
        final int running = progress.getRunningTaskCount();
        final int failed = progress.getFailedTaskCount();
        totalTasks += total;
        completedTasks += complete;
        final String stageName = "Stage-" + stageId;
        if (total <= 0) {
            // Task count unknown: emit a placeholder and move on.
            report.append(String.format("%s: -/-\t", stageName));
            continue;
        }
        if (complete == total && !completed.contains(stageId)) {
            // First time we observe this stage finished: close its perf span.
            completed.add(stageId);
            if (!perfLogger.startTimeHasMethod(PerfLogger.SPARK_RUN_STAGE + stageId)) {
                perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_RUN_STAGE + stageId);
            }
            perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_RUN_STAGE + stageId);
        }
        if (complete < total && (complete > 0 || running > 0 || failed > 0)) {
            // Stage is started but not complete.
            if (!perfLogger.startTimeHasMethod(PerfLogger.SPARK_RUN_STAGE + stageId)) {
                perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_RUN_STAGE + stageId);
            }
            if (failed > 0) {
                report.append(String.format("%s: %d(+%d,-%d)/%d\t", stageName, complete, running, failed, total));
            } else {
                report.append(String.format("%s: %d(+%d)/%d\t", stageName, complete, running, total));
            }
        } else if (failed > 0) {
            // Stage finished its tasks but some of them failed.
            report.append(String.format("%s: %d(-%d)/%d Finished with failed tasks\t", stageName, complete, failed, total));
        } else if (complete == total) {
            report.append(String.format("%s: %d/%d Finished\t", stageName, complete, total));
        } else {
            // Stage is waiting for input/slots.
            report.append(String.format("%s: %d/%d\t", stageName, complete, total));
        }
    }
    if (SessionState.get() != null) {
        final float fraction = (totalTasks == 0) ? 1.0f : (float) completedTasks / (float) totalTasks;
        SessionState.get().updateProgressedPercentage(fraction);
    }
    return report.toString();
}
Also used : TreeSet(java.util.TreeSet) SimpleDateFormat(java.text.SimpleDateFormat) Date(java.util.Date)

Example 45 with SimpleDateFormat

use of java.text.SimpleDateFormat in project hive by apache.

The initialize method of the class GenericUDFDateFormat.

/**
 * Validates the two arguments (a date/timestamp/string value and a constant
 * format string), prepares the input converters, and caches a
 * {@code SimpleDateFormat} when the constant pattern is valid.
 *
 * @param arguments the argument inspectors; exactly two are required
 * @return a writable-string object inspector for the formatted result
 * @throws UDFArgumentException if the arity, types, or constness checks fail
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    // the function should support both short date and full timestamp format
    // time part of the timestamp should not be skipped
    checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP);
    checkArgGroups(arguments, 0, dtInputTypes, STRING_GROUP, DATE_GROUP);
    checkArgGroups(arguments, 1, tsInputTypes, STRING_GROUP);
    obtainTimestampConverter(arguments, 0, tsInputTypes, tsConverters);
    obtainDateConverter(arguments, 0, dtInputTypes, dtConverters);
    // Guard clause: the format argument must be a compile-time constant.
    if (!(arguments[1] instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(1, getFuncName() + " only takes constant as " + getArgOrder(1) + " argument");
    }
    final String fmtStr = getConstantStringValue(arguments, 1);
    if (fmtStr != null) {
        try {
            formatter = new SimpleDateFormat(fmtStr);
        } catch (IllegalArgumentException ignored) {
            // Invalid pattern: deliberately leave formatter unset (best-effort,
            // same as the original behavior).
        }
    }
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
Also used : ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) SimpleDateFormat(java.text.SimpleDateFormat)

Aggregations

SimpleDateFormat (java.text.SimpleDateFormat)2847 Date (java.util.Date)1590 ParseException (java.text.ParseException)463 DateFormat (java.text.DateFormat)425 Calendar (java.util.Calendar)307 Test (org.junit.Test)305 ArrayList (java.util.ArrayList)232 File (java.io.File)230 IOException (java.io.IOException)185 GregorianCalendar (java.util.GregorianCalendar)139 HashMap (java.util.HashMap)121 Locale (java.util.Locale)70 DateField (edu.uci.ics.textdb.api.field.DateField)64 DoubleField (edu.uci.ics.textdb.api.field.DoubleField)64 IField (edu.uci.ics.textdb.api.field.IField)64 IntegerField (edu.uci.ics.textdb.api.field.IntegerField)64 StringField (edu.uci.ics.textdb.api.field.StringField)63 TextField (edu.uci.ics.textdb.api.field.TextField)63 Map (java.util.Map)63 Tuple (edu.uci.ics.textdb.api.tuple.Tuple)61