use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.
the class MScheduledQuery method toThrift.
public static ScheduledQuery toThrift(MScheduledQuery s) {
    ScheduledQuery ret = new ScheduledQuery();
    ret.setScheduleKey(new ScheduledQueryKey(s.scheduleName, s.clusterNamespace));
    ret.setEnabled(s.enabled);
    ret.setSchedule(s.schedule);
    ret.setUser(s.user);
    ret.setQuery(s.query);
    ret.setNextExecution(s.nextExecution);
    return ret;
}
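The conversion above only copies the persisted columns, but the Thrift-generated ScheduledQuery also exposes its _Fields enum, a static metaDataMap and isSet(...) accessors. Below is a minimal, hypothetical sketch (ScheduledQueryFieldsDump and all field values are illustrative, not part of Hive) of using that metadata to print only the fields a converted instance actually carries:

import org.apache.hadoop.hive.metastore.api.ScheduledQuery;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryKey;

public class ScheduledQueryFieldsDump {
    public static void main(String[] args) {
        // Illustrative values only; the setters mirror the ones used in toThrift above.
        ScheduledQuery sq = new ScheduledQuery();
        sq.setScheduleKey(new ScheduledQueryKey("nightly_stats", "hive"));
        sq.setEnabled(true);
        sq.setSchedule("0 0 1 * * ? *");
        sq.setUser("hive");
        sq.setQuery("ANALYZE TABLE t COMPUTE STATISTICS");

        // Walk the generated _Fields enum and print only the fields that are set.
        for (ScheduledQuery._Fields f : ScheduledQuery.metaDataMap.keySet()) {
            if (sq.isSet(f)) {
                System.out.println(f.getFieldName() + " = " + sq.getFieldValue(f));
            }
        }
    }
}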
use of the Thrift-generated _Fields enums (Sample._Fields, ProcessInfo._Fields and related structs) in project carat by amplab.
the class SampleReader method writeSample.
/**
* For simplicity, this method relies on the fact that
* only the piList of Sample has any List type elements.
* This will fail to record those from substructs (NetworkDetails, BatteryDetails, CpuStatus),
* and will need to be changed if those are added.
*
* Does not record CallInfo, CellInfo, or CallMonth types.
*/
public static final HashMap<String, String> writeSample(Sample s) {
    HashMap<String, String> m = new HashMap<String, String>();
    for (_Fields sf : Sample.metaDataMap.keySet()) {
        FieldMetaData md = Sample.metaDataMap.get(sf);
        switch (md.valueMetaData.type) {
            case org.apache.thrift.protocol.TType.STRING:
                m.put(sf.getFieldName(), cleanStr(s.getFieldValue(sf).toString()));
                break;
            case org.apache.thrift.protocol.TType.I32:
            case org.apache.thrift.protocol.TType.DOUBLE:
                m.put(sf.getFieldName(), s.getFieldValue(sf).toString());
                break;
            case org.apache.thrift.protocol.TType.STRUCT:
                if (md.fieldName.equals(Sample._Fields.NETWORK_DETAILS.getFieldName()) && s.networkDetails != null) {
                    int len = NetworkDetails._Fields.values().length;
                    StringBuilder b = new StringBuilder();
                    for (int i = 1; i <= len; i++) {
                        b.append(cleanStr("" + s.networkDetails.getFieldValue(NetworkDetails._Fields.findByThriftId(i))));
                        if (i < len)
                            b.append("\n");
                    }
                    m.put(sf.getFieldName(), b.toString());
                } else if (md.fieldName.equals(Sample._Fields.BATTERY_DETAILS.getFieldName()) && s.batteryDetails != null) {
                    int len = BatteryDetails._Fields.values().length;
                    StringBuilder b = new StringBuilder();
                    for (int i = 1; i <= len; i++) {
                        b.append(cleanStr("" + s.batteryDetails.getFieldValue(BatteryDetails._Fields.findByThriftId(i))));
                        if (i < len)
                            b.append("\n");
                    }
                    m.put(sf.getFieldName(), b.toString());
                } else if (md.fieldName.equals(Sample._Fields.CPU_STATUS.getFieldName()) && s.cpuStatus != null) {
                    int len = CpuStatus._Fields.values().length;
                    StringBuilder b = new StringBuilder();
                    for (int i = 1; i <= len; i++) {
                        b.append(cleanStr("" + s.cpuStatus.getFieldValue(CpuStatus._Fields.findByThriftId(i))));
                        if (i < len)
                            b.append("\n");
                    }
                    m.put(sf.getFieldName(), b.toString());
                }
                /*
                 * else if (md.fieldName.equals("CallInfo")){ }
                 */
                break;
            case org.apache.thrift.protocol.TType.LIST:
                if (md.fieldName.equals(Sample._Fields.EXTRA.getFieldName()) && s.extra != null) {
                    StringBuilder b = new StringBuilder();
                    for (Feature f : s.extra) {
                        b.append(cleanStr(f.key) + ";" + cleanStr(f.value) + "\n");
                    }
                    if (b.length() > 1)
                        b.deleteCharAt(b.lastIndexOf("\n"));
                    m.put(sf.getFieldName(), b.toString());
                } else if (md.fieldName.equals(Sample._Fields.LOCATION_PROVIDERS.getFieldName()) && s.locationProviders != null) {
                    StringBuilder b = new StringBuilder();
                    for (String lp : s.locationProviders)
                        b.append(lp + "\n");
                    if (b.length() > 1)
                        b.deleteCharAt(b.lastIndexOf("\n"));
                    m.put(sf.getFieldName(), b.toString());
                } else if (md.fieldName.equals(Sample._Fields.PI_LIST.getFieldName()) && s.piList != null) {
                    StringBuilder b = new StringBuilder();
                    for (ProcessInfo pi : s.piList) {
                        int len = ProcessInfo._Fields.values().length;
                        for (int i = 1; i <= len; i++) {
                            ProcessInfo._Fields pif = ProcessInfo._Fields.findByThriftId(i);
                            FieldMetaData pmd = ProcessInfo.metaDataMap.get(pif);
                            if (pmd.valueMetaData.type == org.apache.thrift.protocol.TType.LIST) {
                                if (pi.appSignatures != null) {
                                    for (int j = 0; j < pi.appSignatures.size(); j++) {
                                        String sig = pi.appSignatures.get(j);
                                        b.append(sig);
                                        // Separate signatures with "#"; compare against the
                                        // signature count, not the field count.
                                        if (j + 1 < pi.appSignatures.size())
                                            b.append("#");
                                    }
                                }
                            } else {
                                b.append(cleanStr("" + pi.getFieldValue(pif)));
                            }
                            if (i < len)
                                b.append(";");
                        }
                        b.append("\n");
                    }
                    if (b.length() > 1)
                        b.deleteCharAt(b.lastIndexOf("\n"));
                    m.put(sf.getFieldName(), b.toString());
                }
                break;
            default:
        }
    }
    return m;
}
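Every branch of writeSample repeats the same metadata-driven walk: look up a struct's FieldMetaData, then read values back through getFieldValue. As a minimal sketch, assuming a hypothetical helper (ThriftFieldDumper is not part of SampleReader), the pattern generalizes to any Thrift-generated struct:

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.thrift.TBase;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.meta_data.FieldMetaData;

final class ThriftFieldDumper {
    // Flatten the set fields of any Thrift struct into a name -> string map.
    static <T extends TBase<T, F>, F extends TFieldIdEnum> Map<String, String> dump(
            T struct, Map<F, FieldMetaData> metaDataMap) {
        Map<String, String> out = new LinkedHashMap<String, String>();
        for (F field : metaDataMap.keySet()) {
            if (struct.isSet(field)) {                // skip unset/null fields
                Object value = struct.getFieldValue(field);
                out.put(field.getFieldName(), String.valueOf(value));
            }
        }
        return out;
    }
}

For example, ThriftFieldDumper.dump(sample, Sample.metaDataMap) flattens the scalar fields of a Sample in the same spirit, but skips unset fields instead of risking a NullPointerException on a null string field.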
use of the Thrift-generated _Fields enums (Sample._Fields, ProcessInfo._Fields and related structs) in project carat by amplab.
the class SampleReader method readSample.
/**
 * Read a Sample back from a HashMap stored in the Carat sample db.
 * @param data a HashMap<String, String> previously produced by writeSample
 * @return the reconstructed Sample, or null if data is not such a HashMap
 */
public static final Sample readSample(Object data) {
    Sample s = null;
    if (data != null && data instanceof HashMap<?, ?>) {
        HashMap<String, String> m = (HashMap<String, String>) data;
        s = new Sample();
        NetworkDetails n = new NetworkDetails();
        BatteryDetails bd = new BatteryDetails();
        // CellInfo ci = new CellInfo();
        // CallInfo calli = new CallInfo();
        // CallMonth cm = new CallMonth();
        CpuStatus cs = new CpuStatus();
        // Set single fields automatically:
        for (String k : m.keySet()) {
            _Fields sf = Sample._Fields.findByName(k);
            if (sf != null) {
                // Top level Sample field.
                FieldMetaData md = Sample.metaDataMap.get(sf);
                String cleaned = origStr(m.get(k));
                switch (md.valueMetaData.type) {
                    case org.apache.thrift.protocol.TType.STRING:
                        s.setFieldValue(sf, cleaned);
                        break;
                    case org.apache.thrift.protocol.TType.I32:
                        try {
                            s.setFieldValue(sf, Integer.parseInt(cleaned));
                        } catch (NumberFormatException e) {
                            Log.e(TAG, "Could not read " + md.fieldName + ": \"" + cleaned + "\" as an int");
                        }
                        break;
                    case org.apache.thrift.protocol.TType.DOUBLE:
                        try {
                            s.setFieldValue(sf, Double.parseDouble(cleaned));
                        } catch (NumberFormatException e) {
                            Log.e(TAG, "Could not read " + md.fieldName + ": \"" + cleaned + "\" as a double");
                        }
                        break;
                    case org.apache.thrift.protocol.TType.STRUCT:
                        if (md.fieldName.equals(Sample._Fields.NETWORK_DETAILS.getFieldName())) {
                            fillNetworkDetails(m.get(k), n);
                            s.setNetworkDetails(n);
                        } else if (md.fieldName.equals(Sample._Fields.BATTERY_DETAILS.getFieldName())) {
                            fillBatteryDetails(m.get(k), bd);
                            s.setBatteryDetails(bd);
                        } else if (md.fieldName.equals(Sample._Fields.CPU_STATUS.getFieldName())) {
                            fillCpuStatusDetails(m.get(k), cs);
                            s.setCpuStatus(cs);
                        }
                        /*
                         * else if (md.fieldName.equals("CallInfo")){ }
                         */
                        break;
                    case org.apache.thrift.protocol.TType.LIST:
                        if (md.fieldName.equals(Sample._Fields.EXTRA.getFieldName())) {
                            List<Feature> list = new LinkedList<Feature>();
                            String[] extras = m.get(k).split("\n");
                            for (String e : extras) {
                                Feature f = new Feature();
                                String[] feat = e.split(";");
                                if (feat.length > 1) {
                                    f.setKey(origStr(feat[0]));
                                    f.setValue(origStr(feat[1]));
                                }
                                list.add(f);
                            }
                            s.setExtra(list);
                        } else if (md.fieldName.equals(Sample._Fields.LOCATION_PROVIDERS.getFieldName())) {
                            List<String> list = new LinkedList<String>();
                            String[] arr = m.get(k).split("\n");
                            for (String lp : arr)
                                list.add(lp);
                            s.setLocationProviders(list);
                        } else if (md.fieldName.equals(Sample._Fields.PI_LIST.getFieldName())) {
                            // Set piList fields automatically:
                            LinkedList<ProcessInfo> piList = new LinkedList<ProcessInfo>();
                            String[] processes = m.get(md.fieldName).split("\n");
                            for (String process : processes) {
                                String[] items = process.split(";");
                                ProcessInfo pi = new ProcessInfo();
                                /*
                                 * Items are in the same order as they appear in the ProcessInfo
                                 * protocol class, so the Thrift ID can be used to set the fields
                                 * automatically.
                                 */
                                for (int i = 1; i <= items.length; i++) {
                                    if (items[i - 1] == null)
                                        continue;
                                    ProcessInfo._Fields pif = ProcessInfo._Fields.findByThriftId(i);
                                    FieldMetaData pmd = ProcessInfo.metaDataMap.get(pif);
                                    cleaned = origStr(items[i - 1]);
                                    switch (pmd.valueMetaData.type) {
                                        case org.apache.thrift.protocol.TType.STRING:
                                            pi.setFieldValue(pif, cleaned);
                                            break;
                                        case org.apache.thrift.protocol.TType.I32:
                                            try {
                                                pi.setFieldValue(pif, Integer.parseInt(cleaned));
                                            } catch (NumberFormatException e) {
                                                // Log the ProcessInfo field name, not the outer Sample field.
                                                Log.e(TAG, "Could not read " + pmd.fieldName + ": \"" + cleaned + "\" as an int");
                                            }
                                            break;
                                        case org.apache.thrift.protocol.TType.DOUBLE:
                                            try {
                                                pi.setFieldValue(pif, Double.parseDouble(cleaned));
                                            } catch (NumberFormatException e) {
                                                Log.e(TAG, "Could not read " + pmd.fieldName + ": \"" + cleaned + "\" as a double");
                                            }
                                            break;
                                        case org.apache.thrift.protocol.TType.BOOL:
                                            // Boolean.parseBoolean never throws, so no catch block is needed here.
                                            pi.setFieldValue(pif, Boolean.parseBoolean(cleaned));
                                            break;
                                        case org.apache.thrift.protocol.TType.LIST:
                                            List<String> list = new LinkedList<String>();
                                            String[] arr = cleaned.split("#");
                                            for (String sig : arr)
                                                list.add(sig);
                                            pi.setFieldValue(pif, list);
                                            break;
                                        default:
                                    }
                                }
                                piList.add(pi);
                            }
                            s.setPiList(piList);
                        }
                        break;
                    default:
                }
            }
        }
    }
    return s;
}
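readSample applies the inverse of the dump pattern per field: the Thrift type recorded in FieldMetaData selects the parse routine, and setFieldValue writes the result back onto the struct. A hypothetical generic sketch of that dispatch for scalar fields (ThriftFieldSetter is not part of the carat code; parse errors are left to the caller):

import org.apache.thrift.TBase;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.meta_data.FieldMetaData;
import org.apache.thrift.protocol.TType;

final class ThriftFieldSetter {
    // Parse a stored string according to the field's Thrift type and set it on the struct.
    static <T extends TBase<T, F>, F extends TFieldIdEnum> void set(
            T struct, F field, FieldMetaData md, String raw) {
        switch (md.valueMetaData.type) {
            case TType.STRING:
                struct.setFieldValue(field, raw);
                break;
            case TType.I32:
                struct.setFieldValue(field, Integer.parseInt(raw));
                break;
            case TType.DOUBLE:
                struct.setFieldValue(field, Double.parseDouble(raw));
                break;
            case TType.BOOL:
                struct.setFieldValue(field, Boolean.parseBoolean(raw));
                break;
            default:
                // Struct and list fields still need type-specific handling, as in readSample.
        }
    }
}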
use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.
the class ScheduledQueryAnalyzer method buildEmptySchq.
private ScheduledQuery buildEmptySchq() {
    ScheduledQuery ret = new ScheduledQuery();
    ret.setEnabled(conf.getBoolVar(ConfVars.HIVE_SCHEDULED_QUERIES_CREATE_AS_ENABLED));
    ret.setUser(getUserName());
    return ret;
}
use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.
the class ScheduledQueryAnalyzer method analyzeInternal.
@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
    ScheduledQueryMaintenanceWork work;
    ScheduledQueryMaintenanceRequestType type = translateAstType(ast.getToken().getType());
    ScheduledQuery parsedSchq = interpretAstNode(ast);
    ScheduledQuery schq = fillScheduledQuery(type, parsedSchq);
    checkAuthorization(type, schq);
    LOG.info("scheduled query operation: " + type + " " + schq);
    try {
        schq.validate();
    } catch (TException e) {
        throw new SemanticException("ScheduledQuery is invalid", e);
    }
    work = new ScheduledQueryMaintenanceWork(type, schq);
    rootTasks.add(TaskFactory.get(work));
    queryState.setCommandType(toHiveOperation(type));
}
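Here fillScheduledQuery combines the fields parsed from the statement with a base definition (the defaults from buildEmptySchq on create, or the stored query on alter). A simplified, hypothetical sketch (not the actual ScheduledQueryAnalyzer code) of how ScheduledQuery._Fields can drive that overlay:

// Hypothetical helper, simplified; copies only the fields the statement explicitly set.
private static ScheduledQuery overlayParsedFields(ScheduledQuery base, ScheduledQuery parsed) {
    for (ScheduledQuery._Fields f : ScheduledQuery._Fields.values()) {
        if (parsed.isSet(f)) {
            base.setFieldValue(f, parsed.getFieldValue(f));
        }
    }
    return base;
}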