Example usage of com.alibaba.alink.operator.stream.dataproc.TypeConvertStreamOp in the Alink project (by Alibaba): class DLTypeUtils, method doubleColumnsToFloat.
/**
 * Converts every DOUBLE-typed column of the given stream operator to FLOAT.
 * <p>
 * Columns of other types are left untouched. If the input has no DOUBLE
 * columns, the operator is returned unchanged (no conversion op is linked).
 *
 * @param input the upstream operator whose schema is inspected
 * @return the original operator, or a {@link TypeConvertStreamOp} linked
 *         downstream of it with all DOUBLE columns converted to FLOAT
 */
public static StreamOperator<?> doubleColumnsToFloat(StreamOperator<?> input) {
    List<String> doubleColNames = new ArrayList<>();
    String[] colNames = input.getColNames();
    TypeInformation<?>[] colTypes = input.getColTypes();
    // colNames and colTypes are parallel arrays; iterate by index to pair them.
    for (int i = 0; i < colTypes.length; i += 1) {
        if (colTypes[i].equals(Types.DOUBLE)) {
            doubleColNames.add(colNames[i]);
        }
    }
    if (!doubleColNames.isEmpty()) {
        // Carry the MLEnvironmentId over so the converter runs in the same
        // ML environment as the input operator.
        TypeConvertStreamOp typeConvertStreamOp = new TypeConvertStreamOp()
            .setTargetType(TargetType.FLOAT)
            .setSelectedCols(doubleColNames.toArray(new String[0]))
            .setMLEnvironmentId(input.getMLEnvironmentId());
        input = typeConvertStreamOp.linkFrom(input);
    }
    return input;
}
Example usage of com.alibaba.alink.operator.stream.dataproc.TypeConvertStreamOp in the Alink project (by Alibaba): class BaseDLStreamOp, method linkFrom.
// User's main script is renamed to `userScriptMainFileName`, and `main` is called
/**
 * Links the deep-learning launcher to the (single) input stream.
 * <p>
 * Pipeline: optionally project to the selected columns, convert DOUBLE
 * columns to FLOAT (DL scripts expect float features), package user files,
 * then link a {@link DLLauncherStreamOp} configured from this op's params.
 *
 * @param inputs exactly one upstream operator is expected
 * @return this operator, with its output table set to the launcher's output
 */
@Override
public T linkFrom(StreamOperator<?>... inputs) {
    initDLSystemParams();
    StreamOperator<?> in = checkAndGetFirst(inputs);
    if (null != getSelectedCols()) {
        in = in.select(getSelectedCols());
    }
    // Converts all DOUBLE columns to FLOAT. A second inline double-to-float
    // scan previously followed this call, but it duplicated exactly what
    // DLTypeUtils.doubleColumnsToFloat already does — after this call no
    // DOUBLE columns remain, so that pass was a no-op and has been removed.
    in = DLTypeUtils.doubleColumnsToFloat(in);
    ExternalFilesConfig externalFilesConfig = getUserFiles()
        .addFilePaths(resPyFiles)
        .addRenameMap(getMainScriptFile(), userScriptMainFileName);
    DLLauncherStreamOp dlLauncherStreamOp = new DLLauncherStreamOp()
        .setOutputSchemaStr(getOutputSchemaStr())
        .setEntryFunc(entryFuncName)
        .setMainScriptFile(mainScriptFileName)
        .setUserFiles(externalFilesConfig)
        .setUserParams(getUserParams())
        .setNumWorkers(getNumWorkers())
        .setNumPSs(numPss)
        .setPythonEnv(getPythonEnv())
        .linkFrom(in);
    setOutputTable(dlLauncherStreamOp.getOutputTable());
    return (T) this;
}
Example usage of com.alibaba.alink.operator.stream.dataproc.TypeConvertStreamOp in the Alink project (by Alibaba): class TensorFlow2StreamOpTest, method testAllReduce.
@Test
public void testAllReduce() throws Exception {
    // Remember the current parallelism so the test can restore it afterwards.
    int savedStreamParallelism = MLEnvironmentFactory.getDefault().getStreamExecutionEnvironment().getParallelism();
    AlinkGlobalConfiguration.setPrintProcessInfo(true);
    // Fetch the TF 2.3.1 runtime plugin required by the DL launcher.
    PluginDownloader pluginDownloader = AlinkGlobalConfiguration.getPluginDownloader();
    RegisterKey registerKey = DLEnvConfig.getRegisterKey(Version.TF231);
    pluginDownloader.downloadPlugin(registerKey.getName(), registerKey.getVersion());
    try {
        StreamOperator.setParallelism(3);
        DLLauncherStreamOp.DL_CLUSTER_START_TIME = 30 * 1000;
        StreamOperator<?> source = new RandomTableSourceStreamOp().setMaxRows(1000L).setNumCols(10);
        String[] colNames = source.getColNames();
        source = source.select("*, case when RAND() > 0.5 then 1. else 0. end as label");
        source = source.link(new TypeConvertStreamOp().setSelectedCols("num").setTargetType(TargetType.DOUBLE));
        String label = "label";
        Map<String, Object> userParams = new HashMap<>();
        userParams.put("featureCols", JsonConverter.toJson(colNames));
        userParams.put("labelCol", label);
        userParams.put("batch_size", 16);
        userParams.put("num_epochs", 1);
        TensorFlow2StreamOp tensorFlow2StreamOp = new TensorFlow2StreamOp()
            .setUserFiles(new String[] { "res:///tf_dnn_stream.py" })
            .setMainScriptFile("res:///tf_dnn_stream.py")
            .setUserParams(JsonConverter.toJson(userParams))
            .setNumWorkers(3)
            .setNumPSs(0)
            .setOutputSchemaStr("model_id long, model_info string")
            .linkFrom(source);
        tensorFlow2StreamOp.print();
        StreamOperator.execute();
    } finally {
        // Restore parallelism even when the job fails, so a failure here does
        // not leak a parallelism of 3 into subsequent tests.
        StreamOperator.setParallelism(savedStreamParallelism);
    }
}
Example usage of com.alibaba.alink.operator.stream.dataproc.TypeConvertStreamOp in the Alink project (by Alibaba): class TensorFlow2StreamOpTest, method testWithAutoWorkersPSs.
@Test
public void testWithAutoWorkersPSs() throws Exception {
    AlinkGlobalConfiguration.setPrintProcessInfo(true);
    // Fetch the TF 2.3.1 runtime plugin required by the DL launcher.
    PluginDownloader pluginDownloader = AlinkGlobalConfiguration.getPluginDownloader();
    RegisterKey registerKey = DLEnvConfig.getRegisterKey(Version.TF231);
    pluginDownloader.downloadPlugin(registerKey.getName(), registerKey.getVersion());
    // Remember the current parallelism so the test can restore it afterwards.
    int savedStreamParallelism = MLEnvironmentFactory.getDefault().getStreamExecutionEnvironment().getParallelism();
    try {
        StreamOperator.setParallelism(3);
        DLLauncherStreamOp.DL_CLUSTER_START_TIME = 30 * 1000;
        StreamOperator<?> source = new RandomTableSourceStreamOp().setMaxRows(1000L).setNumCols(10);
        String[] colNames = source.getColNames();
        source = source.select("*, case when RAND() > 0.5 then 1. else 0. end as label");
        source = source.link(new TypeConvertStreamOp().setSelectedCols("num").setTargetType(TargetType.DOUBLE));
        String label = "label";
        Map<String, Object> userParams = new HashMap<>();
        userParams.put("featureCols", JsonConverter.toJson(colNames));
        userParams.put("labelCol", label);
        userParams.put("batch_size", 16);
        userParams.put("num_epochs", 1);
        // Workers/PSs are intentionally left unset to exercise auto-configuration.
        TensorFlow2StreamOp tensorFlow2StreamOp = new TensorFlow2StreamOp()
            .setUserFiles(new String[] { "res:///tf_dnn_stream.py" })
            .setMainScriptFile("res:///tf_dnn_stream.py")
            .setUserParams(JsonConverter.toJson(userParams))
            .setOutputSchemaStr("model_id long, model_info string")
            .linkFrom(source);
        tensorFlow2StreamOp.print();
        StreamOperator.execute();
    } finally {
        // Restore parallelism even when the job fails, so a failure here does
        // not leak a parallelism of 3 into subsequent tests.
        StreamOperator.setParallelism(savedStreamParallelism);
    }
}
Example usage of com.alibaba.alink.operator.stream.dataproc.TypeConvertStreamOp in the Alink project (by Alibaba): class TensorFlowStreamOpTest, method testWithAutoWorkersPSs.
@Test
public void testWithAutoWorkersPSs() throws Exception {
    AlinkGlobalConfiguration.setPrintProcessInfo(true);
    // Fetch the TF 1.15 runtime plugin required by the DL launcher.
    PluginDownloader pluginDownloader = AlinkGlobalConfiguration.getPluginDownloader();
    RegisterKey registerKey = DLEnvConfig.getRegisterKey(Version.TF115);
    pluginDownloader.downloadPlugin(registerKey.getName(), registerKey.getVersion());
    // Remember the current parallelism so the test can restore it afterwards.
    int savedStreamParallelism = MLEnvironmentFactory.getDefault().getStreamExecutionEnvironment().getParallelism();
    try {
        StreamOperator.setParallelism(3);
        DLLauncherStreamOp.DL_CLUSTER_START_TIME = 30 * 1000;
        StreamOperator<?> source = new RandomTableSourceStreamOp().setMaxRows(1000L).setNumCols(10);
        String[] colNames = source.getColNames();
        source = source.select("*, case when RAND() > 0.5 then 1. else 0. end as label");
        source = source.link(new TypeConvertStreamOp().setSelectedCols("num").setTargetType(TargetType.DOUBLE));
        String label = "label";
        Map<String, Object> userParams = new HashMap<>();
        userParams.put("featureCols", JsonConverter.toJson(colNames));
        userParams.put("labelCol", label);
        userParams.put("batch_size", 16);
        userParams.put("num_epochs", 1);
        // Workers/PSs are intentionally left unset to exercise auto-configuration.
        TensorFlowStreamOp tensorFlowStreamOp = new TensorFlowStreamOp()
            .setUserFiles(new String[] { "res:///tf_dnn_stream.py" })
            .setMainScriptFile("res:///tf_dnn_stream.py")
            .setUserParams(JsonConverter.toJson(userParams))
            .setOutputSchemaStr("model_id long, model_info string")
            .linkFrom(source);
        tensorFlowStreamOp.print();
        StreamOperator.execute();
    } finally {
        // Restore parallelism even when the job fails, so a failure here does
        // not leak a parallelism of 3 into subsequent tests.
        StreamOperator.setParallelism(savedStreamParallelism);
    }
}
Aggregations