Use of maspack.util.ReaderTokenizer in project artisynth_core (by artisynth): class Main, method loadProbesFile.
/**
 * Loads the probes described in a file into the current workspace model.
 *
 * @param file file containing probe information
 * @return {@code true} if the probes were loaded; {@code false} if there is
 * no current workspace to load them into
 * @throws IOException if an I/O or syntax error occurred
 */
public boolean loadProbesFile(File file) throws IOException {
    if (getWorkspace() == null) {
        return false;
    }
    ReaderTokenizer rtok = ArtisynthIO.newReaderTokenizer(file);
    try {
        getWorkspace().scanProbes(rtok);
    } finally {
        // close even if scanProbes() throws, so the underlying
        // file handle is not leaked on a syntax error
        rtok.close();
    }
    myProbeFile = file;
    if (myTimeline != null) {
        myTimeline.requestResetAll();
        // check the model zoom level and set the timeline zoom accordingly
        myTimeline.updateTimeDisplay(0);
        myTimeline.updateComponentSizes();
        myTimeline.automaticProbesZoom();
    }
    return true;
}
Use of maspack.util.ReaderTokenizer in project artisynth_core (by artisynth): class ModelHistory, method read.
/**
 * Reads model history records from a file. Each record has the format
 * {@code longName "short name" time [args ...]}, where {@code time} is a
 * long millisecond value, and {@code #} begins a comment.
 *
 * @param file history file to read
 * @throws IOException if an I/O or parse error occurred
 */
public void read(File file) throws IOException {
    ReaderTokenizer rtok = new ReaderTokenizer(new FileReader(file));
    try {
        rtok.commentChar('#');
        rtok.eolIsSignificant(true);
        rtok.wordChar('.');  // allow '.' inside long (package-style) names
        rtok.nextToken();
        while (rtok.ttype == ReaderTokenizer.TT_WORD) {
            String longName = rtok.sval;
            String shortName = rtok.scanQuotedString('"');
            long tval = rtok.scanLong();
            String[] args = readToEOL(rtok);
            Date dateTime = new Date(tval);
            update(new ModelInfo(longName, shortName, args), dateTime);
            // advance to the first token of the next record (or EOF)
            rtok.nextToken();
        }
    } finally {
        // release the underlying FileReader even if a scan fails mid-record
        rtok.close();
    }
}
Use of maspack.util.ReaderTokenizer in project artisynth_core (by artisynth): class TetGenReader, method readFaces.
/**
 * Builds a polygonal mesh from TetGen-style node and face data.
 * Node coordinates may optionally be scaled per-axis.
 *
 * @param scale if non-null, per-axis scale factors applied to node coordinates
 * @param nodeReader source of node (vertex) data
 * @param faceReader source of face (triangle) data
 * @return the assembled mesh
 * @throws IOException if a read or parse error occurs
 */
public static PolygonalMesh readFaces(Vector3d scale, Reader nodeReader, Reader faceReader) throws IOException {
    PolygonalMesh mesh = new PolygonalMesh();

    ReaderTokenizer nodeTok = new ReaderTokenizer(new BufferedReader(nodeReader));
    // skip the four header tokens (presumably count/dim/attr/marker — TODO confirm)
    for (int k = 0; k < 4; k++) {
        nodeTok.nextToken();
    }
    // each iteration consumes a node index token, then three coordinates
    while (nodeTok.nextToken() != ReaderTokenizer.TT_EOF) {
        Point3d pnt = new Point3d();
        for (int axis = 0; axis < 3; axis++) {
            pnt.set(axis, nodeTok.scanNumber());
        }
        if (scale != null) {
            pnt.x *= scale.x;
            pnt.y *= scale.y;
            pnt.z *= scale.z;
        }
        mesh.addVertex(pnt.x, pnt.y, pnt.z);
    }

    ReaderTokenizer faceTok = new ReaderTokenizer(new BufferedReader(faceReader));
    // skip the two face-file header tokens
    faceTok.nextToken();
    faceTok.nextToken();
    // each iteration consumes a face index token, three vertex indices,
    // and one trailing value that is discarded
    while (faceTok.nextToken() != ReaderTokenizer.TT_EOF) {
        Vertex3d[] verts = new Vertex3d[3];
        for (int k = 0; k < verts.length; k++) {
            verts[k] = mesh.getVertices().get(faceTok.scanInteger());
        }
        faceTok.scanInteger(); // discard
        mesh.addFace(verts);
    }
    return mesh;
}
Use of maspack.util.ReaderTokenizer in project artisynth_core (by artisynth): class UCDReader, method read.
/**
 * Creates an FemModel with uniform density based on UCD data read from a
 * Reader. The node coordinate data can be scaled non-uniformly using an
 * optional parameter giving scale values about the x, y, and z axes.
 *
 * @param model FEM model to be populated by UCD data; a new model is
 * created if this is {@code null}
 * @param reader reader from which to read UCD data
 * @param density density of the model (a negative value selects a
 * default density of 1)
 * @param scale if non-null, gives scaling about the x, y, and z axes
 * @throws IOException if there is a problem reading the file
 */
public static FemModel3d read(FemModel3d model, Reader reader, double density, Vector3d scale) throws IOException {
    if (model == null) {
        model = new FemModel3d();
    } else {
        model.clear();
    }
    model.setDensity(density >= 0 ? density : 1);

    ReaderTokenizer rtok = new ReaderTokenizer(new BufferedReader(reader));
    // UCD header: node count, cell count, and three auxiliary data counts
    int numNodes = rtok.scanInteger();
    int numCells = rtok.scanInteger();
    int numNData = rtok.scanInteger();
    int numCData = rtok.scanInteger();
    int numMData = rtok.scanInteger();
    if (numNData > 0) {
        System.out.println("Warning: UCD data contains extra node data; ignoring");
    }
    if (numCData > 0) {
        System.out.println("Warning: UCD data contains extra cell data; ignoring");
    }
    if (numMData > 0) {
        System.out.println("Warning: UCD data contains extra model data; ignoring");
    }

    // read nodes; warn once if ids are not 0..numNodes-1 in order
    boolean warnedNodeIds = false;
    Point3d coords = new Point3d();
    for (int i = 0; i < numNodes; i++) {
        int nodeId = rtok.scanInteger();
        if (nodeId != i && !warnedNodeIds) {
            System.out.println("Warning: UCD data contains non-sequential nodeIds; ignoring");
            warnedNodeIds = true;
        }
        coords.x = rtok.scanNumber();
        coords.y = rtok.scanNumber();
        coords.z = rtok.scanNumber();
        if (scale != null) {
            coords.x *= scale.x;
            coords.y *= scale.y;
            coords.z *= scale.z;
        }
        model.addNode(new FemNode3d(coords));
    }

    // read cells; warn once on non-sequential ids or material references
    boolean warnedCellIds = false;
    boolean warnedMat = false;
    for (int i = 0; i < numCells; i++) {
        int cellId = rtok.scanInteger();
        if (cellId != i && !warnedCellIds) {
            System.out.println("Warning: UCD data contains nonsequential cellIds; ignoring");
            warnedCellIds = true;
        }
        int mat = rtok.scanInteger();
        if (mat != 0 && !warnedMat) {
            System.out.println("Warning: UCD data contains material references for cells; ignoring");
            warnedMat = true;
        }
        String cellType = rtok.scanWord();
        switch (cellType) {
            case "tet":
                readTet(rtok, cellId, model);
                break;
            case "hex":
                readHex(rtok, cellId, model);
                break;
            default:
                throw new IOException("Element type '" + cellType + "' is not supported");
        }
    }
    return model;
}
Use of maspack.util.ReaderTokenizer in project artisynth_core (by artisynth): class AbaqusReader, method readFile.
/**
 * Parses an Abaqus input stream, collecting node coordinates and element
 * node lists into the supplied maps. {@code *INCLUDE} directives are
 * resolved against {@code includeDirs} and read recursively.
 *
 * @param reader source of Abaqus data
 * @param nodeMap receives nodeId -> position entries
 * @param elemMap receives elemId -> node-id-list entries
 * @param includeDirs directories searched for INCLUDE files
 * @throws IOException if an I/O or parse error occurs, or if an INCLUDE
 * file cannot be found
 */
private static void readFile(Reader reader, LinkedHashMap<Integer, Point3d> nodeMap, LinkedHashMap<Integer, ArrayList<Integer>> elemMap, File[] includeDirs) throws IOException {
    ReaderTokenizer rtok = new ReaderTokenizer(new BufferedReader(reader));
    rtok.eolIsSignificant(true);
    rtok.wordChar('*');  // keywords like *NODE tokenize as single words
    // ignore commas
    rtok.whitespaceChar(',');
    FileSection mySection = FileSection.OTHER;
    ElemType myElemType = ElemType.UNKNOWN;
    int nodeId = 0;
    int elemId = 0;
    while (rtok.nextToken() != ReaderTokenizer.TT_EOF) {
        // determine type
        if (rtok.ttype == ReaderTokenizer.TT_WORD) {
            if (rtok.sval.startsWith(COMMENT)) {
                // ignore
            } else if (rtok.sval.charAt(0) == KEYWORD) {
                // potentially change mode
                String keyword = rtok.sval.substring(1);
                if (keyword.equalsIgnoreCase("NODE")) {
                    mySection = FileSection.NODE;
                } else if (keyword.equalsIgnoreCase("ELEMENT")) {
                    mySection = FileSection.ELEM;
                    // determine element type from the TYPE= key on this line
                    String line = readLine(rtok);
                    String type = parseKey("TYPE=", line);
                    myElemType = ElemType.UNKNOWN;
                    for (ElemType et : ElemType.values()) {
                        if (et.getString().equalsIgnoreCase(type)) {
                            myElemType = et;
                            break;
                        }
                    }
                    if (myElemType == ElemType.UNKNOWN) {
                        System.err.println("Warning: unknown element type '" + type + "'");
                    }
                } else if (keyword.equalsIgnoreCase("INCLUDE")) {
                    String line = readLine(rtok);
                    String fileName = parseKey("INPUT=", line);
                    // find file
                    File input = findFile(fileName, includeDirs);
                    if (input == null) {
                        throw new IOException("Cannot find INCLUDE file '" + fileName + "'");
                    }
                    // try-with-resources closes the reader whether or not
                    // the recursive read throws (replaces the old redundant
                    // catch-and-rethrow plus manual close)
                    try (FileReader inputReader = new FileReader(input)) {
                        readFile(inputReader, nodeMap, elemMap, includeDirs);
                    }
                } else {
                    mySection = FileSection.OTHER;
                    System.out.println("Warning: ignoring section '" + keyword + "'");
                }
            }
            // skip to end-of-line
            toEOL(rtok);
        } else {
            rtok.pushBack();
            // non-keyword line: interpret according to the current section
            switch(mySection) {
                case ELEM:
                    elemId = rtok.scanInteger();
                    ArrayList<Integer> nodes = new ArrayList<Integer>();
                    while (rtok.nextToken() == ReaderTokenizer.TT_NUMBER) {
                        nodes.add((int) rtok.nval);
                    }
                    elemMap.put(elemId, nodes);
                    toEOL(rtok);
                    break;
                case NODE:
                    nodeId = rtok.scanInteger();
                    double x = rtok.scanNumber();
                    double y = rtok.scanNumber();
                    double z = rtok.scanNumber();
                    nodeMap.put(nodeId, new Point3d(x, y, z));
                    toEOL(rtok);
                    break;
                case OTHER:
                    toEOL(rtok);
            }
        }
    }
}
Aggregations: end of collected ReaderTokenizer usage examples.