Use of maspack.util.ReaderTokenizer in project artisynth_core by artisynth:
the class AmiraLandmarkReader, method read.
/**
 * Creates an array of 3D points from the landmark data from a Reader.
 *
 * @param reader
 * reader from which to read amira landmark data
 * @param scale
 * factor by which node coordinate data should be scaled
 * @return list of points read from file
 * @throws IOException
 * if there is a problem reading the file
 */
public static Point3d[] read(Reader reader, double scale) throws IOException {
   ArrayList<Point3d> pts = new ArrayList<Point3d>();
   ReaderTokenizer rtok = new ReaderTokenizer(new BufferedReader(reader));
   // treat '.', '/', '@' as word characters so that markers like "@1"
   // scan as single word tokens
   rtok.wordChars("./@");
   while (rtok.nextToken() != ReaderTokenizer.TT_EOF) {
      // the landmark data section is introduced by the marker word "@1"
      if (rtok.ttype == ReaderTokenizer.TT_WORD && rtok.sval.equals("@1")) {
         rtok.nextToken();
         if (rtok.ttype != ReaderTokenizer.TT_NUMBER) {
            // not at data yet, look for next "@1" word
            rtok.pushBack();
            continue;
         }
         // we have found landmark data - scan points until EOF
         rtok.pushBack();
         while (rtok.nextToken() != ReaderTokenizer.TT_EOF) {
            rtok.pushBack();
            Point3d point = new Point3d();
            point.scan(rtok);
            point.scale(scale);
            pts.add(point);
         }
         break;
      }
   }
   // idiomatic list-to-array conversion instead of a manual copy loop
   return pts.toArray(new Point3d[0]);
}
Use of maspack.util.ReaderTokenizer in project artisynth_core by artisynth:
the class MDLMeshIO, method read.
/**
 * Creates a PolygonalMesh based on MDL data read from a Reader. The node
 * coordinate data can be scaled non-uniformly using an optional parameter
 * giving scale values about the x, y, and z axes.
 *
 * @param reader
 * the Reader which references MDL data to be read
 * @param scale
 * if non-null, gives scaling about the x, y, and z axes
 * @return created polygonal mesh
 * @throws IOException
 * if there is a problem reading the file
 */
public static PolygonalMesh read(Reader reader, Vector3d scale) throws IOException {
   PolygonalMesh mesh = new PolygonalMesh();
   ReaderTokenizer rtok = new ReaderTokenizer(new BufferedReader(reader));
   // allow punctuation that appears inside MDL header/filename tokens
   rtok.wordChars("<>.:/\\");
   // read top header
   MDLHeader header = MDLHeader.scan(rtok);
   if (!header.equals(defaultTopHeader)) {
      // throw new IOException ("MDLReader: bad top header, " + header);
   }
   // read file name
   if (rtok.nextToken() != ReaderTokenizer.TT_WORD) {
      throw new IOException("MDLReader: expecting filename, got " + rtok.tokenName());
   }
   // read vertices header information
   header = MDLHeader.scan(rtok);
   if (!header.equals(defaultVerticesHeader)) {
      // throw new IOException ("MDLReader: bad vertices header, " + header);
      System.err.println("MDLReader: bad vertices header, no vertices read...");
      return mesh;
   }
   // read vertices
   int numVertices = rtok.scanInteger();
   Point3d coords = new Point3d();
   for (int i = 0; i < numVertices; i++) {
      coords.x = rtok.scanNumber();
      coords.y = rtok.scanNumber();
      coords.z = rtok.scanNumber();
      if (scale != null) {
         coords.x *= scale.x;
         coords.y *= scale.y;
         coords.z *= scale.z;
      }
      mesh.addVertex(coords);
   }
   // read normals header information; normals are optional, so a scan
   // failure here just means we return the mesh without them
   try {
      header = MDLHeader.scan(rtok);
   } catch (IOException e) {
      System.out.println("MDLReader: no normals read...");
      return mesh;
   }
   if (!header.equals(defaultNormalsHeader)) {
      // throw new IOException ("MDLReader: bad normals header, " + header);
      System.out.println("MDLReader: no normals read...");
      return mesh;
   }
   // read normals
   int numNormals = rtok.scanInteger();
   ArrayList<Vector3d> vn = new ArrayList<Vector3d>(numNormals);
   for (int i = 0; i < numNormals; i++) {
      Vector3d vec = new Vector3d();
      vec.x = rtok.scanNumber();
      vec.y = rtok.scanNumber();
      vec.z = rtok.scanNumber();
      if (scale != null) {
         vec.x *= scale.x;
         vec.y *= scale.y;
         vec.z *= scale.z;
      }
      vn.add(vec);
   }
   // read faces header information; faces are also optional
   try {
      header = MDLHeader.scan(rtok);
   } catch (IOException e) {
      System.out.println("MDLReader: no faces read...");
      return mesh;
   }
   if (!header.equals(defaultFacesHeader)) {
      // throw new IOException ("MDLReader: bad faces header, " + header);
      System.out.println("MDLReader: no faces read...");
      return mesh;
   }
   // read faces; each face record gives three vertex indices followed by
   // three normal indices
   int numFaces = rtok.scanInteger();
   int[] vi = new int[3];
   boolean faceIdxWarningGiven = false;
   int[] normalIdxs = new int[numFaces * 3];
   int k = 0;
   for (int i = 0; i < numFaces; i++) {
      int[] ni = new int[3];
      vi[0] = rtok.scanInteger();
      vi[1] = rtok.scanInteger();
      vi[2] = rtok.scanInteger();
      ni[0] = rtok.scanInteger();
      ni[1] = rtok.scanInteger();
      ni[2] = rtok.scanInteger();
      // BUG FIX: the original condition compared vi[0] != ni[0] three
      // times, so mismatches at indices 1 and 2 were never detected
      if (!faceIdxWarningGiven && (vi[0] != ni[0] || vi[1] != ni[1] || vi[2] != ni[2])) {
         System.out.println("Warning: MDL face idxs don't match normal idxs; ignoring");
         faceIdxWarningGiven = true;
      }
      mesh.addFace(vi);
      normalIdxs[k++] = ni[0];
      normalIdxs[k++] = ni[1];
      normalIdxs[k++] = ni[2];
   }
   mesh.setNormals(vn, normalIdxs);
   return mesh;
}
Use of maspack.util.ReaderTokenizer in project artisynth_core by artisynth:
the class MeshThicken, method loadRegions.
/**
 * Loads a list of regions, enclosed in square brackets, from a file, and
 * replaces the current regions with them on success. Errors are reported
 * via a stack trace and leave the current regions unchanged.
 *
 * @param file file from which to read the region list
 */
public void loadRegions(File file) {
   // try-with-resources ensures the reader is closed; the original
   // leaked the FileReader on both normal and exceptional exit
   try (BufferedReader br = new BufferedReader(new FileReader(file))) {
      ArrayList<Region> regions = new ArrayList<Region>();
      ReaderTokenizer rtok = new ReaderTokenizer(br);
      rtok.scanToken('[');
      while (rtok.nextToken() != ']') {
         rtok.pushBack();
         Region region = new Region();
         region.scan(rtok);
         regions.add(region);
      }
      // only replace the existing regions once the whole file has
      // been scanned successfully
      myRegionFile = file;
      clearRegions();
      for (Region region : regions) {
         addRegion(region);
      }
   } catch (Exception e) {
      e.printStackTrace();
   }
}
Use of maspack.util.ReaderTokenizer in project artisynth_core by artisynth:
the class NURBSCurve2dTest, method readCurve.
/**
 * Reads a NURBS curve from the named file.
 *
 * @param fileName path of the file containing the curve definition
 * @return the curve that was read, or null if reading failed
 */
public NURBSCurve2d readCurve(String fileName) {
   // try-with-resources closes the reader; the original leaked the
   // FileReader on both success and failure
   try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
      ReaderTokenizer rtok = new ReaderTokenizer(br);
      NURBSCurve2d curve = new NURBSCurve2d();
      curve.read(rtok);
      return curve;
   } catch (Exception e) {
      e.printStackTrace();
      return null;
   }
}
Use of maspack.util.ReaderTokenizer in project artisynth_core by artisynth:
the class VtkAsciiReader, method read.
/**
 * Populates a FEM model from ASCII VTK data supplied by a Reader. Scans
 * for a DATASET declaration of type UNSTRUCTURED_GRID and builds the
 * model from the node and element data that follows.
 *
 * @param model model to populate
 * @param reader reader supplying the VTK data
 * @return the populated model, or null if no usable dataset was found
 * @throws IOException if there is a problem reading the data
 */
public static FemModel3d read(FemModel3d model, Reader reader) throws IOException {
   ReaderTokenizer rtok = new ReaderTokenizer(reader);
   ArrayList<Point3d> nodeList = new ArrayList<Point3d>();
   ArrayList<ArrayList<Integer>> elemList = new ArrayList<ArrayList<Integer>>();
   rtok.eolIsSignificant(false);
   // read until we find a dataset
   while (rtok.nextToken() != ReaderTokenizer.TT_EOF) {
      if (rtok.ttype == ReaderTokenizer.TT_WORD) {
         if (rtok.sval.equalsIgnoreCase("DATASET")) {
            // BUG FIX: verify the next token is a word before reading
            // sval; the original dereferenced sval unconditionally and
            // could throw NullPointerException on a number or EOF
            if (rtok.nextToken() != ReaderTokenizer.TT_WORD) {
               System.err.println(
                  "Error: expected dataset type, got " + rtok.tokenName());
               continue;
            }
            String dataType = rtok.sval;
            if (dataType.equalsIgnoreCase("UNSTRUCTURED_GRID")) {
               parseFemData(rtok, nodeList, elemList);
               return buildFem(model, nodeList, elemList);
            } else {
               System.err.println("Error: unknown dataset type '" + dataType + "'");
            }
         }
      }
   }
   return null;
}
Aggregations