Use of org.hipparchus.geometry.spherical.twod.Vertex in project Orekit by CS-SI.
The class FootprintOverlapDetector, method sample().
/**
 * Sample the region.
 * @param body body on which the geographic zone is defined
 * @param zone geographic zone to consider
 * @param samplingStep linear step used for sampling the geographic zone (in meters)
 * @return sampling points
 * @throws OrekitException if the region cannot be sampled
 */
private static List<SamplingPoint> sample(final OneAxisEllipsoid body, final SphericalPolygonsSet zone,
                                          final double samplingStep)
    throws OrekitException {

    final List<SamplingPoint> sampledZone = new ArrayList<SamplingPoint>();

    // sample the zone boundary
    final List<Vertex> boundary = zone.getBoundaryLoops();
    for (final Vertex loopStart : boundary) {
        int count = 0;
        for (Vertex v = loopStart; count == 0 || v != loopStart; v = v.getOutgoing().getEnd()) {
            ++count;
            final Edge edge = v.getOutgoing();
            final int n = (int) FastMath.ceil(edge.getLength() * body.getEquatorialRadius() / samplingStep);
            for (int i = 0; i < n; ++i) {
                final S2Point intermediate = new S2Point(edge.getPointAt(i * edge.getLength() / n));
                final GeodeticPoint gp = new GeodeticPoint(0.5 * FastMath.PI - intermediate.getPhi(),
                                                           intermediate.getTheta(), 0.0);
                sampledZone.add(new SamplingPoint(body.transform(gp), gp.getZenith()));
            }
        }
    }

    // sample the zone interior
    final EllipsoidTessellator tessellator =
        new EllipsoidTessellator(body, new ConstantAzimuthAiming(body, 0.0), 4);
    final List<List<GeodeticPoint>> gpSample = tessellator.sample(zone, samplingStep, samplingStep);
    for (final List<GeodeticPoint> list : gpSample) {
        for (final GeodeticPoint gp : list) {
            sampledZone.add(new SamplingPoint(body.transform(gp), gp.getZenith()));
        }
    }

    return sampledZone;

}
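The method above is private; in Orekit it is fed by the FootprintOverlapDetector constructor, which receives the body, the geographic zone and the linear sampling step. The following sketch shows one plausible way to build those arguments (imports are omitted as in the snippets above; the WGS84 constants, the EllipsoidTessellator.buildSimpleZone helper and the detector constructor signature are assumptions based on the public Orekit API, not taken from the snippet itself):

// hypothetical setup, WGS84 Earth model in an ITRF body frame
final OneAxisEllipsoid body =
    new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                         Constants.WGS84_EARTH_FLATTENING,
                         FramesFactory.getITRF(IERSConventions.IERS_2010, true));

// a small rectangular geographic zone, corners given in counterclockwise order
final SphericalPolygonsSet zone =
    EllipsoidTessellator.buildSimpleZone(1.0e-10,
                                         new GeodeticPoint(FastMath.toRadians(43.0), FastMath.toRadians(1.0), 0.0),
                                         new GeodeticPoint(FastMath.toRadians(43.0), FastMath.toRadians(2.0), 0.0),
                                         new GeodeticPoint(FastMath.toRadians(44.0), FastMath.toRadians(2.0), 0.0),
                                         new GeodeticPoint(FastMath.toRadians(44.0), FastMath.toRadians(1.0), 0.0));

// a narrow rectangular Field Of View centered on the sensor +Z axis
final FieldOfView fov =
    new FieldOfView(Vector3D.PLUS_K,
                    Vector3D.PLUS_I, FastMath.toRadians(2.5),
                    Vector3D.PLUS_J, FastMath.toRadians(2.5),
                    0.0);

// the detector constructor is expected to call sample(body, zone, samplingStep) internally,
// here with a 50 km linear sampling step
final FootprintOverlapDetector detector = new FootprintOverlapDetector(fov, body, zone, 50000.0);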
Use of org.hipparchus.geometry.spherical.twod.Vertex in project Orekit by CS-SI.
The class FieldOfView, method getFootprint().
/**
 * Get the footprint of the Field Of View on ground.
 * <p>
 * This method assumes the Field Of View is centered on some carrier,
 * which will typically be a spacecraft or a ground station antenna.
 * The points in the footprint boundary loops are all at altitude zero
 * with respect to the ellipsoid; they correspond either to the projection
 * on ground of the edges of the Field Of View, or to points on the body
 * limb if the Field Of View goes past the horizon. The points on the limb
 * see the carrier origin at zero elevation. If the Field Of View is so
 * large that it contains the body entirely, all points will lie on the
 * limb. If the Field Of View looks away from the body, the boundary
 * loops will be an empty list. The points within the footprint loops
 * are sorted in trigonometric order as seen from the carrier.
 * This implies that someone traveling on ground from one point to the
 * next one will have the points visible from the carrier on his left
 * hand side, and the points not visible from the carrier on his right
 * hand side.
 * </p>
 * <p>
 * The truncation of the Field Of View at the limb can induce strange results
 * for complex Fields Of View. If for example a Field Of View is a
 * ring with a hole and part of the ring goes past the horizon, then instead
 * of having a single loop with a C-shaped boundary, the method will
 * still return two loops truncated at the limb, one clockwise and one
 * counterclockwise, hence "closing" the C-shape twice. This behavior
 * is considered acceptable.
 * </p>
 * <p>
 * If the carrier is a spacecraft, then the {@code fovToBody} transform
 * can be computed from a {@link org.orekit.propagation.SpacecraftState}
 * as follows:
 * </p>
 * <pre>
 * Transform inertToBody = state.getFrame().getTransformTo(body.getBodyFrame(), state.getDate());
 * Transform fovToBody   = new Transform(state.getDate(),
 *                                       state.toTransform().getInverse(),
 *                                       inertToBody);
 * </pre>
 * <p>
 * If the carrier is a ground station, located using a topocentric frame
 * and managing its pointing direction using a transform between the
 * dish frame and the topocentric frame, then the {@code fovToBody} transform
 * can be computed as follows:
 * </p>
 * <pre>
 * Transform topoToBody = topocentricFrame.getTransformTo(body.getBodyFrame(), date);
 * Transform topoToDish = ...
 * Transform fovToBody  = new Transform(date,
 *                                      topoToDish.getInverse(),
 *                                      topoToBody);
 * </pre>
 * <p>
 * Only the raw zone is used; the angular margin is ignored here.
 * </p>
 * @param fovToBody transform between the frame in which the Field Of View
 * is defined and the body frame
 * @param body body surface the Field Of View will be projected on
 * @param angularStep step used for boundary loops sampling (radians)
 * @return list of footprint boundary loops (there may be several independent
 * loops if the Field Of View shape is complex)
 * @throws OrekitException if some frame conversion fails or if the carrier is
 * below the body surface
 */
List<List<GeodeticPoint>> getFootprint(final Transform fovToBody,
                                       final OneAxisEllipsoid body,
                                       final double angularStep)
    throws OrekitException {

    final Frame bodyFrame = body.getBodyFrame();
    final Vector3D position = fovToBody.transformPosition(Vector3D.ZERO);
    final double r = position.getNorm();
    if (body.isInside(position)) {
        throw new OrekitException(OrekitMessages.POINT_INSIDE_ELLIPSOID);
    }

    final List<List<GeodeticPoint>> footprint = new ArrayList<List<GeodeticPoint>>();

    final List<Vertex> boundary = zone.getBoundaryLoops();
    for (final Vertex loopStart : boundary) {
        int count = 0;
        final List<GeodeticPoint> loop = new ArrayList<GeodeticPoint>();
        boolean intersectionsFound = false;
        for (Edge edge = loopStart.getOutgoing();
             count == 0 || edge.getStart() != loopStart;
             edge = edge.getEnd().getOutgoing()) {
            ++count;
            final int n = (int) FastMath.ceil(edge.getLength() / angularStep);
            final double delta = edge.getLength() / n;
            for (int i = 0; i < n; ++i) {
                final Vector3D awaySC = new Vector3D(r, edge.getPointAt(i * delta));
                final Vector3D awayBody = fovToBody.transformPosition(awaySC);
                final Line lineOfSight = new Line(position, awayBody, 1.0e-3);
                GeodeticPoint gp = body.getIntersectionPoint(lineOfSight, position, bodyFrame, null);
                if (gp != null &&
                    Vector3D.dotProduct(awayBody.subtract(position),
                                        body.transform(gp).subtract(position)) < 0) {
                    // the intersection is in fact on the half-line pointing
                    // towards the back side, it is a spurious intersection
                    gp = null;
                }
                if (gp != null) {
                    // the line of sight does intersect the body
                    intersectionsFound = true;
                } else {
                    // the line of sight does not intersect body
                    // we use a point on the limb
                    gp = body.transform(body.pointOnLimb(position, awayBody), bodyFrame, null);
                }
                // add the point in front of the list
                // (to ensure the loop will be in trigonometric orientation)
                loop.add(0, gp);
            }
        }
        if (intersectionsFound) {
            // at least some of the points did intersect the body,
            // this loop contributes to the footprint
            footprint.add(loop);
        }
    }

    if (footprint.isEmpty()) {
        // none of the Field Of View loops cross the body
        // either the body is outside of Field Of View, or it is fully contained
        // we check the center
        final Vector3D bodyCenter = fovToBody.getInverse().transformPosition(Vector3D.ZERO);
        if (zone.checkPoint(new S2Point(bodyCenter)) != Region.Location.OUTSIDE) {
            // the body is fully contained in the Field Of View
            // we use the full limb as the footprint
            final Vector3D x = bodyCenter.orthogonal();
            final Vector3D y = Vector3D.crossProduct(bodyCenter, x).normalize();
            final double sinEta = body.getEquatorialRadius() / r;
            final double sinEta2 = sinEta * sinEta;
            final double cosAlpha = (FastMath.cos(angularStep) + sinEta2 - 1) / sinEta2;
            final int n = (int) FastMath.ceil(MathUtils.TWO_PI / FastMath.acos(cosAlpha));
            final double delta = MathUtils.TWO_PI / n;
            final List<GeodeticPoint> loop = new ArrayList<GeodeticPoint>(n);
            for (int i = 0; i < n; ++i) {
                final Vector3D outside = new Vector3D(r * FastMath.cos(i * delta), x,
                                                      r * FastMath.sin(i * delta), y);
                loop.add(body.transform(body.pointOnLimb(position, outside), bodyFrame, null));
            }
            footprint.add(loop);
        }
    }

    return footprint;

}
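Following the javadoc above, a minimal usage sketch for a spacecraft carrier could look as follows, assuming state, fov and earth are an existing SpacecraftState, FieldOfView and OneAxisEllipsoid, and assuming the method is accessible from the calling code:

// build the transform from the Field Of View frame to the body frame,
// exactly as suggested in the javadoc above
final Transform inertToBody = state.getFrame().getTransformTo(earth.getBodyFrame(), state.getDate());
final Transform fovToBody   = new Transform(state.getDate(),
                                            state.toTransform().getInverse(),
                                            inertToBody);

// sample the footprint boundary with a 1 degree angular step
final List<List<GeodeticPoint>> footprint =
    fov.getFootprint(fovToBody, earth, FastMath.toRadians(1.0));

// print each loop as latitude/longitude pairs in degrees
for (final List<GeodeticPoint> loop : footprint) {
    for (final GeodeticPoint gp : loop) {
        System.out.println(FastMath.toDegrees(gp.getLatitude()) + " " +
                           FastMath.toDegrees(gp.getLongitude()));
    }
}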
Use of org.hipparchus.geometry.spherical.twod.Vertex in project Orekit by CS-SI.
The class InsideFinder, method visitLeafNode().
/**
 * {@inheritDoc}
 */
@Override
public void visitLeafNode(final BSPTree<Sphere2D> node) {

    // we have already found a good point
    if (insidePointFirstChoice != null) {
        return;
    }

    if ((Boolean) node.getAttribute()) {

        // transform this inside leaf cell into a simple convex polygon
        final SphericalPolygonsSet convex =
            new SphericalPolygonsSet(node.pruneAroundConvexCell(Boolean.TRUE, Boolean.FALSE, null),
                                     zone.getTolerance());

        // extract the start of the single loop boundary of the convex cell
        final List<Vertex> boundary = convex.getBoundaryLoops();
        final Vertex start = boundary.get(0);
        int n = 0;
        Vector3D sumB = Vector3D.ZERO;
        for (Edge e = start.getOutgoing(); n == 0 || e.getStart() != start; e = e.getEnd().getOutgoing()) {
            sumB = new Vector3D(1, sumB, e.getLength(), e.getCircle().getPole());
            n++;
        }

        final S2Point candidate = new S2Point(sumB);

        // check that the candidate point is really considered inside;
        // it may appear outside if the current leaf cell is very thin
        // and checkPoint selects another (very close) tree leaf node
        if (zone.checkPoint(candidate) == Location.INSIDE) {
            insidePointFirstChoice = candidate;
        } else {
            insidePointSecondChoice = candidate;
        }

    }

}
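For context, a visitor like this is applied by walking the BSP tree of the zone. A minimal sketch is shown below; the InsideFinder constructor taking the zone and the getter returning the selected point are assumptions, since they do not appear in the snippet:

// walk the zone's BSP tree with the finder; visitLeafNode above is called
// once per leaf cell until a first-choice inside point has been recorded
final InsideFinder finder = new InsideFinder(zone);   // constructor assumed
zone.getTree(false).visit(finder);
final S2Point inside = finder.getInsidePoint();       // getter name assumed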