Commit 15df4ed5 authored by Daniel Eggert

Alterations addressing polygons intersecting the dateline; also threads min/max processing-level (proclevel) filters through the scene count, list, and metadata queries.

parent 664a213c
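The geometric core of this change sits in SpatialSceneIndex: when a scene footprint is flagged as intersecting the dateline, negative longitudes are shifted by +360 degrees before the bounding envelope is expanded, so the envelope stays a tight box instead of spanning nearly the whole globe. A minimal sketch of that trick follows (the class and method names are illustrative, not the commit's exact API; only the wraparound logic mirrors the diff):

import com.vividsolutions.jts.geom.Envelope;

public class DatelineEnvelopeSketch {
    // Build an index envelope for a footprint; for dateline-crossing scenes,
    // unwrap western-hemisphere longitudes into the 180..360 range first.
    static Envelope envelopeFor(double[][] points, boolean onDateline) {
        Envelope env = new Envelope();
        for (double[] pt : points) {
            double x = pt[0];
            if (onDateline && x < 0) {
                x += 360; // same wraparound handling as in initFromDB below
            }
            env.expandToInclude(x, pt[1]);
        }
        return env;
    }

    public static void main(String[] args) {
        // a footprint straddling +/-180 degrees longitude
        double[][] footprint = { { 179.2, 52.0 }, { -179.1, 52.1 }, { -179.4, 50.3 }, { 178.9, 50.2 } };
        // without the shift the envelope spans lon -179.4..179.2 (nearly global);
        // with it, 178.9..180.9 -- a tight box around the actual scene
        System.out.println(envelopeFor(footprint, true));
    }
}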
@@ -15,12 +15,12 @@
<include>**/*.xml</include>
</includes>
</resource>
<resource>
<directory>src/main/resources</directory>
<excludes>
<exclude>*.properties</exclude>
</excludes>
</resource>
<!-- <resource> -->
<!-- <directory>src/main/resources</directory> -->
<!-- <excludes> -->
<!-- <exclude>*.properties</exclude> -->
<!-- </excludes> -->
<!-- </resource> -->
</resources>
@@ -33,12 +33,23 @@
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
</plugin>
<!-- <plugin> -->
<!-- <groupId>org.apache.maven.plugins</groupId> -->
<!-- <artifactId>maven-surefire-plugin</artifactId> -->
<!-- <version>2.18.1</version> -->
<!-- </plugin> -->
<!-- <plugin> -->
<!-- <groupId>org.apache.maven.plugins</groupId> -->
<!-- <artifactId>maven-jar-plugin</artifactId> -->
<!-- <version>3.0.2</version> -->
<!-- <configuration> -->
<!-- <excludes> -->
<!-- <exclude>**/*.properties</exclude> -->
<!-- </excludes> -->
<!-- </configuration> -->
<!-- </plugin> -->
</plugins>
</build>
<repositories>
@@ -13,7 +13,6 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -757,7 +756,7 @@ public class SceneDatabase {
* @param polygon
* @return
*/
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover,
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover, final byte minProclevel,
final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
@@ -773,7 +772,7 @@
// calculate the area, width and height of the region of interest polygon
// getCompactMeasurementsForPolygon(polygon, buffer);
spatialIndex.countScenesGroupedByDatasets(e, start, end, seasonCode, maxCloudcover, buffer);
spatialIndex.countScenesGroupedByDatasets(e, start, end, seasonCode, maxCloudcover, minProclevel, buffer);
}
}
@@ -820,7 +819,7 @@
* Stores a compact representation of the resulting scene query in the given buffer
*/
public void getCompactSceneListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
@@ -833,7 +832,7 @@
e.expandToInclude(polygon[i], polygon[i + 1]);
}
spatialIndex.getFullSceneDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, buffer);
spatialIndex.getFullSceneDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
}
}
@@ -849,8 +848,8 @@
* [long] timestamp<br>
* [byte] cloudcover
*/
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
@@ -863,15 +862,15 @@
e.expandToInclude(polygon[i], polygon[i + 1]);
}
spatialIndex.getSceneMetaDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, buffer);
spatialIndex.getSceneMetaDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
}
}
/**
* Stores a compact representation of the resulting scene query in the given buffer. Here only the scene bounds are stored in the buffer.
*/
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
@@ -884,7 +883,7 @@
e.expandToInclude(polygon[i], polygon[i + 1]);
}
spatialIndex.getSceneBoundsForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, buffer);
spatialIndex.getSceneBoundsForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
}
}
@@ -1096,8 +1095,7 @@
if (datasetid != 249 && affectedRows > 1) {
throw new IllegalStateException("More than one row (" + affectedRows + ") was affected by the filename update");
} else if (affectedRows == 0) {
throw new IllegalArgumentException(
"The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
throw new IllegalArgumentException("The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
}
} catch (SQLException e) {
e.printStackTrace();
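All of the *FromIndex query methods above share the same pattern before probing the spatial index: the region-of-interest polygon arrives as a flat [x0, y0, x1, y1, ...] array and is collapsed to its bounding envelope. The loop header is elided in this diff; pairwise stepping is the obvious reading, sketched here:

// sketch of the envelope construction repeated in each *FromIndex method
Envelope e = new Envelope();
for (int i = 0; i < polygon.length; i += 2) {
    e.expandToInclude(polygon[i], polygon[i + 1]);
}
spatialIndex.getFullSceneDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);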
@@ -25,7 +25,7 @@ public interface SqlStatements {
+ "WHERE ST_Intersects(ST_GeographyFromText('SRID=4326; POLYGON((' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || '))'), bounds) "
+ "GROUP BY 1 " + "ORDER BY 1;";
public static final String SCENES_QUERY = "SELECT id, datasetid, acquisitiondate, bounds, cloudcover FROM scenes;";
public static final String SCENES_QUERY = "SELECT id, datasetid, acquisitiondate, bounds, cloudcover, proc_level, ST_INTERSECTS(bounds, ST_GeogFromText('SRID=4326;LINESTRING(-180 -89.999999, -180 90)')) FROM scenes;";
public static final String SCENE_ENTITYID_QUERY = "SELECT entityid FROM scenes WHERE id=?;";
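The rewritten SCENES_QUERY does the dateline detection on the database side: the extra boolean column is ST_Intersects of each scene's bounds with a linestring along the antimeridian (starting at -89.999999 rather than -90, presumably to avoid a degenerate geography at the pole). The commit does not show the reader for the two new columns, but given the SELECT order a plausible consumer looks like this hypothetical sketch (Scene.proclevel and Scene.onDateline exist per the index code elsewhere in this commit; the column types are assumptions):

// hypothetical reader for the two columns appended to SCENES_QUERY
static void readNewColumns(java.sql.ResultSet rs, Scene s) throws java.sql.SQLException {
    s.proclevel = rs.getByte(6);     // proc_level
    s.onDateline = rs.getBoolean(7); // footprint touches the +/-180 meridian
}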
@@ -21,18 +21,20 @@ public interface SceneIndex {
public short datasetid;
public long timestamp;
public byte cloudcover;
public byte proclevel;
public double[] bounds;
}
// public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e);
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte maxCloudcover);
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte maxCloudcover, byte minProclevel);
// public Map<Short, List<Integer>> getScenesGroupedByDatasets(Envelope e);
//
// public Map<Short, List<Integer>> getScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end);
public List<Integer> getScenesForDataset(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte minCloudcover, byte maxCloudcover, short datasetid);
public List<Integer> getScenesForDataset(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel);
public SceneData getSceneData(SceneData sceneData, int dataidx);
@@ -17,10 +17,8 @@ import org.postgis.Polygon;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vividsolutions.jts.algorithm.ConvexHull;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.index.ItemVisitor;
import com.vividsolutions.jts.index.strtree.STRtree;
@@ -45,10 +43,10 @@ public class SpatialSceneIndex implements SceneIndex {
throw new RuntimeException("The thread visiting the tree nodes got interrupted.");
}
int idx = (int) item;
visitScene(idx, ids[idx], datasetids[idx], timestamps[idx], cloudcover[idx], bounds[idx]);
visitScene(idx, ids[idx], datasetids[idx], timestamps[idx], cloudcover[idx], proclevel[idx], bounds[idx]);
}
public abstract void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, double[] bound);
public abstract void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound);
}
@@ -60,7 +58,7 @@
private final short[] datasetids;
private final long[] timestamps;
private final byte[] cloudcover;
// private final byte[] proclevel;
private final byte[] proclevel;
private final double[][] bounds;
public static SceneIndex getInstance() {
@@ -94,46 +92,15 @@
datasetids = new short[sceneCount];
timestamps = new long[sceneCount];
cloudcover = new byte[sceneCount];
// proclevel = new byte[sceneCount];
proclevel = new byte[sceneCount];
bounds = new double[sceneCount][];
initFromDB(db);
}
// @Override
// public boolean equals(Object obj) {
// if (obj == null || !(obj instanceof SpatialSceneIndex)) {
// return false;
// }
//
// SpatialSceneIndex other = (SpatialSceneIndex) obj;
//
// if (!Arrays.equals(this.ids, other.ids)) {
// return false;
// }
//
// if (!Arrays.equals(this.datasetids, other.datasetids)) {
// return false;
// }
//
// if (!Arrays.equals(this.timestamps, other.timestamps)) {
// return false;
// }
//
// if (!Arrays.equals(this.cloudcover, other.cloudcover)) {
// return false;
// }
//
// if (!Arrays.deepEquals(this.bounds, other.bounds)) {
// return false;
// }
//
// return true;
// }
private void initFromDB(SceneDatabase db) {
int count = 0;
GeometryFactory gf = new GeometryFactory();
// GeometryFactory gf = new GeometryFactory();
int numScenes = 0;
final int totalScenes = ids.length;
@@ -170,16 +137,20 @@
Point[] points = ((LinearRing) poly.getSubGeometry(0)).getPoints();
// add the number of points for this polygon/ring and the actual coordinates
for (Point pt : points) {
env.expandToInclude(pt.x, pt.y);
// keep original coordinates
coords.add(new Coordinate(pt.x, pt.y));
// build envelope, deal with dateline coordinate wraparound
if (s.onDateline && pt.x < 0) {
pt.x += 360;
}
env.expandToInclude(pt.x, pt.y);
}
}
ConvexHull hull = new ConvexHull(coords.toArray(new Coordinate[coords.size()]), gf);
Coordinate[] hullCoords = hull.getConvexHull().getCoordinates();
double[] outline = new double[(hullCoords.length - 1) * 2];
for (int i = 0; i < hullCoords.length - 1; ++i) {
Coordinate c = hullCoords[i];
double[] outline = new double[(coords.size() - 1) * 2];
for (int i = 0; i < coords.size() - 1; ++i) {
Coordinate c = coords.get(i);
outline[2 * i] = c.x;
outline[2 * i + 1] = c.y;
}
@@ -189,6 +160,7 @@
datasetids[count] = s.datasetid;
timestamps[count] = s.timestamp.getDateTimeAsLong();
cloudcover[count] = s.cloudcover;
proclevel[count] = s.proclevel;
bounds[count] = outline;
++count;
@@ -210,42 +182,40 @@
LOG.info("finished building index");
}
public void visit(double x1, double x2, double y1, double y2, SceneVisitor visitor) {
visit(new Envelope(x1, x2, y1, y2), visitor);
}
public void visit(Envelope e, SceneVisitor visitor) {
rTree.query(e, visitor);
}
// public Map<Short, Integer> countScenesGroupedByDatasets(double x1, double x2, double y1, double y2) {
// return countScenesGroupedByDatasets(new Envelope(x1, x2, y1, y2));
// private static final double PI_QUARTER = 0.25 * PI;
//
// public static double latToY(double lat) {
// return PI - Math.log(Math.tan(PI_QUARTER + 0.5 * Math.toRadians(lat)));
// }
// /*
// * (non-Javadoc)
// *
// * @see de.potsdam.gfz.gms.database.indexes.SpatialIndex#countScenesGroupedByDatasets(com.vividsolutions.jts.geom.Envelope)
// */
// @Override
// public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e) {
// final Map<Short, Integer> datasetCounts = new HashMap<>();
// visit(e, new SceneVisitor() {
//
// @Override
// public void visitScene(int dataidx, int id, short datasetid, long timestamp, double[] bound) {
// int count = 0;
// if (datasetCounts.containsKey(datasetid)) {
// count = datasetCounts.get(datasetid);
// public static double lonToX(double lon) {
// return PI + Math.toRadians(lon);
// }
//
// datasetCounts.put(datasetid, count + 1);
// /**
// * Projects the internal coordinates of the given point from geographical to Cartesian coordinates
// *
// * @param p
// */
// public static Point latLonPointToXY(Point p) {
// p.x = lonToX(p.x);
// p.y = latToY(p.y);
// return p;
// }
// });
//
// return datasetCounts;
// public static Envelope latLonEnvelopeToXY(Envelope e) {
// e.init(lonToX(e.getMinX()), lonToX(e.getMaxX()), latToY(e.getMinY()), latToY(e.getMaxY()));
// return e;
// }
public void visit(double x1, double x2, double y1, double y2, SceneVisitor visitor) {
visit(new Envelope(x1, x2, y1, y2), visitor);
}
public void visit(Envelope e, SceneVisitor visitor) {
rTree.query(e, visitor);
}
/*
* (non-Javadoc)
*
@@ -253,23 +223,23 @@
* de.potsdam.gfz.gms.database.shared.Timestamp)
*/
@Override
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, final Timestamp start, final Timestamp end, final int seasoncode, final byte maxCloudcover) {
// if (start == null || end == null) {
// // ignore timerange
// return countScenesGroupedByDatasets(e);
// }
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, final Timestamp start, final Timestamp end, final int seasoncode, final byte maxCloudcover,
final byte minProclevel) {
final Map<Short, Integer> datasetCounts = new HashMap<>();
final Timestamp sceneTimestamp = new Timestamp();
visit(e, new SceneVisitor() {
@Override
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, double[] bound) {
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound) {
if (cloudcover > maxCloudcover) {
return;
}
if (proclevel < minProclevel) {
return;
}
sceneTimestamp.setFromDateTimeLong(timestamp);
if (!sceneTimestamp.isInSeason(seasoncode)) {
return;
@@ -292,7 +262,7 @@
@Override
public List<Integer> getScenesForDataset(final Envelope e, final Timestamp start, final Timestamp end, final int seasoncode, final byte minCloudcover, final byte maxCloudcover,
final short dataset_id) {
final short dataset_id, final byte minProclevel, final byte maxProclevel) {
List<Integer> sceneDataIndexList = new ArrayList<>();
final Timestamp sceneTimestamp = new Timestamp();
@@ -302,7 +272,7 @@
visit(e, new SceneVisitor() {
@Override
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, double[] bound) {
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound) {
// filter datasetid
if (datasetid != dataset_id) {
// dataset ids differ
@@ -315,6 +285,11 @@
return;
}
if (proclevel < minProclevel || proclevel > maxProclevel) {
// out of proc level range
return;
}
// filter time range
sceneTimestamp.setFromDateTimeLong(timestamp);
if (sceneTimestamp.between(start, end) && sceneTimestamp.isInSeason(seasoncode)) {
@@ -334,6 +309,7 @@
sceneData.timestamp = timestamps[dataidx];
sceneData.cloudcover = cloudcover[dataidx];
sceneData.bounds = bounds[dataidx];
sceneData.proclevel = proclevel[dataidx];
return sceneData;
}
}
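The new proclevel checks slot into the existing visitor chain: every envelope hit from the STRtree passes through cheap attribute filters before it counts. A self-contained sketch of that pattern with the new range filter (the arrays and values are simplified stand-ins for the index fields, not the class's actual code):

import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.index.ItemVisitor;
import com.vividsolutions.jts.index.strtree.STRtree;

public class ProclevelFilterSketch {
    public static void main(String[] args) {
        final byte[] proclevel = { 0, 2, 3 };
        STRtree tree = new STRtree();
        tree.insert(new Envelope(10, 11, 50, 51), 0);
        tree.insert(new Envelope(10.5, 11.5, 50.5, 51.5), 1);
        tree.insert(new Envelope(30, 31, 50, 51), 2); // misses the query below

        final byte minProclevel = 1, maxProclevel = 2;
        tree.query(new Envelope(10, 12, 50, 52), new ItemVisitor() {
            @Override
            public void visitItem(Object item) {
                int idx = (int) item;
                if (proclevel[idx] < minProclevel || proclevel[idx] > maxProclevel) {
                    return; // out of proc level range, same check as above
                }
                System.out.println("scene index " + idx + " matches");
            }
        });
    }
}

Only index 1 survives: index 0 fails the proclevel filter, and index 2 never reaches the visitor because its envelope misses the query region.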
@@ -89,7 +89,7 @@ public class SpatialIndexMediator {
unlockChannel();
}
public void countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte maxCloudcover, ByteBuffer buf) {
public void countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte maxCloudcover, byte minProclevel, ByteBuffer buf) {
try {
lockChannel();
@@ -107,6 +107,8 @@
out.writeInt(seasonCode);
// write max cloud cover
out.writeByte(maxCloudcover);
// write min proclevel
out.writeByte(minProclevel);
// force sending
out.flush();
@@ -138,7 +140,8 @@
* @param datasetid
* @param buffer
*/
public void getFullSceneDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, ByteBuffer buf) {
public void getFullSceneDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, ByteBuffer buf) {
try {
lockChannel();
@@ -160,6 +163,10 @@
out.writeByte(maxCloudcover);
// send datasetid
out.writeShort(datasetid);
// send min proclevel
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
// force sending
out.flush();
@@ -201,7 +208,8 @@
* @param datasetid
* @param buffer
*/
public void getSceneMetaDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, ByteBuffer buf) {
public void getSceneMetaDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, ByteBuffer buf) {
try {
lockChannel();
@@ -223,6 +231,10 @@
out.writeByte(maxCloudcover);
// send datasetid
out.writeShort(datasetid);
// send min proclevel
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
// force sending
out.flush();
@@ -264,7 +276,8 @@
* @param datasetid
* @param buffer
*/
public void getSceneBoundsForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, ByteBuffer buf) {
public void getSceneBoundsForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, ByteBuffer buf) {
try {
lockChannel();
@@ -286,6 +299,10 @@
out.writeByte(maxCloudcover);
// send datasetid
out.writeShort(datasetid);
// send min proclevel
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
// force sending
out.flush();
@@ -380,7 +397,7 @@
// buf2.clear();
// }
indexMediator.getFullSceneDataForDataset(e, start, end, 0, (byte) 0, (byte) 100, (short) 108, buf);
indexMediator.getFullSceneDataForDataset(e, start, end, 0, (byte) 0, (byte) 100, (short) 108, (byte) 0, (byte) 100, buf);
buf.flip();
short dataset2 = buf.getShort();
int count2 = buf.getInt();
@@ -391,13 +408,14 @@
int sceneid = buf.getInt();
long ts = buf.getLong();
byte cloudcover = buf.get();
byte proclevel = buf.get();
byte numBounds = buf.get();
double[] bounds = new double[numBounds];
for (byte j = 0; j < numBounds; ++j) {
bounds[j] = buf.getDouble();
}
System.out.println(sceneid + "|" + new Timestamp(ts).toDateString() + "|" + cloudcover + "|" + numBounds + "|" + Arrays.toString(bounds));
System.out.println(sceneid + "|" + new Timestamp(ts).toDateString() + "|" + cloudcover + "|" + proclevel + "|" + numBounds + "|" + Arrays.toString(bounds));
}
indexMediator.disconnect();
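Because the mediator protocol is positional, the two new proclevel bytes must be appended in exactly the order the server reads them. Read off this diff, the tail of the getFullSceneDataForDataset request now looks like the sketch below (everything before maxCloudcover is unchanged and not shown in the hunk, so it is elided here too); the per-scene response grew a matching proclevel byte, which is why the self-test above now calls buf.get() once more per record:

// tail of the request, per this diff; earlier fields (envelope, timestamps,
// season code, minCloudcover) are unchanged and elided
static void writeRequestTail(java.io.DataOutputStream out, byte maxCloudcover, short datasetid,
        byte minProclevel, byte maxProclevel) throws java.io.IOException {
    out.writeByte(maxCloudcover);
    out.writeShort(datasetid);
    out.writeByte(minProclevel); // new in this commit
    out.writeByte(maxProclevel); // new in this commit
    out.flush();                 // force sending
}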
@@ -23,47 +23,49 @@ import de.potsdam.gfz.gms.database.shared.Timestamp;
*/
public class SceneIterator implements Iterator<Scene>, Closeable {
private final Runnable asyncQuery = new Runnable() {
@Override
public void run() {
if (rs != null) {
Random r = new Random();
try {
sceneLoop: while (!close && rs.next()) {
Scene s = new Scene();
s.id = rs.getInt(1);
s.datasetid = rs.getShort(2);
s.timestamp = Timestamp.fromDate(rs.getTimestamp(3));
Double cc = rs.getDouble(5);
if (Double.isNaN(cc) || cc < 0) {
s.cloudcover = (byte) r.nextInt(101);
} else {
s.cloudcover = cc.byteValue();
}
s.bounds = (PGgeometry) rs.getObject(4);
while (!buffer.offer(s, 1, TimeUnit.SECONDS)) {
if (close) {
buffer.clear();
break sceneLoop;
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
try {
rs.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
buffer.add(LAST_INDICATOR);
}
};
private final Runnable asyncQuery = new Runnable() {
@Override
public void run() {
if (rs != null) {
Random r = new Random();