Commit 8eed9637 authored by Daniel Eggert's avatar Daniel Eggert
Browse files

Added day/night indicator feature and filtering ability

parent bc160858
......@@ -769,7 +769,7 @@ public class SceneDatabase {
* @return
*/
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover, final byte minProclevel,
final double[] polygon, ByteBuffer buffer) {
final byte dayNight, final double[] polygon, ByteBuffer buffer) {
SpatialIndexMediator spatialIndex = getSpatialIndex();
......@@ -787,7 +787,7 @@ public class SceneDatabase {
// calculate the area, width and height of the region of interest polygon
// getCompactMeasurementsForPolygon(polygon, buffer);
try {
spatialIndex.countScenesGroupedByDatasets(e, start, end, seasonCode, maxCloudcover, minProclevel, buffer);
spatialIndex.countScenesGroupedByDatasets(e, start, end, seasonCode, maxCloudcover, minProclevel, dayNight, buffer);
} catch (Throwable t) {
spatialIndexMediator = null;
}
......@@ -837,7 +837,7 @@ public class SceneDatabase {
* Stores a compact representation of the resulting scene query in the given buffer
*/
public void getCompactSceneListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, byte dayNight, ByteBuffer buffer) {
SpatialIndexMediator spatialIndex = getSpatialIndex();
if (spatialIndex == null) {
......@@ -854,7 +854,7 @@ public class SceneDatabase {
}
try {
spatialIndex.getFullSceneDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
spatialIndex.getFullSceneDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, dayNight, buffer);
} catch (Throwable t) {
spatialIndexMediator = null;
}
......@@ -897,7 +897,7 @@ public class SceneDatabase {
* [byte] cloudcover
*/
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, byte dayNight, ByteBuffer buffer) {
SpatialIndexMediator spatialIndex = getSpatialIndex();
if (spatialIndex == null) {
......@@ -913,7 +913,7 @@ public class SceneDatabase {
}
try {
spatialIndex.getSceneMetaDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
spatialIndex.getSceneMetaDataForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, dayNight, buffer);
} catch (Throwable t) {
spatialIndexMediator = null;
}
......@@ -924,7 +924,7 @@ public class SceneDatabase {
* Stores a compact representation of the resulting scene query in the given buffer. Here only the scene bounds are stored in the buffer.
*/
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, ByteBuffer buffer) {
final byte maxCloudcover, final double[] polygon, final byte minProclevel, final byte maxProclevel, byte dayNight, ByteBuffer buffer) {
SpatialIndexMediator spatialIndex = getSpatialIndex();
if (spatialIndex == null) {
......@@ -940,7 +940,7 @@ public class SceneDatabase {
}
try {
spatialIndex.getSceneBoundsForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, buffer);
spatialIndex.getSceneBoundsForDataset(e, start, end, seasonCode, minCloudcover, maxCloudcover, datasetid, minProclevel, maxProclevel, dayNight, buffer);
} catch (Throwable t) {
spatialIndexMediator = null;
}
......@@ -1481,6 +1481,12 @@ public class SceneDatabase {
pst.setString(++idx, job.mode.toString());
pst.setInt(++idx, job.distributionIndex);
if (job.dayNightIndicator == 0) {
pst.setNull(++idx, Types.BOOLEAN);
} else {
pst.setBoolean(++idx, job.dayNightIndicator == 1);
}
ResultSet rs = pst.executeQuery();
commit();
......@@ -1670,6 +1676,14 @@ public class SceneDatabase {
} else {
job.bounds = new double[] { 0, 0, 0, 0 };
}
Boolean dayNight = (Boolean) rs.getObject("day_acquisition");
if (dayNight == null) {
job.dayNightIndicator = 0;
} else if (dayNight) {
job.dayNightIndicator = 1;
} else {
job.dayNightIndicator = 2;
}
job.distributionIndex = rs.getInt("distribution_index");
job.progress = rs.getInt("progress");
job.feedback = rs.getString("feedback");
......
......@@ -25,7 +25,7 @@ public interface SqlStatements {
+ "WHERE ST_Intersects(ST_GeographyFromText('SRID=4326; POLYGON((' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || ', ' || ? || ' ' || ? || '))'), bounds) "
+ "GROUP BY 1 " + "ORDER BY 1;";
public static final String SCENES_QUERY = "SELECT id, datasetid, acquisitiondate, bounds, cloudcover, proc_level, ST_INTERSECTS(bounds, ST_GeogFromText('SRID=4326;LINESTRING(-180 -89.999999, -180 90)')) FROM scenes;";
public static final String SCENES_QUERY = "SELECT id, datasetid, acquisitiondate, bounds, cloudcover, proc_level, day_acquisition, ST_INTERSECTS(bounds, ST_GeogFromText('SRID=4326;LINESTRING(-180 -89.999999, -180 90)')) FROM scenes;";
public static final String SCENE_ENTITYID_QUERY = "SELECT entityid FROM scenes WHERE id=?;";
......@@ -47,7 +47,7 @@ public interface SqlStatements {
public static final String GET_PROC_LEVEL_FOR_JOB_SCENES_QUERY = "select scenes.datasetid, scenes.id, scenes.proc_level from scenes, jobs where jobs.id=? AND scenes.id = ANY(jobs.sceneids);";
public static final String CREATE_SCENE_PROCESSING_JOB = "INSERT INTO jobs (creationtime, timerange_start, timerange_end, bounds, sceneids, virtualsensorid, datasetid_spatial_ref, ref_job_id, non_ref_datasetids, max_cloudcover, season_code, path_analysis_script, analysis_parameter, statistics, job_mode, distribution_index) VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::job_mode, ?) RETURNING id;";
public static final String CREATE_SCENE_PROCESSING_JOB = "INSERT INTO jobs (creationtime, timerange_start, timerange_end, bounds, sceneids, virtualsensorid, datasetid_spatial_ref, ref_job_id, non_ref_datasetids, max_cloudcover, season_code, path_analysis_script, analysis_parameter, statistics, job_mode, distribution_index, day_acquisition) VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::job_mode, ?, ?) RETURNING id;";
public static final String CREATE_VIRTUAL_SENSOR = "INSERT INTO virtual_sensors (name, spatial_resolution, spectral_characteristics_datasetid, wavelengths_pos, band_width) VALUES (?, ?, ?, ?, ?) RETURNING id;";
......
......@@ -22,19 +22,21 @@ public interface SceneIndex {
public long timestamp;
public byte cloudcover;
public byte proclevel;
// null == 0, day = 1, night = 2
public byte dayNight;
public double[] bounds;
}
// public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e);
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte maxCloudcover, byte minProclevel);
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte maxCloudcover, byte minProclevel, byte dayNight);
// public Map<Short, List<Integer>> getScenesGroupedByDatasets(Envelope e);
//
// public Map<Short, List<Integer>> getScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end);
public List<Integer> getScenesForDataset(Envelope e, Timestamp start, Timestamp end, int seasoncode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel);
byte maxProclevel, byte dayNight);
public SceneData getSceneData(SceneData sceneData, int dataidx);
......
......@@ -45,10 +45,10 @@ public class SpatialSceneIndex implements SceneIndex {
throw new RuntimeException("The thread visiting the tree nodes got interrupted.");
}
int idx = (int) item;
visitScene(idx, ids[idx], datasetids[idx], timestamps[idx], cloudcover[idx], proclevel[idx], bounds[idx]);
visitScene(idx, ids[idx], datasetids[idx], timestamps[idx], cloudcover[idx], proclevel[idx], dayNight[idx], bounds[idx]);
}
public abstract void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound);
public abstract void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, byte dayNight, double[] bound);
}
......@@ -61,6 +61,7 @@ public class SpatialSceneIndex implements SceneIndex {
private final long[] timestamps;
private final byte[] cloudcover;
private final byte[] proclevel;
private final byte[] dayNight;
private final double[][] bounds;
public static SceneIndex getInstance() {
......@@ -95,6 +96,7 @@ public class SpatialSceneIndex implements SceneIndex {
timestamps = new long[sceneCount];
cloudcover = new byte[sceneCount];
proclevel = new byte[sceneCount];
dayNight = new byte[sceneCount];
bounds = new double[sceneCount][];
initFromDB(db);
......@@ -163,6 +165,7 @@ public class SpatialSceneIndex implements SceneIndex {
timestamps[count] = s.timestamp.getDateTimeAsLong();
cloudcover[count] = s.cloudcover;
proclevel[count] = s.proclevel;
dayNight[count] = s.dayNight;
bounds[count] = outline;
++count;
......@@ -202,14 +205,14 @@ public class SpatialSceneIndex implements SceneIndex {
*/
@Override
public Map<Short, Integer> countScenesGroupedByDatasets(Envelope e, final Timestamp start, final Timestamp end, final int seasoncode, final byte maxCloudcover,
final byte minProclevel) {
final byte minProclevel, final byte dayNightParameter) {
final Map<Short, Integer> datasetCounts = new HashMap<>();
final Timestamp sceneTimestamp = new Timestamp();
visit(e, new SceneVisitor() {
@Override
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound) {
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, byte dayNight, double[] bound) {
if (cloudcover > maxCloudcover) {
return;
}
......@@ -218,6 +221,14 @@ public class SpatialSceneIndex implements SceneIndex {
return;
}
if (dayNightParameter == Scene.DAY_NIGHT_DAY && dayNight != Scene.DAY_NIGHT_DAY) {
return;
}
if (dayNightParameter == Scene.DAY_NIGHT_NIGHT && dayNight != Scene.DAY_NIGHT_NIGHT) {
return;
}
sceneTimestamp.setFromDateTimeLong(timestamp);
if (!sceneTimestamp.isInSeason(seasoncode)) {
return;
......@@ -240,7 +251,7 @@ public class SpatialSceneIndex implements SceneIndex {
@Override
public List<Integer> getScenesForDataset(final Envelope e, final Timestamp start, final Timestamp end, final int seasoncode, final byte minCloudcover, final byte maxCloudcover,
final short dataset_id, final byte minProclevel, final byte maxProclevel) {
final short dataset_id, final byte minProclevel, final byte maxProclevel, final byte dayNightParameter) {
List<Integer> sceneDataIndexList = new ArrayList<>();
final Timestamp sceneTimestamp = new Timestamp();
......@@ -250,7 +261,7 @@ public class SpatialSceneIndex implements SceneIndex {
visit(e, new SceneVisitor() {
@Override
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, double[] bound) {
public void visitScene(int dataidx, int id, short datasetid, long timestamp, byte cloudcover, byte proclevel, byte dayNight, double[] bound) {
// filter datasetid
if (datasetid != dataset_id) {
// dataset ids differ
......@@ -268,6 +279,14 @@ public class SpatialSceneIndex implements SceneIndex {
return;
}
if (dayNightParameter == Scene.DAY_NIGHT_DAY && dayNight != Scene.DAY_NIGHT_DAY) {
return;
}
if (dayNightParameter == Scene.DAY_NIGHT_NIGHT && dayNight != Scene.DAY_NIGHT_NIGHT) {
return;
}
// filter time range
sceneTimestamp.setFromDateTimeLong(timestamp);
if (sceneTimestamp.between(start, end) && sceneTimestamp.isInSeason(seasoncode)) {
......
......@@ -9,7 +9,6 @@ import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import org.slf4j.Logger;
......@@ -90,8 +89,7 @@ public class SpatialIndexMediator {
unlockChannel();
}
public void countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte maxCloudcover, byte minProclevel,
ByteBuffer buf) {
public void countScenesGroupedByDatasets(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte maxCloudcover, byte minProclevel, byte dayNight, ByteBuffer buf) {
try {
lockChannel();
......@@ -111,6 +109,8 @@ public class SpatialIndexMediator {
out.writeByte(maxCloudcover);
// write min proclevel
out.writeByte(minProclevel);
// write day night parameter
out.writeByte(dayNight);
// force sending
out.flush();
......@@ -171,8 +171,8 @@ public class SpatialIndexMediator {
* @param datasetid
* @param buffer
*/
public void getFullSceneDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid,
byte minProclevel, byte maxProclevel, ByteBuffer buf) {
public void getFullSceneDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, byte dayNight, ByteBuffer buf) {
try {
lockChannel();
......@@ -198,6 +198,8 @@ public class SpatialIndexMediator {
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
// send day night
out.writeByte(dayNight);
// force sending
out.flush();
......@@ -239,8 +241,8 @@ public class SpatialIndexMediator {
* @param datasetid
* @param buffer
*/
public void getSceneMetaDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid,
byte minProclevel, byte maxProclevel, ByteBuffer buf) {
public void getSceneMetaDataForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, byte dayNight, ByteBuffer buf) {
try {
lockChannel();
......@@ -266,6 +268,7 @@ public class SpatialIndexMediator {
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
out.writeByte(dayNight);
// force sending
out.flush();
......@@ -307,8 +310,8 @@ public class SpatialIndexMediator {
* @param datasetid
* @param buffer
*/
public void getSceneBoundsForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid,
byte minProclevel, byte maxProclevel, ByteBuffer buf) {
public void getSceneBoundsForDataset(Envelope e, Timestamp start, Timestamp end, int seasonCode, byte minCloudcover, byte maxCloudcover, short datasetid, byte minProclevel,
byte maxProclevel, byte dayNight, ByteBuffer buf) {
try {
lockChannel();
......@@ -334,6 +337,7 @@ public class SpatialIndexMediator {
out.writeByte(minProclevel);
// send max proclevel
out.writeByte(maxProclevel);
out.writeByte(dayNight);
// force sending
out.flush();
......@@ -383,77 +387,77 @@ public class SpatialIndexMediator {
public static void main(String[] args) {
SpatialIndexMediator indexMediator = SpatialIndexMediator.connect("localhost", MediatorConstants.PORT);
System.out.println("gms-database version: "+ Version.getVersionInfo());
System.out.println("gms-database version: " + Version.getVersionInfo());
System.out.println("server version: " + indexMediator.getServerVersionInfo());
// // ByteBuffer buf = ByteBuffer.allocate(1000000);
// ByteBuffer buf = ByteBuffer.allocate(100000000);
//
// double low = 42.372196452589;
// double left = 6.1190697187496;
// double up = 55.613615264148;
// double right = 17.237233781249;
// double[] roi = new double[] { left, low, right, low, right, up, left, up, left, low };
//
// Envelope e = new Envelope();
// for (int i = 0; i < roi.length; i += 2) {
// e.expandToInclude(roi[i], roi[i + 1]);
// }
//
// Timestamp start = new Timestamp(1900, 1, 1);
// Timestamp end = Timestamp.now();
//
// // indexMediator.countScenesGroupedByDatasets(e, start, end, buf);
// //
// // buf.flip();
// // short num = buf.getShort();
// // for (int i = 0; i < num; ++i) {
// // short dataset = buf.getShort();
// // int count = buf.getInt();
// // indexMediator.getScenesForDataset(e, start, end, dataset, buf2);
// // buf2.flip();
// // short dataset2 = buf2.getShort();
// // int count2 = buf2.getInt();
// // LOG.info("datasets: " + dataset + " vs. " + dataset2);
// // LOG.info("counts: " + count + " vs. " + count2);
// //
// // for (int k = 0; k < count2; ++k) {
// // int sceneid = buf2.getInt();
// // long ts = buf2.getLong();
// // byte numBounds = buf2.get();
// // double[] bounds = new double[numBounds];
// // for (byte j = 0; j < numBounds; ++j) {
// // bounds[j] = buf2.getDouble();
// // }
// //
// // System.out.println(sceneid + "|" + new Timestamp(ts).toDateString() + "|" + numBounds + "|" + Arrays.toString(bounds));
// // }
// // buf2.clear();
// // }
//
// indexMediator.getFullSceneDataForDataset(e, start, end, 0, (byte) 0, (byte) 100, (short) 108, (byte) 0, (byte) 100, buf);
// buf.flip();
// short dataset2 = buf.getShort();
// int count2 = buf.getInt();
// LOG.info("dataset: " + dataset2);
// LOG.info("count: " + count2);
//
// for (int k = 0; k < count2; ++k) {
// int sceneid = buf.getInt();
// long ts = buf.getLong();
// byte cloudcover = buf.get();
// byte proclevel = buf.get();
// byte numBounds = buf.get();
// double[] bounds = new double[numBounds];
// for (byte j = 0; j < numBounds; ++j) {
// bounds[j] = buf.getDouble();
// }
//
// System.out.println(
// sceneid + "|" + new Timestamp(ts).toDateString() + "|" + cloudcover + "|" + proclevel + "|" + numBounds + "|" + Arrays.toString(bounds));
// }
//
// indexMediator.disconnect();
// // ByteBuffer buf = ByteBuffer.allocate(1000000);
// ByteBuffer buf = ByteBuffer.allocate(100000000);
//
// double low = 42.372196452589;
// double left = 6.1190697187496;
// double up = 55.613615264148;
// double right = 17.237233781249;
// double[] roi = new double[] { left, low, right, low, right, up, left, up, left, low };
//
// Envelope e = new Envelope();
// for (int i = 0; i < roi.length; i += 2) {
// e.expandToInclude(roi[i], roi[i + 1]);
// }
//
// Timestamp start = new Timestamp(1900, 1, 1);
// Timestamp end = Timestamp.now();
//
// // indexMediator.countScenesGroupedByDatasets(e, start, end, buf);
// //
// // buf.flip();
// // short num = buf.getShort();
// // for (int i = 0; i < num; ++i) {
// // short dataset = buf.getShort();
// // int count = buf.getInt();
// // indexMediator.getScenesForDataset(e, start, end, dataset, buf2);
// // buf2.flip();
// // short dataset2 = buf2.getShort();
// // int count2 = buf2.getInt();
// // LOG.info("datasets: " + dataset + " vs. " + dataset2);
// // LOG.info("counts: " + count + " vs. " + count2);
// //
// // for (int k = 0; k < count2; ++k) {
// // int sceneid = buf2.getInt();
// // long ts = buf2.getLong();
// // byte numBounds = buf2.get();
// // double[] bounds = new double[numBounds];
// // for (byte j = 0; j < numBounds; ++j) {
// // bounds[j] = buf2.getDouble();
// // }
// //
// // System.out.println(sceneid + "|" + new Timestamp(ts).toDateString() + "|" + numBounds + "|" + Arrays.toString(bounds));
// // }
// // buf2.clear();
// // }
//
// indexMediator.getFullSceneDataForDataset(e, start, end, 0, (byte) 0, (byte) 100, (short) 108, (byte) 0, (byte) 100, buf);
// buf.flip();
// short dataset2 = buf.getShort();
// int count2 = buf.getInt();
// LOG.info("dataset: " + dataset2);
// LOG.info("count: " + count2);
//
// for (int k = 0; k < count2; ++k) {
// int sceneid = buf.getInt();
// long ts = buf.getLong();
// byte cloudcover = buf.get();
// byte proclevel = buf.get();
// byte numBounds = buf.get();
// double[] bounds = new double[numBounds];
// for (byte j = 0; j < numBounds; ++j) {
// bounds[j] = buf.getDouble();
// }
//
// System.out.println(
// sceneid + "|" + new Timestamp(ts).toDateString() + "|" + cloudcover + "|" + proclevel + "|" + numBounds + "|" + Arrays.toString(bounds));
// }
//
// indexMediator.disconnect();
}
}
......@@ -44,7 +44,15 @@ public class SceneIterator implements Iterator<Scene>, Closeable {
s.cloudcover = cc.byteValue();
}
s.proclevel = (byte) DB2JavaEnums.convertToProcLevel(rs.getString(6)).ordinal();
s.onDateline = rs.getBoolean(7);
Boolean dn = (Boolean) rs.getObject(7);
if (dn == null) {
s.dayNight = Scene.DAY_NIGHT_NULL;
} else if (dn) {
s.dayNight = Scene.DAY_NIGHT_DAY;
} else {
s.dayNight = Scene.DAY_NIGHT_NIGHT;
}
s.onDateline = rs.getBoolean(8);
while (!buffer.offer(s, 1, TimeUnit.SECONDS)) {
if (close) {
buffer.clear();
......
......@@ -17,11 +17,16 @@ public class Scene implements Serializable {
private static final long serialVersionUID = -3686589972943546902L;
public static final byte DAY_NIGHT_NULL = 0;
public static final byte DAY_NIGHT_DAY = 1;
public static final byte DAY_NIGHT_NIGHT = 2;
public int id;
public short datasetid;
public Timestamp timestamp;
public byte cloudcover;
public byte proclevel;
public byte dayNight;
public PGgeometry bounds;
public boolean onDateline;
......@@ -33,6 +38,6 @@ public class Scene implements Serializable {
@Override
public String toString() {
return "[id:" + id + " | datasetid:" + datasetid + " | timestamp:" + timestamp.toFullString() + " | cloudcover:" + cloudcover + " | proclevel:" + proclevel + " | bounds:"
+ bounds.getType() + "] onDateline: " + onDateline;
+ bounds.getType() + "] onDateline: " + onDateline + " | dayNight: " + dayNight;
}
}
......@@ -64,6 +64,8 @@ public class ProcessingJob {
public String pathAnalysisScript;
public String analysisParameter;
public byte dayNightIndicator;
/**
* download job statistics:<br>
* [0] total number of scenes to download<br>
......@@ -175,6 +177,7 @@ public class ProcessingJob {
// subJob.datacubeMgrsTilesProc = Arrays.copyOf(datacubeMgrsTilesProc, datacubeMgrsTilesProc.length);
// }
downloadJob.maxCloudcover = maxCloudcover;
downloadJob.dayNightIndicator = dayNightIndicator;
downloadJob.seasonCode = seasonCode;
// subJob.comment = comment;
downloadJob.status = job_status.pending;
......@@ -226,6 +229,7 @@ public class ProcessingJob {
}
procJob.virtualsensorid = virtualsensorid;
procJob.maxCloudcover = maxCloudcover;
procJob.dayNightIndicator = dayNightIndicator;
procJob.seasonCode = seasonCode;
procJob.status = job_status.pending;
procJob.mode = job_mode.processing_only;
......@@ -288,6 +292,7 @@ public class ProcessingJob {
analysisJob.virtualsensorid = virtualsensorid;
analysisJob.maxCloudcover = maxCloudcover;
analysisJob.dayNightIndicator = dayNightIndicator;
analysisJob.seasonCode = seasonCode;
analysisJob.status = job_status.pending;
analysisJob.mode = job_mode.analysis_only;
......@@ -445,7 +450,7 @@ public class ProcessingJob {
+ (datacubeMgrsTilesProc != null ? Arrays.toString(datacubeMgrsTilesProc) : "null") + "),\n" + "nonRefDatasetIds("
+ (nonRefDatasetIds != null ? Arrays.toString(nonRefDatasetIds) : "null") + "),\n" + "comment(" + comment + "), maxCloudcover(" + maxCloudcover + "), seasonCode("
+ seasonCode + "),\n" + "status(" + status + "),\n" + "pathAnalysisScript(" + pathAnalysisScript + "), analysisParameter(" + analysisParameter + "), mode(" + mode
+ ")\n" + (downloadJobs != null ? ("\ndownloadJobs {" + Arrays.toString(downloadJobs.values().toArray()) + "}") : "") + ")\n"
+ ") dayNight(" + dayNightIndicator + ")\n" + (downloadJobs != null ? ("\ndownloadJobs {" + Arrays.toString(downloadJobs.values().toArray()) + "}") : "") + ")\n"
+ (procJobs != null ? ("\nprocJobs {" + Arrays.toString(procJobs.values().toArray()) + "}") : "") + ")\n"
+ (analysisJobs != null ? ("\nanalysisJobs {" + Arrays.toString(analysisJobs.toArray()) + "}") : "");
}
......
......@@ -35,7 +35,7 @@ import de.potsdam.gfz.gms.metadatacrawler.model.SceneMetadata;
*/
public class MetadatacrawlerModule {
private static final String INSERT_QUERY = "INSERT INTO scenes (entityid, datasetid, acquisitiondate, starttime, endtime, browseurl, dataaccessurl, downloadurl, metadataurl, modifieddate, orderurl, summary, bounds, satelliteid, sensorid, subsystemid, proc_level, cloudcover) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'METADATA', ?);";
private static final String INSERT_QUERY = "INSERT INTO scenes (entityid, datasetid, acquisitiondate, starttime, endtime, browseurl, dataaccessurl, downloadurl, metadataurl, modifieddate, orderurl, summary, bounds, satelliteid, sensorid, subsystemid, proc_level, cloudcover, day_acquisition) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'METADATA', ?, ?);";
@SuppressWarnings("unused")
public static void main(String[] args) throws Exception {
......@@ -62,6 +62,7 @@ public class MetadatacrawlerModule {
// aster_l1t.id = 189;
// aster_l1t.name = "ASTER_L1T";
// crawlerTasks.put(usgs, Arrays.asList(landsat7c1));