Commit 5606a6bc authored by Daniel Eggert's avatar Daniel Eggert
Browse files

applied changes addressing issues #2 and #3 of GeoMultiSens

parent ee930472
......@@ -757,8 +757,8 @@ public class SceneDatabase {
* @param polygon
* @return
*/
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover, final double[] polygon,
ByteBuffer buffer) {
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover,
final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactSceneCount(start, end, polygon, buffer);
......@@ -849,8 +849,8 @@ public class SceneDatabase {
* [long] timestamp<br>
* [byte] cloudcover
*/
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
......@@ -870,8 +870,8 @@ public class SceneDatabase {
/**
* Stores a compact representation of the resulting scene query in the given buffer. Here only the scene bounds are stored in the buffer.
*/
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
......@@ -1096,7 +1096,8 @@ public class SceneDatabase {
if (datasetid != 249 && affectedRows > 1) {
throw new IllegalStateException("More than one row (" + affectedRows + ") was affected by the filename update");
} else if (affectedRows == 0) {
throw new IllegalArgumentException("The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
throw new IllegalArgumentException(
"The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
}
} catch (SQLException e) {
e.printStackTrace();
......@@ -1390,20 +1391,19 @@ public class SceneDatabase {
pst.setNull(++idx, Types.VARCHAR);
}
Integer[] ary;
if (job.statistics != null) {
ary = new Integer[job.statistics.length];
Arrays.fill(ary, 0);
if (job.statistics != null && job.statistics.length > 0) {
// convert to obj array
Integer[] ary = new Integer[job.statistics.length];
if (job.statistics.length > 0) {
ary[0] = job.statistics[0];
for (int i = 0; i < ary.length; ++i) {
ary[i] = job.statistics[i];
}
pst.setArray(++idx, con.createArrayOf("integer", ary));
} else {
ary = new Integer[] { 0, 0, 0 };
pst.setNull(++idx, Types.ARRAY);
}
pst.setArray(++idx, con.createArrayOf("integer", ary));
pst.setString(++idx, job.mode.toString());
ResultSet rs = pst.executeQuery();
......
......@@ -55,6 +55,10 @@ public class Enums {
* indicates an abandoned job, refers usually to job that is unfinished and not monitored by any controller instance
*/
abandoned,
/**
* indicates a failed job, e.g. due to unhandled exceptions
*/
failed,
/**
* invalid or unknown status
*/
......
......@@ -4,6 +4,7 @@
package de.potsdam.gfz.gms.database.shared;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
......@@ -56,4 +57,13 @@ public class LabeledHistogram {
return max;
}
/**
 * Removes every histogram entry whose count is exactly zero, leaving only
 * labels that were actually observed. Mutates {@code histMap} in place via
 * the iterator to avoid ConcurrentModificationException.
 */
public void clearZeroLabels() {
    Iterator<Entry<String, Integer>> it = histMap.entrySet().iterator();
    while (it.hasNext()) {
        Entry<String, Integer> entry = it.next();
        if (entry.getValue() == 0) {
            it.remove();
        }
    }
}
}
......@@ -30,14 +30,23 @@ public class ProcessingJob {
// public static final int PROGRESS_ANALYSIS_FINISHED_WITH_WARNINGS = 1 << 17;
// public static final int PROGRESS_ANALYSIS_CANCELED = 1 << 18;
public static final int PROC_STAT_IDX_READY_TO_PROCESS = 0;
public static final int PROC_STAT_IDX_L1A = 1;
public static final int PROC_STAT_IDX_L1B = 2;
public static final int PROC_STAT_IDX_L1C = 3;
public static final int PROC_STAT_IDX_L2A = 4;
public static final int PROC_STAT_IDX_L2B = 5;
public static final int PROC_STAT_IDX_L2C = 6;
public static final int PROC_STAT_IDX_FAILED = 7;
/**
* in case the corresponding job instance is <code>null</code> for some reason
*/
public static final long ID_NULL_JOB = 0;
public static final long ID_NULL_JOB = 0;
/**
* in case the actual id has not been assigned yet
*/
public static final long ID_INVALID = -1;
public static final long ID_INVALID = -1;
public long id;
public Timestamp creationtime;
......@@ -211,6 +220,7 @@ public class ProcessingJob {
if (sceneids == null || sceneids.length == 0) {
throw new IllegalArgumentException("null or empty sceneids array provided - unable to init download job instance.");
}
// compile job
ProcessingJob procJob = new ProcessingJob();
procJob.id = ID_INVALID;
......@@ -441,24 +451,24 @@ public class ProcessingJob {
@Override
public String toString() {
return "[" + id + "] creationtime(" + creationtime + "), finishtime(" + finishtime + "),\n" + "sceneids(" + (sceneids != null ? Arrays.toString(sceneids) : "null") + "),\n"
+ "timerangeStart(" + timerangeStart + "), timerangeEnd(" + timerangeEnd + "), bounds(" + Arrays.toString(bounds) + "),\n" + "distributionIndex("
+ distributionIndex + "), progress(" + progress + "), feedback(" + feedback + "),\n" + "failedSceneIds("
+ (failedSceneids != null ? Arrays.toString(failedSceneids) : "null") + "),\n" + "datasetidSpatialRef(" + datasetidSpatialRef + "), virtualsensorid("
+ virtualsensorid + "), refJobId(" + refJobId + "),\n" + "datacubeMgrsTilesProc("
return "[" + id + "] creationtime(" + creationtime + "), finishtime(" + finishtime + "),\n" + "sceneids("
+ (sceneids != null ? Arrays.toString(sceneids) : "null") + "),\n" + "timerangeStart(" + timerangeStart + "), timerangeEnd(" + timerangeEnd
+ "), bounds(" + Arrays.toString(bounds) + "),\n" + "distributionIndex(" + distributionIndex + "), progress(" + progress + "), feedback("
+ feedback + "),\n" + "failedSceneIds(" + (failedSceneids != null ? Arrays.toString(failedSceneids) : "null") + "),\n" + "datasetidSpatialRef("
+ datasetidSpatialRef + "), virtualsensorid(" + virtualsensorid + "), refJobId(" + refJobId + "),\n" + "datacubeMgrsTilesProc("
+ (datacubeMgrsTilesProc != null ? Arrays.toString(datacubeMgrsTilesProc) : "null") + "),\n" + "nonRefDatasetIds("
+ (nonRefDatasetIds != null ? Arrays.toString(nonRefDatasetIds) : "null") + "),\n" + "comment(" + comment + "), maxCloudcover(" + maxCloudcover + "), seasonCode("
+ seasonCode + "),\n" + "status(" + status + "),\n" + "pathAnalysisScript(" + pathAnalysisScript + "), analysisParameter(" + analysisParameter + "), mode(" + mode
+ ")\n" + (downloadJobs != null ? ("\ndownloadJobs {" + Arrays.toString(downloadJobs.values().toArray()) + "}") : "") + ")\n"
+ (nonRefDatasetIds != null ? Arrays.toString(nonRefDatasetIds) : "null") + "),\n" + "comment(" + comment + "), maxCloudcover(" + maxCloudcover
+ "), seasonCode(" + seasonCode + "),\n" + "status(" + status + "),\n" + "pathAnalysisScript(" + pathAnalysisScript + "), analysisParameter("
+ analysisParameter + "), mode(" + mode + ")\n"
+ (downloadJobs != null ? ("\ndownloadJobs {" + Arrays.toString(downloadJobs.values().toArray()) + "}") : "") + ")\n"
+ (procJobs != null ? ("\nprocJobs {" + Arrays.toString(procJobs.values().toArray()) + "}") : "") + ")\n"
+ (analysisJobs != null ? ("\nanalysisJobs {" + Arrays.toString(analysisJobs.toArray()) + "}") : "");
}
/**
* A given ID is valid if its a positive long. Note: a negative number corresponds to an invalid id, which usually has yet to be
* assigned, <br>
* while 0 (zero) corresponds to a <code>null</code> object which might be returned by some methods to indicate an error an non-existing
* job entry in the database.
* A given ID is valid if its a positive long. Note: a negative number corresponds to an invalid id, which usually has yet to be assigned, <br>
* while 0 (zero) corresponds to a <code>null</code> object which might be returned by some methods to indicate an error an non-existing job entry in the
* database.
*
* @param jobid
* @return
......@@ -481,7 +491,8 @@ public class ProcessingJob {
}
/**
 * @return {@code true} if this job's mode includes a processing stage
 *         (download_and_processing, processing_only, processing_and_analysis, or full).
 */
public boolean involvesProcessing() {
    // NOTE(review): the scraped diff left two copies of this return statement
    // (the second was unreachable and would not compile); only one is kept.
    return mode == job_mode.download_and_processing || mode == job_mode.processing_only || mode == job_mode.processing_and_analysis
            || mode == job_mode.full;
}
public boolean involvesAnalysis() {
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment