Commit 03794e16 authored by Daniel Eggert's avatar Daniel Eggert
Browse files

Rebranded the 'downloadStats' field to the more general 'statistics', since it is also used by processing jobs.
parent a2f6703f
......@@ -756,8 +756,8 @@ public class SceneDatabase {
* @param polygon
* @return
*/
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover,
final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneCountFromIndex(final Timestamp start, final Timestamp end, final int seasonCode, final byte maxCloudcover, final double[] polygon,
ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactSceneCount(start, end, polygon, buffer);
......@@ -848,8 +848,8 @@ public class SceneDatabase {
* [long] timestamp<br>
* [byte] cloudcover
*/
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneMetaDataListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
......@@ -869,8 +869,8 @@ public class SceneDatabase {
/**
* Stores a compact representation of the resulting scene query in the given buffer. Here only the scene bounds are stored in the buffer.
*/
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode,
final byte minCloudcover, final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
public void getCompactSceneBoundsListFromIndex(final short datasetid, final Timestamp start, final Timestamp end, final int seasonCode, final byte minCloudcover,
final byte maxCloudcover, final double[] polygon, ByteBuffer buffer) {
if (spatialIndex == null) {
// no spatial index - use the database
// getCompactScenes(datasetid, start, end, polygon)
......@@ -1095,8 +1095,7 @@ public class SceneDatabase {
if (datasetid != 249 && affectedRows > 1) {
throw new IllegalStateException("More than one row (" + affectedRows + ") was affected by the filename update");
} else if (affectedRows == 0) {
throw new IllegalArgumentException(
"The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
throw new IllegalArgumentException("The update did not affect any row, wrong sceneid? (" + sceneid + " | " + filename + " | " + proc_level.name() + ")");
}
} catch (SQLException e) {
e.printStackTrace();
......@@ -1390,9 +1389,14 @@ public class SceneDatabase {
pst.setNull(++idx, Types.VARCHAR);
}
Integer[] ary = new Integer[] { 0, 0, 0 };
if (job.downloadStats != null && job.downloadStats.length > 0) {
ary[0] = job.downloadStats[0];
Integer[] ary;
if (job.statistics != null) {
ary = new Integer[job.statistics.length];
if (job.statistics.length > 0) {
ary[0] = job.statistics[0];
}
} else {
ary = new Integer[] { 0, 0, 0 };
}
pst.setArray(++idx, con.createArrayOf("integer", ary));
......@@ -1606,13 +1610,13 @@ public class SceneDatabase {
job.pathAnalysisScript = rs.getString("path_analysis_script");
job.analysisParameter = rs.getString("analysis_parameter");
ary = rs.getArray("download_stats");
ary = rs.getArray("statistics");
if (ary != null) {
Object array = ary.getArray();
if (array instanceof Integer[]) {
job.downloadStats = DatatypeConv.toPrimitivArray((Integer[]) array);
job.statistics = DatatypeConv.toPrimitivArray((Integer[]) array);
} else {
throw new RuntimeException("Unexpected return type while processing download_stats");
throw new RuntimeException("Unexpected return type while processing statistics");
}
}
job.mode = DB2JavaEnums.convertToJobMode(rs.getString("job_mode"));
......@@ -1865,10 +1869,10 @@ public class SceneDatabase {
}
// set download stats
if (job.downloadStats != null) {
Integer[] ary = new Integer[job.downloadStats.length];
for (int i = 0; i < job.downloadStats.length; ++i) {
ary[i] = job.downloadStats[i];
if (job.statistics != null) {
Integer[] ary = new Integer[job.statistics.length];
for (int i = 0; i < job.statistics.length; ++i) {
ary[i] = job.statistics[i];
}
pst.setArray(3, con.createArrayOf("integer", ary));
} else {
......
......@@ -47,7 +47,7 @@ public interface SqlStatements {
public static final String GET_PROC_LEVEL_FOR_JOB_SCENES_QUERY = "select scenes.datasetid, scenes.id, scenes.proc_level from scenes, jobs where jobs.id=? AND scenes.id = ANY(jobs.sceneids);";
public static final String CREATE_SCENE_PROCESSING_JOB = "INSERT INTO jobs (creationtime, timerange_start, timerange_end, bounds, sceneids, virtualsensorid, datasetid_spatial_ref, ref_job_id, non_ref_datasetids, max_cloudcover, season_code, path_analysis_script, analysis_parameter, download_stats, job_mode) VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::job_mode) RETURNING id;";
public static final String CREATE_SCENE_PROCESSING_JOB = "INSERT INTO jobs (creationtime, timerange_start, timerange_end, bounds, sceneids, virtualsensorid, datasetid_spatial_ref, ref_job_id, non_ref_datasetids, max_cloudcover, season_code, path_analysis_script, analysis_parameter, statistics, job_mode) VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?::job_mode) RETURNING id;";
public static final String CREATE_VIRTUAL_SENSOR = "INSERT INTO virtual_sensors (name, spatial_resolution, spectral_characteristics_datasetid, wavelengths_pos, band_width) VALUES (?, ?, ?, ?, ?) RETURNING id;";
......@@ -65,9 +65,9 @@ public interface SqlStatements {
public static final String UPDATE_JOB_STATUS_QUERY = "UPDATE jobs SET status=?::job_status WHERE id=?;";
public static final String UPDATE_JOB_COMMENT_QUERY = "UPDATE jobs SET comment=? WHERE id=?;";
public static final String UPDATE_JOB_PROGRESS_QUERY = "UPDATE jobs SET progress=? WHERE id=?;";
public static final String UPDATE_INC_JOB_DOWNLOAD_STATS = "UPDATE jobs SET download_stats[?] = download_stats[?]+1 WHERE id=? AND download_stats is not NULL RETURNING download_stats;";
public static final String UPDATE_JOB_DOWNLOAD_STATS = "UPDATE jobs SET download_stats = ? WHERE id=?;";
public static final String UPDATE_FINISHED_JOB_QUERY = "UPDATE jobs SET finishtime=CURRENT_TIMESTAMP, status=?::job_status, comment=?, download_stats=?, failed_sceneids=? WHERE id=? RETURNING finishtime;";
public static final String UPDATE_INC_JOB_DOWNLOAD_STATS = "UPDATE jobs SET statistics[?] = statistics[?]+1 WHERE id=? AND statistics is not NULL RETURNING statistics;";
public static final String UPDATE_JOB_DOWNLOAD_STATS = "UPDATE jobs SET statistics = ? WHERE id=?;";
public static final String UPDATE_FINISHED_JOB_QUERY = "UPDATE jobs SET finishtime=CURRENT_TIMESTAMP, status=?::job_status, comment=?, statistics=?, failed_sceneids=? WHERE id=? RETURNING finishtime;";
public static final String JOB_COMMENT_QUERY = "SELECT comment FROM jobs WHERE id=?;";
/**
......
......@@ -94,7 +94,7 @@ public class ProcessingJob {
* [1] number of scenes successfully downloaded<br>
* [2] number of scenes failed to download
*/
public int[] downloadStats;
public int[] statistics;
public job_mode mode;
public Map<Short, List<ProcessingJob>> downloadJobs;
......@@ -191,7 +191,7 @@ public class ProcessingJob {
// subJob.pathAnalysisScript = pathAnalysisScript;
// subJob.analysisParameter = analysisParameter;
downloadJob.mode = job_mode.download_only;
downloadJob.downloadStats = new int[] { sceneids.length, 0, 0 };
downloadJob.statistics = new int[] { sceneids.length, 0, 0 };
// set refjob id, if we have a valid one
if (isValidID(id)) {
......@@ -238,7 +238,7 @@ public class ProcessingJob {
procJob.seasonCode = seasonCode;
procJob.status = job_status.pending;
procJob.mode = job_mode.processing_only;
procJob.downloadStats = null;
procJob.statistics = new int[] { sceneids.length, 0, 0, 0, 0, 0, 0, 0 };
// set refjob id, if we have a valid one
if (isValidID(id)) {
......@@ -299,7 +299,7 @@ public class ProcessingJob {
analysisJob.seasonCode = seasonCode;
analysisJob.status = job_status.pending;
analysisJob.mode = job_mode.analysis_only;
analysisJob.downloadStats = null;
analysisJob.statistics = null;
analysisJob.analysisParameter = this.analysisParameter;
analysisJob.pathAnalysisScript = this.pathAnalysisScript;
......@@ -468,9 +468,9 @@ public class ProcessingJob {
}
public boolean areDownloadsFinished() {
if (downloadStats != null) {
if (statistics != null) {
// if successful and failed downloads add up to total number of downloads -- all downloads are finished
return downloadStats[0] == downloadStats[1] + downloadStats[2];
return statistics[0] == statistics[1] + statistics[2];
} else {
return false;
}
......
......@@ -37,12 +37,12 @@ public class TestProcessingJobHandling {
job.maxCloudcover = 14;
job.seasonCode = 1 + 2 + 4 + 8 + 16 + 32; // jan-jun
job.virtualsensorid = 1;
job.downloadStats = new int[] { 100, 0, 0 };
job.statistics = new int[] { 100, 0, 0 };
job.mode = job_mode.download_only;
// sub job
ProcessingJob subJob = job.initDownloadJobInstance(job.datasetidSpatialRef, new Long[] { 15616441l, 15616302l });
subJob.downloadStats = new int[] { 2, 0, 0 };
subJob.statistics = new int[] { 2, 0, 0 };
db.createProcessJob(job);
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment