Commit 30f22c84 authored by Daniel Eggert's avatar Daniel Eggert
Browse files

removed hardcoded credentials

parents f89913a4 e86e2795
......@@ -47,7 +47,7 @@ public class CloudCoverUpdate {
public static void main(String[] args) throws Exception {
// landsat_tm(108), landsat_mss(107), landsat_8(104), landsat_7(112)
short datasetids[] = new short[] { 104, 112, 189, 250 }; // , 108, 107, 112 };
short datasetids[] = new short[] { 189 }; // , 108, 107, 112 };
// Timestamp start = Timestamp.MIN;
// Timestamp end = Timestamp.now();
// Timestamp end = start.plus(20, Timestamp.TimeUnits.DAYS);
......
......@@ -39,29 +39,34 @@ public class MetadatacrawlerModule {
public static void main(String[] args) throws Exception {
Map<MetadataCrawler, List<DatasetMetadata>> crawlerTasks = new HashMap<>();
// crawl landsat 7 and 8 from usgs
//
// // crawl landsat 7 and 8 from usgs
MetadataCrawler usgs = new UsgsCrawler();
// for(DatasetMetadata dm : usgs.getAvailableDatasets(new Timestamp(2017), new Timestamp(2018))) {
// if(dm.name.equals("LANDSAT_8_C1")) {
// System.out.println(dm.name + " \t " + dm.description + " \t " + dm.imageType + " \t ");
// }
// }
// printProvidedDatasets(usgs);
DatasetMetadata landsat7 = new DatasetMetadata();
landsat7.id = 112;
landsat7.name = "LANDSAT_ETM_SLC_OFF";
DatasetMetadata landsat8 = new DatasetMetadata();
landsat8.id = 104;
landsat8.name = "LANDSAT_8";
DatasetMetadata aster_l1t = new DatasetMetadata();
aster_l1t.id = 189;
aster_l1t.name = "ASTER_L1T";
// crawlerTasks.put(usgs, Arrays.asList(landsat7, landsat8));
crawlerTasks.put(usgs, Arrays.asList(aster_l1t));
// DatasetMetadata landsat7 = new DatasetMetadata();
// landsat7.id = 112;
// landsat7.name = "LANDSAT_ETM_SLC_OFF";
DatasetMetadata landsat8c1 = new DatasetMetadata();
landsat8c1.id = 250;
landsat8c1.name = "LANDSAT_8_C1";
// DatasetMetadata aster_l1t = new DatasetMetadata();
// aster_l1t.id = 189;
// aster_l1t.name = "ASTER_L1T";
crawlerTasks.put(usgs, Arrays.asList(landsat8c1));
// crawlerTasks.put(usgs, Arrays.asList(aster_l1t, landsat7, landsat8));
// crawl sentinel2 from esa' scientific data hub
MetadataCrawler scihub = new ScientificDataHubCrawler();
DatasetMetadata s2 = new DatasetMetadata();
s2.id = 249;
s2.name = "Sentinel-2";
crawlerTasks.put(scihub, Arrays.asList(s2));
// crawlerTasks.put(scihub, Arrays.asList(s2));
final boolean queryLatestOnly = true;
......@@ -72,7 +77,7 @@ public class MetadatacrawlerModule {
// crawl datasets
for (DatasetMetadata dataset : datasets) {
crawlDataset(dataset, crawler, queryLatestOnly, null);
crawlDataset(dataset, crawler, queryLatestOnly, null);// new Timestamp(2017, 5, 1));
}
}
}
......@@ -129,12 +134,11 @@ public class MetadatacrawlerModule {
BlockingQueue<SceneMetadata> queue = new LinkedBlockingQueue<>();
Timestamp start = null;
Timestamp end = null;
Timestamp end = Timestamp.now().plus(1, Timestamp.TimeUnits.DAYS);
// there are already scenes in the DB; query only the ones acquired after the latest known date
if (queryLatestOnly && !latestDateInDB.equals(Timestamp.MIN)) {
start = latestDateInDB;
end = Timestamp.now().plus(1, Timestamp.TimeUnits.DAYS);
}
if (startOverride != null) {
......@@ -168,6 +172,7 @@ public class MetadatacrawlerModule {
if (scene.satelliteid == 0) {
crawler.cancelCrawling();
dbScenes.clear();
throw new IllegalArgumentException("The scenes have to be tagged with the satelliteid - update crawler class \"" + crawler.getClass().getSimpleName()
+ "\" to support crawling of dataset \"" + dataset.name + "\"");
}
......@@ -210,21 +215,36 @@ public class MetadatacrawlerModule {
}
}
dbScenes.clear();
System.out.println("inserted " + count + " scenes into database");
db.commit();
// update dataset count
ResultSet rs = db.placeCustomQuery("SELECT count(id) from scenes where datasetid = " + dataset.id + ";");
int totalScenes = 0;
ResultSet rs = db.placeCustomQuery("SELECT count(id), min(acquisitiondate), max(acquisitiondate) from scenes where datasetid = " + dataset.id + ";");
long totalScenes = 0;
java.sql.Timestamp maxDate = null;
java.sql.Timestamp minDate = null;
if (rs.next()) {
totalScenes = rs.getInt(1);
db.prepareCustomStatement("UPDATE datasets SET totalscenes = " + totalScenes + " where id = " + dataset.id + ";").executeUpdate();
totalScenes = rs.getLong(1);
minDate = rs.getTimestamp(2);
maxDate = rs.getTimestamp(3);
PreparedStatement dsUpPst = db.prepareCustomStatement("UPDATE datasets SET totalscenes = ?, startdate = ?, enddate = ? where id = ?;");
dsUpPst.setLong(1, totalScenes);
dsUpPst.setTimestamp(2, minDate);
dsUpPst.setTimestamp(3, maxDate);
dsUpPst.setShort(4, dataset.id);
dsUpPst.executeUpdate();
db.commit();
}
System.out.println("\ndataset: " + dataset.name);
System.out.println("crawler: " + crawler.getClass().getSimpleName());
System.out.println("total scenes in DB: " + totalScenes);
System.out.println("date range in DB: " + minDate.toString() + " TO " + maxDate.toString());
System.out.println("********** Crawling finished **********\n");
}
......
......@@ -102,8 +102,9 @@ public abstract class MetadataCrawler {
scene.sensorid = 9; // ASTER
scene.subsystemid = 0; // null
break;
case 250:
case 104:
// LANDSAT_8
// LANDSAT_8 and LANDSAT_8_C1
scene.satelliteid = 7; // Landsat-8
scene.subsystemid = 0; // null
......
/**
*
*/
package de.potsdam.gfz.scihubapi;
/**
 * Credentials for the gms_downloader SciHub account, resolved from the
 * environment instead of being hardcoded in version control.
 *
 * <p>Set the environment variables {@code SCIHUB_USERNAME} and
 * {@code SCIHUB_PASSWORD} before running. Either constant is {@code null}
 * when the corresponding variable is unset — callers should check before use.
 *
 * @author Daniel Eggert (daniel.eggert@gfz-potsdam.de)
 */
public interface SciHubCredentials {
    // Resolved once at class-initialization time. NEVER commit real
    // credential values into this file — that is what this fix removes.
    public static final String USERNAME = System.getenv("SCIHUB_USERNAME");
    public static final String PASSWORD = System.getenv("SCIHUB_PASSWORD");
}
......@@ -24,7 +24,6 @@ import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import de.potsdam.gfz.scihubapi.SciHubCredentials;
import de.potsdam.gfz.scihubapi.UrlUtils;
/**
......@@ -327,7 +326,7 @@ public class SciHubOpenData {
}
public static void main(String[] args) {
SciHubOpenData sciHubData = new SciHubOpenData(SciHubCredentials.USERNAME, SciHubCredentials.PASSWORD);
SciHubOpenData sciHubData = new SciHubOpenData("username", "password");
System.out.println("SCI_HUB_GRANULE LIST");
// final String uuid = "283adccc-f786-42d8-a4b0-c30739a71238";
......
......@@ -26,7 +26,6 @@ import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import de.potsdam.gfz.scihubapi.SciHubCredentials;
import de.potsdam.gfz.scihubapi.UrlUtils;
/**
......@@ -204,7 +203,7 @@ public class SciHubSearch {
BlockingQueue<SearchResultEntry> resultQueue = new LinkedBlockingQueue<>();
SciHubSearch sciHub = new SciHubSearch(SciHubCredentials.USERNAME, SciHubCredentials.PASSWORD);
SciHubSearch sciHub = new SciHubSearch("username", "password");
// place query
sciHub.search(query, resultQueue);
......
/**
*
*/
package de.potsdam.gfz.usgsapi;
/**
 * Credentials for the USGS EarthExplorer account, resolved from the
 * environment instead of being hardcoded in version control.
 *
 * <p>Set the environment variables {@code USGS_USERNAME} and
 * {@code USGS_PASSWORD} before running. Either constant is {@code null}
 * when the corresponding variable is unset — callers should check before use.
 *
 * @author Daniel Eggert (daniel.eggert@gfz-potsdam.de)
 */
public interface UsgsCredentials {
    // Resolved once at class-initialization time. NEVER commit real
    // credential values into this file — that is what this fix removes.
    public static final String USERNAME = System.getenv("USGS_USERNAME");
    public static final String PASSWORD = System.getenv("USGS_PASSWORD");
}
......@@ -10,7 +10,6 @@ import java.net.HttpURLConnection;
import java.net.URL;
import java.util.List;
import de.potsdam.gfz.usgsapi.UsgsCredentials;
import de.potsdam.gfz.usgsapi.json.datamodels.InventoryScene;
import de.potsdam.gfz.usgsapi.json.datamodels.SearchFilter;
import de.potsdam.gfz.usgsapi.json.datamodels.SearchFilter.Or;
......@@ -34,12 +33,12 @@ import net.minidev.json.parser.ParseException;
*/
public class UsgsApi {
private static final String URL = "https://earthexplorer.usgs.gov/inventory/json/";
private static final String URL = "https://earthexplorer.usgs.gov/inventory/json/";
/**
* the received api key will be invalidated after one hour
*/
public static final long APIKEY_VALIDATION_TIMEOUT = 60 * 60 * 1000; // 1 hour
public static final long APIKEY_VALIDATION_TIMEOUT = 60 * 60 * 1000; // 1 hour
/**
* Example requests
......@@ -51,7 +50,7 @@ public class UsgsApi {
String apiKey = null;
System.out.println("submitting login request");
JSONResponse response = sendRequest(new LoginRequest(UsgsCredentials.USERNAME, UsgsCredentials.PASSWORD));
JSONResponse response = sendRequest(new LoginRequest("paste_your_username_here", "paste_your_password_here"));
System.out.println(response.getClass().getSimpleName());
System.out.println("hasError: " + response.hasError());
if (response.hasError()) {
......@@ -187,7 +186,7 @@ public class UsgsApi {
} else {
url += "?jsonRequest=" + request.getDataAsJSON();
// System.out.println("submitting request: " + url);
// System.out.println("submitting request: " + url);
con = (HttpURLConnection) new URL(url).openConnection();
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment