Commit eb9510fe authored by Leonie Pick

Update prior to submission.

parent c92b7cc3
@@ -7,6 +7,7 @@ import pandas as pd
import datetime as dt
import itertools
import time as time
+import joblib
# IPython
from IPython.display import display
# Matplotlib
......
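# Note (editor): 'joblib' is newly imported in this commit. A minimal, self-contained sketch
# of the pattern it usually enables -- persisting a fitted scikit-learn model to disk -- under
# the assumption that this is what the notebook uses it for (file name and data are hypothetical).
import joblib
import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.default_rng(0)
X = rng.random((20, 12))                      # stand-in for the 12-column feature matrix
y = np.tile([0, 1], 10)                       # stand-in binary class labels (CIR/CME)
clf = LogisticRegression(solver='lbfgs').fit(X, y)   # binary logistic regression, as named in the README
joblib.dump(clf, 'LogRegModel.pkl')           # serialize the trained classifier
clf_restored = joblib.load('LogRegModel.pkl') # reload it later without refitting
print(clf_restored.predict(X[:3]))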
@@ -132,12 +132,12 @@ def Search_TargetEvents(HMC, HMC11y, HMC5d, dHMC, HTime, DTime, grid, Save):
return IndexMin2
###
###
-def Get_TargetEvents(HMC, HMC11y, HMC5d, dHMC, Kp_all, KpHours_all, Training, Time, Date, YearStart, Save):
+def Get_TargetEvents(HMC, HMC11y, HMC5d, dHMC, Kp_all, KpHours_all, Reference, Time, Date, YearStart, Save):
YearsIndex = np.where(Time[:,0] >= YearStart)[0]
-TrClass = Training[:,0]
-TrTimeIndex = Training[:,1]
-TrFound = Training[:,2]
+TrClass = Reference[:,0]
+TrTimeIndex = Reference[:,1]
+TrFound = Reference[:,2]
CIRs = np.where(np.logical_or(TrClass == 0, TrClass == 2))[0]
CMEs = np.where(TrClass==1)[0]
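# Note (editor): sketch of how the class selection above behaves, given that 'Reference'
# is unpacked as columns [class, time index, found flag] and that rows labelled 0 or 2 are
# treated as CIR-driven and rows labelled 1 as CME-driven storms; the numbers are illustrative.
import numpy as np

Reference = np.array([[0, 120, 0],
                      [1, 340, 0],
                      [2, 515, 0],
                      [1, 800, 0]])
TrClass = Reference[:, 0]
CIRs = np.where(np.logical_or(TrClass == 0, TrClass == 2))[0]  # rows labelled 0 or 2
CMEs = np.where(TrClass == 1)[0]                               # rows labelled 1
print(CIRs, CMEs)  # -> [0 2] [1 3]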
@@ -179,19 +179,19 @@ def Get_TargetEvents(HMC, HMC11y, HMC5d, dHMC, Kp_all, KpHours_all, Training, Ti
TrFound[CIRIndices] = 1
TrFound[CMEIndices] = 1
-Training[:,2] = TrFound
+Reference[:,2] = TrFound
#print(grid[IndexWin,:])
#if Save == True:
#np.save('./Dump/Events',StormsWin)
###
-TargetResults = {'No. targets':len(StormsWin),'Fraction [%]':len(StormsWin)*100/len(HMC[YearsIndex])}
+TargetResults = {'No. target events':len(StormsWin),'Fraction [%]':len(StormsWin)*100/len(HMC[YearsIndex])}
TrainingResults = [[len(CIRs), len(CMEs), len(CIRs)+len(CMEs)],[len(Found_CIRs),len(Found_CMEs),len(Found_CIRs)+len(Found_CMEs)]]
-SelectionParams = {'HMC percentile':grid[IndexWin,0],'Scaling power':grid[IndexWin,1], 'Separation [h]':grid[IndexWin,2], 'Min. HMC drop [nT]':grid[IndexWin,3]}
+SelectionParams = {r'$P_n [nT]$':grid[:,0],r'$p_{sc}$':grid[:,1], r'$\Delta t$ [h]':grid[:,2], r' $Hs$ [nT]':-grid[:,3]}
display(pd.DataFrame(data=SelectionParams,index=['Selection parameters']))
display(pd.DataFrame(data=TargetResults,index=['Selection result']))
-display(pd.DataFrame(data=TrainingResults,columns=['No. CIRs','No. CMEs', 'Total'], index=['Training set', 'in Target set']))
+display(pd.DataFrame(data=TrainingResults,columns=['No. CIRs','No. CMEs', 'Total'], index=['Reference set', 'Training set']))
###
pl.IndexDist(Time,YearsIndex,StormsWin,Kp_all,KpHours_all,HMC,Save)
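# Note (editor): the summary dictionaries above are rendered as one-row tables via
# pandas and IPython.display; a minimal sketch with made-up numbers only:
import pandas as pd
from IPython.display import display

TargetResults = {'No. target events': 320, 'Fraction [%]': 1.9}        # illustrative values
display(pd.DataFrame(data=TargetResults, index=['Selection result']))  # one labelled row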
@@ -324,7 +324,7 @@ def Get_F11(i,TrTimeIndex,TrClass):
return F11
###
###
-def Get_Features(Time, Storms, Training, HMC, HMC11y, HMC1y, dHMC, B_MLT, ASY, Save):
+def Get_Features(Time, Storms, Reference, HMC, HMC11y, HMC1y, dHMC, B_MLT, ASY, Save):
PeakTimes = Storms; PeakIndices = np.arange(0,len(Storms),1)
@@ -342,8 +342,8 @@ def Get_Features(Time, Storms, Training, HMC, HMC11y, HMC1y, dHMC, B_MLT, ASY, S
dAsyDD = np.gradient(ASY[:,0],1,edge_order=1)
dHMC11y = np.gradient(HMC11y,1,edge_order=1)
-TrTimeIndex = Training[:,1]#Training.sel(Properties='TimeIndex')
-TrClass = Training[:,0]#Training.sel(Properties='Class')
+TrTimeIndex = Reference[:,1]#Reference.sel(Properties='TimeIndex')
+TrClass = Reference[:,0]#Reference.sel(Properties='Class')
excludeEvents = []
count1=0; count2=0
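# Note (editor): the feature derivatives above (dAsyDD, dHMC11y) use numpy's finite-difference
# gradient with unit sample spacing; a minimal sketch of that call on a toy series:
import numpy as np

series = np.array([0.0, 1.0, 4.0, 9.0, 16.0])
dseries = np.gradient(series, 1, edge_order=1)  # central differences inside, one-sided at the ends
print(dseries)  # -> [1. 2. 4. 6. 7.]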
@@ -406,11 +406,11 @@ def Get_Features(Time, Storms, Training, HMC, HMC11y, HMC1y, dHMC, B_MLT, ASY, S
return PeakIndices, FeatureMatrix
###
###
-def Get_Diagnostics(FeatureMatrix,Storms,Training,Save):
+def Get_Diagnostics(FeatureMatrix,Storms,Reference,Save):
-TrClass = Training[:,0]
-TrTimeIndex = Training[:,1]
-TrKnown = Training[:,2]
+TrClass = Reference[:,0]
+TrTimeIndex = Reference[:,1]
+TrKnown = Reference[:,2]
n_features = FeatureMatrix.shape[1]
n_classes = len(np.unique(TrClass))
@@ -456,7 +456,7 @@ def Get_Diagnostics(FeatureMatrix,Storms,Training,Save):
k += 1
###
-display(pd.DataFrame(data=FeatureResults,index=np.linspace(1,12,12,dtype=int),columns=['Q2 CIRs','Q2 CMEs','Q2 Difference','IQR Overlap', 'Q2 Difference- IQR Overlap']))
+display(pd.DataFrame(data=FeatureResults,index=np.linspace(1,12,12,dtype=int),columns=['Q2 CIRs','Q2 CMEs','Q2 Difference','IQR Overlap', 'Q2 Difference - IQR Overlap']))
###
pl.Diagnostics(n_features, n_classes, NData, Save)
......
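# Note (editor): sketch of the per-feature diagnostics tabulated above -- the class medians (Q2),
# their difference, and the overlap of the two interquartile ranges -- assuming they are computed
# roughly as follows; the exact definition inside Get_Diagnostics may differ.
import numpy as np

cir_vals = np.array([1.0, 2.0, 3.0, 4.0, 5.0])   # toy feature values for CIR-driven storms
cme_vals = np.array([3.5, 4.5, 5.5, 6.5, 7.5])   # toy feature values for CME-driven storms

q1_cir, q2_cir, q3_cir = np.percentile(cir_vals, [25, 50, 75])
q1_cme, q2_cme, q3_cme = np.percentile(cme_vals, [25, 50, 75])

q2_diff = abs(q2_cme - q2_cir)                                     # separation of the class medians
iqr_overlap = max(0.0, min(q3_cir, q3_cme) - max(q1_cir, q1_cme))  # shared extent of the two IQRs
print(q2_diff, iqr_overlap, q2_diff - iqr_overlap)                 # -> 2.5 0.0 2.5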
-# ClassifyStorms - An automatic classifier for geomagnetic storm drivers based on machine learning techniques
+# ClassifyStorms - an automatic classifier for geomagnetic storm drivers based on machine learning techniques
### Author
Leonie Pick, GFZ German Research Centre for Geosciences, leonie.pick@gfz-potsdam.de
@@ -32,7 +32,7 @@ executed by a supervised binary logistic regression model in the framework of py
### How to run ClassifyStorms?
Download the GitLab project 'ClassifyStorms' from http://gitext.gfz-potsdam.de/lpick/ClassifyStorms by clicking on the 'Download'
-button (top right). Additionally, download 'Input.nc' from ftp://ftp.gfz-potsdam.de/home/mag/pick/ClassifyStorms and place it into
+button (top right). Additionally, download 'Input.nc' from GFZ Data Services ( http://doi.org/10.5880/GFZ.2.3.2019.003 ) and place it into
the extracted directory 'ClassifyStorms-master'. Navigate to ClassifyStorms-master and start the jupyter server
(http://jupyter.org) by typing 'jupyter notebook' into the command line. This will open the jupyter 'Home' in your web
browser. Select 'ClassifyStorms.ipynb' from the list and run the notebook by clicking 'Run'.
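To verify the download before launching the notebook, the netCDF input file can be inspected from a Python prompt. This is only an optional sketch; it assumes the `xarray` package is available in your environment (any netCDF reader works) and simply lists the variables contained in the file:

```python
# Optional sanity check of the downloaded input file (file name taken from the README above);
# xarray is an assumption here, not a stated requirement of the notebook.
import xarray as xr

ds = xr.open_dataset('Input.nc')
print(ds.data_vars)   # list the variables contained in the file
ds.close()
```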
@@ -46,6 +46,6 @@ browser. Select 'ClassifyStorms.ipynb' from the list and run the notebook by cli
project's master branch (destination).
### References
-+ The original version of this software is a supplement to Pick et al., GRL, 2019, submitted.
++ The original version of this software is a supplement to Pick et al., Earth and Space Science, 2019.
+ Details on the HMC index are given in Pick et al., JGR Space Physics, 2019
( http://doi.org/10.1029/2018JA026185, with data published under http://doi.org/10.5880/GFZ.2.3.2018.006 ).