Skip to content

Commit

Permalink
Merge pull request #20 from ROBelgium/bugfix_februari2014
Browse files Browse the repository at this point in the history
Bugfix - Removing implicit castings - better obspy filtering
  • Loading branch information
ThomasLecocq committed Feb 25, 2014
2 parents a0be3e2 + b550b5c commit 3e5f4d8
Show file tree
Hide file tree
Showing 6 changed files with 19 additions and 26 deletions.
6 changes: 3 additions & 3 deletions MWCS.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,8 +96,8 @@ def mwcs(ccCurrent, ccReference, fmin, fmax, sampRate, tmin, windL, step):
cri -= np.mean(cri)
cri *= tp

Fcur = scipy.fftpack.fft(cci, n=int(padd))[:padd/2]
Fref = scipy.fftpack.fft(cri, n=int(padd))[:padd/2]
Fcur = scipy.fftpack.fft(cci, n=int(padd))[:int(padd)/2]
Fref = scipy.fftpack.fft(cri, n=int(padd))[:int(padd)/2]

Fcur2 = np.real(Fcur)**2 + np.imag(Fcur)**2
Fref2 = np.real(Fref)**2 + np.imag(Fref)**2
Expand All @@ -111,7 +111,7 @@ def mwcs(ccCurrent, ccReference, fmin, fmax, sampRate, tmin, windL, step):
dcs = np.abs(X)

#Find the values in the frequency range of interest
freqVec = scipy.fftpack.fftfreq(len(X)*2, 1./sampRate)[:padd/2]
freqVec = scipy.fftpack.fftfreq(len(X)*2, 1./sampRate)[:int(padd)/2]
indRange = np.argwhere(np.logical_and(freqVec >= fmin,
freqVec <= fmax))

Expand Down
2 changes: 1 addition & 1 deletion database_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ def get_data_availability(session, net=None, sta=None, comp=None, starttime=None
if not starttime:
data = session.query(DataAvailability).filter(DataAvailability.net == net).filter(DataAvailability.sta == sta).filter(DataAvailability.comp == comp).all()
if not net:
data = session.query(DataAvailability).filter(func.DATE(DataAvailability.starttime) <= starttime.date()).filter(func.DATE(DataAvailability.endtime) >= endtime.date()).all()
data = session.query(DataAvailability).filter(func.DATE(DataAvailability.starttime) <= endtime).filter(func.DATE(DataAvailability.endtime) >= starttime).all()
else:
data = session.query(DataAvailability).filter(DataAvailability.net == net).filter(DataAvailability.sta == sta).filter(func.DATE(DataAvailability.starttime) <= starttime.date()).filter(func.DATE(DataAvailability.endtime) >= endtime.date()).all()
return data
Expand Down
2 changes: 1 addition & 1 deletion myCorr.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def myCorr(data, maxlag, plot=False):
normFact = E[0] * E[1]

if normalized:
corr /= normFact
corr /= np.real(normFact)

if maxlag != Nt:
tcorr = np.arange(-Nt + 1, Nt)
Expand Down
7 changes: 3 additions & 4 deletions plot_dataavailability.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from tmp_database_tools import *
from database_tools import *
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.dates import date2num
Expand All @@ -15,9 +15,8 @@
# print day
data = get_data_availability(db,starttime=day, endtime=day)
for di in data:
net, sta, comp, starttime, endtime, data_duration, gaps_duration, samplerate, flag = di
stations.append("%s.%s"%(net,sta))
dates.append(starttime)
stations.append("%s.%s"%(di.net,di.sta))
dates.append(di.starttime)

data = pd.DataFrame({"stations":stations},index=dates)
data = data.groupby('stations')
Expand Down
2 changes: 1 addition & 1 deletion s01scan_archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def worker(s, pool):
r0 = time.time()
source = Fname
name = os.path.split(source)[1]
data = read(source, quality=False)
data = read(source)
# print data
if data[0].stats.starttime.date < startdate:
r2 = time.time()
Expand Down
26 changes: 10 additions & 16 deletions s03compute_cc.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,10 +207,8 @@
for trace in stream:
data = trace.data
if len(data) > 2:
tp = cosTaper(len(data), 0.01)
data -= np.mean(data)
data *= tp
trace.data = data
trace.detrend("demean")
trace.taper(0.01)
else:
trace.data *= 0
del data
Expand All @@ -224,19 +222,15 @@
goal_day.replace('-', '')) + goal_duration - stream[0].stats.delta, pad=True, fill_value=0.0)
trace = stream[0]

data = trace.data
freq = preprocess_lowpass
logging.debug(
"%s.%s Lowpass at %.2f Hz" % (station, comp, freq))
data = lowpass(
trace.data, freq, trace.stats.sampling_rate, zerophase=True)
"%s.%s Lowpass at %.2f Hz" % (station, comp, preprocess_lowpass))
trace.filter("lowpass", freq=preprocess_lowpass, zerophase=True)

freq = preprocess_highpass
logging.debug(
"%s.%s Highpass at %.2f Hz" % (station, comp, freq))
data = highpass(
data, freq, trace.stats.sampling_rate, zerophase=True)

"%s.%s Highpass at %.2f Hz" % (station, comp, preprocess_highpass))
trace.filter("highpass", freq=preprocess_highpass, zerophase=True)

data = trace.data
samplerate = trace.stats['sampling_rate']
if samplerate != goal_sampling_rate:
if resampling_method == "Resample":
Expand Down Expand Up @@ -374,7 +368,7 @@

for itranche in range(0, tranches):
# print "Avancement: %#2d/%2d"% (itranche+1,tranches)
trame2h = trames[:, itranche * min30:(itranche + 1) * min30]
trame2h = trames[:, itranche * int(min30):(itranche + 1) * int(min30)]
rmsmat = np.std(np.abs(trame2h), axis=1)
for filterdb in get_filters(db, all=False):
filterid = filterdb.ref
Expand All @@ -390,7 +384,7 @@
if min30 / 2 % 2 != 0:
Nfft = min30 + 2

trames2hWb = np.zeros((2, Nfft), dtype=np.complex)
trames2hWb = np.zeros((2, int(Nfft)), dtype=np.complex)
for i, station in enumerate(pair):
# print "USING rms threshold = %f" % rms_threshold
# logging.debug("rmsmat[i] = %f" % rmsmat[i])
Expand Down

0 comments on commit 3e5f4d8

Please sign in to comment.