Commit

getting rid of print() statements
ThomasLecocq committed May 26, 2023
1 parent d74ddd4 commit b6234b8
Showing 20 changed files with 123 additions and 125 deletions.
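Every file in this commit follows the same pattern: bare print() calls become calls on Python's standard logging module, so each message carries a level and a named source, and handler configuration decides where it goes instead of everything landing on stdout. A minimal before/after sketch (logger name, format and station code are illustrative, not taken from the diff):

import logging

logger = logging.getLogger("msnoise.example")  # hypothetical module name

def process(pair):
    # before: print("Processing %s" % pair)
    # after: same text, but with a level, a timestamp and a named source
    logger.debug("Processing %s" % pair)

logging.basicConfig(level=logging.DEBUG,
                    format="%(asctime)s %(name)s [%(levelname)s] %(message)s")
process("BE.MEM.--")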
49 changes: 23 additions & 26 deletions msnoise/api.py
@@ -556,17 +556,17 @@ def check_stations_uniqueness(session, station):
     if station.count(".") == 2:
         return station
 
-    print("It seems you're voluntarily missing the location code for"
-          " \"%s\". We'll handle this automatically, if there are no "
-          "conflicts." % station)
+    logging.info("It seems you're voluntarily missing the location code for"
+                 " \"%s\". We'll handle this automatically, if there are no "
+                 "conflicts." % station)
     net, sta = station.split(".")
     locs = get_station(session, net, sta).locs()
     if len(locs) != 1:
-        print("There are more than 1 location codes for this station: "
-              "%s" % locs)
+        logging.info("There are more than 1 location codes for this station: "
+                     "%s" % locs)
         return station
     station += ".%s" % locs[0]
-    print("Found %s to be the unique solution for this station" % station)
+    logging.info("Found %s to be the unique solution for this station" % station)
     return station
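The migrated calls above keep eager %-interpolation inside logging.info(). That works, but logging also supports deferred formatting, where the string is only built if the record passes the level filter, a minor saving on hot paths. A hedged sketch of the two forms:

import logging

logger = logging.getLogger("msnoise.api")
station = "BE.MEM"  # illustrative value

# Eager: the message string is built even if INFO records are filtered out.
logger.info("Found %s to be the unique solution for this station" % station)

# Deferred: logging interpolates the args only when the record is emitted.
logger.info("Found %s to be the unique solution for this station", station)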


@@ -920,7 +920,7 @@ def get_next_job(session, flag='T', jobtype='CC', limit=99999):
             filter(Job.jobtype == jobtype).
             filter(Job.flag == flag).first().day).\
         limit(limit).with_for_update()
-    # print(jobs.statement.compile(compile_kwargs={"literal_binds": True}))
+
     tmp = jobs.all()
     refs = [_.ref for _ in tmp]
     q = update(Job).values({"flag":"I"}).where(Job.ref.in_(refs))
@@ -937,7 +937,7 @@ def get_dvv_jobs(session, flag='T', jobtype='DVV', limit=99999):
     jobs = session.query(Job).filter(Job.jobtype == jobtype). \
         filter(Job.flag == flag). \
         limit(limit).with_for_update()
-    # print(jobs.statement.compile(compile_kwargs={"literal_binds": True}))
+
     tmp = jobs.all()
     refs = [_.ref for _ in tmp]
     q = update(Job).values({"flag": "I"}).where(Job.ref.in_(refs))
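The commented-out dumps of the compiled SQL statement are deleted rather than migrated. If that diagnostic is ever wanted again, one option (an assumption, not something this commit does) is to gate it behind the DEBUG level, so the relatively expensive literal-bind compilation only runs on demand:

import logging

logger = logging.getLogger("msnoise.api")  # hypothetical logger name

def log_compiled(query):
    # `query` is a SQLAlchemy query such as `jobs` above; compiling with
    # literal binds is not free, so skip it unless DEBUG is enabled.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(query.statement.compile(
            compile_kwargs={"literal_binds": True}))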
@@ -1448,7 +1448,7 @@ def get_results(session, station1, station2, filterid, components, dates,
     base = os.path.join("STACKS", "%02i" % filterid,
                         "%03i_DAYS" % mov_stack, components,
                         "%s_%s" % (station1, station2), "%s") + extension
-    print("Reading files... in %s" % base)
+    logging.debug("Reading files... in %s" % base)
     lastday = dates[0]
     for j, date in enumerate(dates):
         daystack = base % str(date)
@@ -2026,7 +2026,7 @@ def psd_read_results(net, sta, loc, chan, datelist, format='PPSD', use_cache=Tru
     import tempfile
     fn = os.path.join(tempfile.gettempdir(), "MSNOISE-PSD", fn)
     if use_cache and os.path.isfile(fn):
-        print("I found this cool file: %s" % fn)
+        logging.debug("I found this cool file: %s" % fn)
         ppsd = PPSD.load_npz(fn)
     else:
         first = True
@@ -2038,7 +2038,7 @@ def psd_read_results(net, sta, loc, chan, datelist, format='PPSD', use_cache=Tru
                 net, sta, loc, chan, day.year, jday))
             files = glob.glob(toglob)
             if not len(files):
-                print("No files found for %s.%s.%s.%s: %s" % (
+                logging.error("No files found for %s.%s.%s.%s: %s" % (
                     net, sta, loc, chan, day))
                 continue
             file = files[0]
@@ -2132,15 +2132,13 @@ def xr_create_or_open(fn, taxis=[], name="CCF"):
         dr = xr.DataArray(data, coords=[times, level0, level1],
                           dims=["times", "level0", "level1"])
     else:
-        print("Not implemented, name=%s invalid." % name)
+        logging.error("Not implemented, name=%s invalid." % name)
         sys.exit(1)
     dr.name = name
     return dr.to_dataset()
 
 
 def xr_insert_or_update(dataset, new):
-    print("dataset", type(dataset))
-    print("new", type(new))
     tt = new.merge(dataset, compat='override', combine_attrs="drop_conflicts")
     return tt.combine_first(dataset)
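xr_insert_or_update, which loses its two debug prints here, is an upsert on xarray datasets: merge() aligns and extends the coordinates, with compat='override' suppressing equality checks, and combine_first() then backfills from the existing data so new values win wherever they exist. A toy illustration assuming standard xarray semantics, not MSNoise data:

import xarray as xr

old = xr.Dataset({"CCF": ("times", [1.0, 2.0])}, coords={"times": [0, 1]})
new = xr.Dataset({"CCF": ("times", [9.0, 3.0])}, coords={"times": [1, 2]})

# Outer-join the coordinates, taking conflicting variables from `new` ...
tt = new.merge(old, compat="override", combine_attrs="drop_conflicts")
# ... then backfill missing slots from `old`.
result = tt.combine_first(old)
# times: 0 -> 1.0 (kept), 1 -> 9.0 (updated), 2 -> 3.0 (inserted)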

@@ -2167,15 +2165,16 @@ def xr_save_ccf(station1, station2, components, filterid, mov_stack, taxis, new,
     xr_save_and_close(dr, fullpath)
     return dr
 
+
 def xr_get_ccf(station1, station2, components, filterid, mov_stack, taxis):
     path = os.path.join("STACKS2", "%02i" % filterid,
                         "%03i_DAYS" % mov_stack, "%s" % components)
     fn = "%s_%s.nc" % (station1, station2)
 
     fullpath = os.path.join(path, fn)
     if not os.path.isfile(fullpath):
-        print("FILE DOES NOT EXIST: %s, skipping" % fullpath)
-        raise FileNotFoundError
+        # logging.error("FILE DOES NOT EXIST: %s, skipping" % fullpath)
+        raise FileNotFoundError(fullpath)
     data = xr_create_or_open(fullpath, taxis, name="CCF")
     return data.CCF.to_dataframe().unstack().droplevel(0, axis=1)
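Besides dropping the print (it survives only as a commented-out logging call), these guards now raise FileNotFoundError with the offending path, so str() of the exception tells the caller which file was missing. A quick illustration of the difference, with an illustrative path in the STACKS2 layout built above:

# Bare raise: the caller cannot tell which file was missing.
try:
    raise FileNotFoundError
except FileNotFoundError as exc:
    assert str(exc) == ""

# Raise with the path: str(exc) is the path itself.
try:
    raise FileNotFoundError("STACKS2/01/001_DAYS/ZZ/STA1_STA2.nc")
except FileNotFoundError as exc:
    assert str(exc) == "STACKS2/01/001_DAYS/ZZ/STA1_STA2.nc"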

@@ -2201,8 +2200,8 @@ def xr_get_ref(station1, station2, components, filterid, taxis):
 
     fullpath = os.path.join(path, fn)
     if not os.path.isfile(fullpath):
-        print("FILE DOES NOT EXIST: %s, skipping" % fullpath)
-        raise FileNotFoundError
+        # logging.error("FILE DOES NOT EXIST: %s, skipping" % fullpath)
+        raise FileNotFoundError(fullpath)
     data = xr_create_or_open(fullpath, taxis, name="REF")
     return data.CCF.to_dataframe()

@@ -2232,8 +2231,8 @@ def xr_get_mwcs(station1, station2, components, filterid, mov_stack):
                       "%s" % components,
                       "%s_%s.nc" % (station1, station2))
     if not os.path.isfile(fn):
-        print("FILE DOES NOT EXIST: %s, skipping" % fn)
-        raise FileNotFoundError
+        # logging.error("FILE DOES NOT EXIST: %s, skipping" % fn)
+        raise FileNotFoundError(fn)
     data = xr_create_or_open(fn, name="MWCS")
     data = data.MWCS.to_dataframe().reorder_levels(['times', 'taxis', 'keys']).unstack().droplevel(0, axis=1).unstack()
     return data
@@ -2247,8 +2246,6 @@ def xr_save_dtt(station1, station2, components, filterid, mov_stack, dataframe):
     if not os.path.isdir(os.path.split(fn)[0]):
         os.makedirs(os.path.split(fn)[0])
     d = dataframe.stack()
-    print("OUTPUT:")
-    print(d.head())
     d.index = d.index.set_names(["times", "keys"])
     d.columns = ["DTT"]
     dr = xr_create_or_open(fn, taxis=[], name="DTT")
@@ -2263,8 +2260,8 @@ def xr_get_dtt(station1, station2, components, filterid, mov_stack):
                       "%s" % components,
                       "%s_%s.nc" % (station1, station2))
     if not os.path.isfile(fn):
-        print("FILE DOES NOT EXIST: %s, skipping" % fn)
-        raise FileNotFoundError
+        # logging.error("FILE DOES NOT EXIST: %s, skipping" % fn)
+        raise FileNotFoundError(fn)
     dr = xr_create_or_open(fn, taxis=[], name="DTT")
     data = dr.DTT.to_dataframe().reorder_levels(['times', 'keys']).unstack().droplevel(0, axis=1)
     return data
@@ -2293,8 +2290,8 @@ def xr_get_dvv(components, filterid, mov_stack):
                       "%03i_DAYS" % mov_stack,
                       "%s.nc" % components)
     if not os.path.isfile(fn):
-        print("FILE DOES NOT EXIST: %s, skipping" % fn)
-        raise FileNotFoundError
+        # logging.error("FILE DOES NOT EXIST: %s, skipping" % fn)
+        raise FileNotFoundError(fn)
     data = xr_create_or_open(fn, name="DVV")
     data = data.DVV.to_dataframe().reorder_levels(['times', 'level1', 'level0']).unstack().droplevel(0, axis=1).unstack()
     return data
4 changes: 0 additions & 4 deletions msnoise/msnoise_admin.py
@@ -478,7 +478,6 @@ def index(self):
         params = get_params(db)
         station1, station2 = pairs[pair]['text'].replace('.', '_').split(' - ')
         start, end, dates = build_movstack_datelist(db)
-        print(station1, station2, filter, component, dates, format)
         i, result = get_results(db,station1, station2, filter, component, dates,
                                 format=format, params=params)

@@ -649,7 +648,6 @@ def dataAvail():
     db = connect()
     data = get_data_availability(db, net=data['net'], sta=data['sta'],
                                  loc=data['loc'], chan='HHZ')
-    print(data)
     o = {'dates': [o.starttime.strftime('%Y-%m-%d') for o in data]}
     db.close()
     o['result'] = 'ok'
@@ -751,7 +749,6 @@ def PSDAvail():
     data = flask.request.get_json()
     if not data:
         data = flask.request.args
-    print(data)
     fn = os.path.join(os.getcwd(), "PSD", "PNG", "*", data["net"], data["sta"], "%s.D"%data["chan"], "*")
     files = sorted(glob.glob(fn))
     o = {}
@@ -783,7 +780,6 @@ def PSD_PNG():
     year = "%04i"%d.year
     fn = os.path.join(os.getcwd(), "PSD", "PNG", year, data["net"], data["sta"],
                       "%s.D" % data["chan"], file)
-    print(fn)
     format = "png"
     if 'format' in data:
         format = data["format"]
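In these admin endpoints the prints are dropped outright rather than converted. Should request payloads ever need tracing again, a hedged alternative (not part of this commit) is Flask's per-application logger, which honors the configured level:

import flask

app = flask.Flask(__name__)  # illustrative app, not msnoise_admin's

@app.route("/data_avail")
def dataAvail():
    data = flask.request.get_json() or flask.request.args
    # Payloads are only rendered when the app logs at DEBUG.
    app.logger.debug("dataAvail payload: %s", dict(data))
    return {"result": "ok"}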
12 changes: 7 additions & 5 deletions msnoise/plots/ccftime.py
@@ -47,7 +47,9 @@
 
 def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, seismic=False,
          show=False, outfile=None, envelope=False, refilter=None,
-         normalize=None, **kwargs):
+         normalize=None, loglevel="INFO", **kwargs):
+    logger = get_logger('msnoise.cc_plot_ccftime', loglevel,
+                        with_pid=True)
     db = connect()
     maxlag = float(get_config(db, 'maxlag'))
     samples = get_maxlag_samples(db)
@@ -66,23 +68,23 @@ def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, seismic=False,
     freqmax = float(freqmax)
 
     if sta2 < sta1:
-        print("Stations STA1 STA2 should be sorted alphabetically")
+        logger.error("Stations STA1 STA2 should be sorted alphabetically")
         return
 
     sta1 = check_stations_uniqueness(db, sta1)
     sta2 = check_stations_uniqueness(db, sta2)
 
     pair = "%s:%s" % (sta1, sta2)
 
-    print("Fetching CCF data for %s-%s-%i-%i" % (pair, components, filterid,
+    logger.info("Fetching CCF data for %s-%s-%i-%i" % (pair, components, filterid,
                                                        mov_stack))
     stack_total = xr_get_ccf(sta1, sta2, components, filterid, mov_stack, taxis)
 
     # convert index to mdates
     stack_total.index = mdates.date2num(stack_total.index.to_pydatetime())
 
     if len(stack_total) == 0:
-        print("No CCF found for this request")
+        logger.error("No CCF found for this request")
         return
 
     if normalize == "common":
@@ -137,7 +139,7 @@ def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, seismic=False,
                                                               filterid,
                                                               mov_stack))
         outfile = "ccftime " + outfile
-        print("output to:", outfile)
+        logger.info("output to: %s", outfile)
         plt.savefig(outfile)
     if show:
         plt.show()
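One subtlety in this conversion: logging does not behave like print() with comma-separated arguments. Extra positional args are %-interpolated into the message, and a message without placeholders plus an argument produces a formatting error when the record is emitted, hence the placeholder form used above:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("msnoise.example")

outfile = "ccftime BE_MEM-BE_UCC-f1-m1.png"  # illustrative file name

# print("output to:", outfile)         # print concatenates with a space
# logger.info("output to:", outfile)   # -> "--- Logging error ---" at emit time
logger.info("output to: %s", outfile)  # correct: lazy %-interpolation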
5 changes: 4 additions & 1 deletion msnoise/plots/distance.py
@@ -22,6 +22,8 @@
 
 def main(filterid, components, ampli=1, show=True, outfile=None,
          refilter=None, virtual_source=None, **kwargs):
+    logger = get_logger('msnoise.plotdistance_child', "DEBUG",
+                        with_pid=True)
     db = connect()
 
     pairs = get_station_pairs(db, used=1)
@@ -61,7 +63,8 @@ def main(filterid, components, ampli=1, show=True, outfile=None,
             ref = xr_get_ref(sta1, sta2, components, filterid, taxis)
             ref = Trace(data=ref.CCF.values)
             ref.stats.sampling_rate = cc_sampling_rate
-        except FileNotFoundError:
+        except FileNotFoundError as fullpath:
+            logger.error("FILE DOES NOT EXIST: %s, skipping" % fullpath)
             continue
 
         if refilter:
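This is the consuming side of the FileNotFoundError(path) contract introduced in api.py: the exception is bound locally and its string form is the path the reader raised with. A minimal sketch (illustrative path; logger.exception() would additionally attach the traceback):

import logging

logger = logging.getLogger("msnoise.plotdistance_child")

try:
    raise FileNotFoundError("STACKS2/01/REF/ZZ/STA1_STA2.nc")  # stand-in for xr_get_ref
except FileNotFoundError as exc:
    logger.error("FILE DOES NOT EXIST: %s, skipping", exc)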
17 changes: 9 additions & 8 deletions msnoise/plots/spectime.py
@@ -45,12 +45,14 @@
 
 from msnoise.api import build_movstack_datelist, connect, get_config, \
     get_filters, get_results, check_stations_uniqueness, xr_get_ccf,\
-    get_t_axis
+    get_t_axis, get_logger
 
 
 def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
-         outfile=False, refilter=None, startdate=None, enddate=None, **kwargs):
-
+         outfile=False, refilter=None, startdate=None, enddate=None,
+         loglevel="INFO", **kwargs):
+    logger = get_logger('msnoise.cc_plot_spectime', loglevel,
+                        with_pid=True)
     db = connect()
     cc_sampling_rate = float(get_config(db, 'cc_sampling_rate'))
     start, end, datelist = build_movstack_datelist(db)
@@ -66,23 +68,23 @@ def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
     freqmax = float(freqmax)
 
     if sta2 < sta1:
-        print("Stations STA1 STA2 should be sorted alphabetically")
+        logger.error("Stations STA1 STA2 should be sorted alphabetically")
         return
 
     sta1 = check_stations_uniqueness(db, sta1)
     sta2 = check_stations_uniqueness(db, sta2)
 
     pair = "%s:%s" % (sta1, sta2)
 
-    print("New Data for %s-%s-%i-%i" % (pair, components, filterid,
+    logger.info("Fetching CCF data for %s-%s-%i-%i" % (pair, components, filterid,
                                                        mov_stack))
     stack_total = xr_get_ccf(sta1, sta2, components, filterid, mov_stack, taxis)
 
     # convert index to mdates
     stack_total.index = mdates.date2num(stack_total.index.to_pydatetime())
 
     if len(stack_total) == 0:
-        print("No CCF found for this request")
+        logger.error("No CCF found for this request")
         return
     ax = plt.subplot(111)
     for i, line in stack_total.iterrows():
@@ -122,7 +124,6 @@ def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
     ax.set_title(title)
 
     cursor = Cursor(ax, useblit=True, color='red', linewidth=1.2)
-    print(outfile)
     if outfile:
         if outfile.startswith("?"):
             pair = pair.replace(':', '-')
@@ -131,7 +132,7 @@ def main(sta1, sta2, filterid, components, mov_stack=1, ampli=5, show=False,
                                                               filterid,
                                                               mov_stack))
         outfile = "spectime " + outfile
-        print("output to:", outfile)
+        logger.info("output to: %s", outfile)
         plt.savefig(outfile)
     if show:
         plt.show()
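Both plot commands now obtain their logger from msnoise.api.get_logger, imported above, passing a command-specific name, the level from the new loglevel argument, and with_pid=True, presumably so records from concurrent workers can be told apart. A usage sketch inferred only from the call sites in this diff:

from msnoise.api import get_logger

# Signature assumed from the calls above; get_logger is MSNoise's helper,
# not the stdlib, so keyword names beyond these are not guaranteed.
logger = get_logger('msnoise.cc_plot_spectime', "INFO", with_pid=True)
logger.info("plotting %s", "BE_MEM_BE_UCC")  # illustrative message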
4 changes: 2 additions & 2 deletions msnoise/ppsd_compute.py
@@ -111,14 +111,14 @@ def main(loglevel="INFO", njobs_per_worker=9999):
             continue
         for job in jobs:
             net, sta, loc = job.pair.split('.')
-            print("Processing %s"% job.pair)
+            logger.debug("Processing %s"% job.pair)
             gd = UTCDateTime(job.day).datetime
             files = get_data_availability(
                 db, net=net, sta=sta, loc=loc,
                 starttime=(UTCDateTime(job.day) - 1.5 * ppsd_length).datetime,
                 endtime=gd)
             if len(files) == 0:
-                print("No files found for %s" % job.day)
+                logger.error("No files found for %s" % job.day)
                 continue
 
             for comp in ppsd_components:
2 changes: 1 addition & 1 deletion msnoise/preprocessing.py
@@ -107,7 +107,7 @@ def preprocess(db, stations, comps, goal_day, params, responses=None):
                 datafiles[station][file.chan[-1]].append(fullpath)
             else:
                 MULTIPLEX = True
-                print("Multiplex mode, reading the files")
+                logger.debug("Multiplex mode, reading the files")
                 fullpath = os.path.join(file.path, file.file)
                 multiplexed = sorted(glob.glob(fullpath))
                 for comp in comps:
6 changes: 3 additions & 3 deletions msnoise/psd_compute_rms.py
@@ -85,7 +85,7 @@ def main(loglevel="INFO", njobs_per_worker=9999):
         for job in jobs:
             net, sta, loc = job.pair.split('.')
             if station is None:
-                print("Processing %s" % (job.pair))
+                logger.debug("Processing %s" % (job.pair))
                 station = get_station(db, net, sta)
             for chan in station.chans():
                 if chan not in datelists:
@@ -96,13 +96,13 @@ def main(loglevel="INFO", njobs_per_worker=9999):
             if not len(datelists[chan]):
                 continue
             seed_id = "%s.%s.%s.%s" % (net, sta, loc, chan)
-            print("Will open HDFstore: %s" % seed_id)
+            logger.debug("Will open HDFstore: %s" % seed_id)
             store = hdf_open_store(seed_id, mode="r")
 
             s = datelists[chan][0].strftime("%Y-%m-%d %H:%M:%S")
             e = (datelists[chan][-1] + datetime.timedelta(days=1)).strftime(
                 "%Y-%m-%d %H:%M:%S")
-            print("Selecting data between %s and %s" % (s, e))
+            logger.debug("Selecting data between %s and %s" % (s, e))
             data = store.select("PSD", "(index >= '%s') & (index <= '%s')" % (s, e))
             data = store.PSD
             # only need to compute RMS for new/updated PSD data
2 changes: 1 addition & 1 deletion msnoise/psd_export_rms.py
@@ -30,7 +30,7 @@ def main(loglevel="INFO", njobs_per_worker=9999):
     params = get_params(db)
     files = glob.glob(os.path.join("PSD", "RMS", params.qc_rms_type, '*.h5'))
     for file in sorted(files):
-        print(file)
+        logger.debug(file)
         store = hdf_open_store_from_fn(file, "r")
 
         store.RMS.to_csv(file.replace(".h5", ".csv"))
2 changes: 1 addition & 1 deletion msnoise/psd_to_hdf.py
@@ -44,7 +44,7 @@ def main(loglevel="INFO", njobs_per_worker=9999):
     for job in jobs:
         net, sta, loc = job.pair.split('.')
         if station is None:
-            print("Processing %s" % (job.pair))
+            logger.debug("Processing %s" % (job.pair))
            station = get_station(db, net, sta)
        for chan in station.chans():
            if chan not in datelists:
