Merge pull request #8 from jzuhone/cxotime
Replace Chandra.Time with CxoTime
jzuhone committed Jul 8, 2021
2 parents 5163aad + a925347 commit 2cf00f8
Showing 11 changed files with 115 additions and 161 deletions.
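
The substance of the change is a one-for-one swap of time-conversion calls. As a minimal sketch of the equivalences applied throughout this diff (assuming the cxotime package is installed; values are illustrative):

    from cxotime import CxoTime

    t = CxoTime("2021:189:12:00:00")
    print(t.secs)  # float seconds since 1998.0 (CXC seconds); replaces date2secs()
    print(t.date)  # Year:DOY date string; replaces secs2date()

    # Chandra.Time's DateTime(x).secs maps directly to CxoTime(x).secs.
    # CxoTime accepts date strings, floats in CXC seconds, or arrays of either.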
21 changes: 11 additions & 10 deletions acispy/dataset.py
@@ -7,10 +7,11 @@
     OutputFieldsNotFound, create_builtin_derived_states
 from acispy.time_series import TimeSeriesData, EmptyTimeSeries
 from acispy.utils import get_display_name, moving_average, \
-    ensure_list, get_time
+    ensure_list
 from acispy.units import get_units
 import numpy as np
 import Ska.engarchive.fetch_sci as fetch
+from cxotime import CxoTime
 
 
 class Dataset(object):
@@ -476,8 +477,8 @@ class EngArchiveData(Dataset):
     def __init__(self, tstart, tstop, msids, get_states=True,
                  filter_bad=False, stat='5min', state_keys=None,
                  interpolate=None, interpolate_times=None):
-        tstart = get_time(tstart)
-        tstop = get_time(tstop)
+        tstart = CxoTime(tstart).date
+        tstop = CxoTime(tstop).date
         msids = MSIDs.from_database(msids, tstart, tstop=tstop,
                                     filter_bad=filter_bad, stat=stat,
                                     interpolate=interpolate,
@@ -520,8 +521,8 @@ class MaudeData(Dataset):
     def __init__(self, tstart, tstop, msids, get_states=True,
                  user=None, password=None, other_msids=None,
                  state_keys=None):
-        tstart = get_time(tstart)
-        tstop = get_time(tstop)
+        tstart = CxoTime(tstart).date
+        tstop = CxoTime(tstop).date
         msids = MSIDs.from_maude(msids, tstart, tstop=tstop, user=user,
                                  password=password)
         if other_msids is not None:
@@ -539,9 +540,9 @@ def __init__(self, tstart, tstop, msids, get_states=True,
 def _parse_tracelogs(tbegin, tend, filenames, other_msids):
     filenames = ensure_list(filenames)
     if tbegin is not None:
-        tbegin = get_time(tbegin)
+        tbegin = CxoTime(tbegin).date
     if tend is not None:
-        tend = get_time(tend)
+        tend = CxoTime(tend).date
     msid_objs = []
     for filename in filenames:
         # Figure out what kind of file this is
@@ -730,13 +731,13 @@ def __init__(self, tstart, tstop, msids, recent_source="maude",
                  filter_bad=False, stat='5min', user=None, password=None,
                  get_states=True, state_keys=None):
         msids = ensure_list(msids)
-        tstart = get_time(tstart, fmt='secs')
-        tstop = get_time(tstop, fmt='secs')
+        tstart = CxoTime(tstart).secs
+        tstop = CxoTime(tstop).secs
         tmid = 1.0e99
         for msid in msids:
             tm = fetch.get_time_range(msid, format="secs")[-1]
             tmid = min(tmid, tm)
-        tmid = get_time(tmid, fmt='secs')
+        tmid = CxoTime(tmid).secs
         if tmid < tstop:
             msids1 = MSIDs.from_database(msids, tstart, tstop=tmid,
                                          filter_bad=filter_bad, stat=stat)
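
The dataset.py changes preserve each call site's output type: plain get_time(x) returned a Year:DOY date string, so those sites now use CxoTime(x).date, while get_time(x, fmt='secs') sites use CxoTime(x).secs. A minimal sketch of the distinction (illustrative values):

    from cxotime import CxoTime

    tstart = CxoTime("2021:001:00:00:00").date  # -> "2021:001:00:00:00.000"
    tmid = CxoTime(726796800.0).secs            # -> 726796800.0 (passes through)

    # CxoTime accepts either representation as input, which is what lets the
    # format-aware get_time() helper be dropped.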
38 changes: 19 additions & 19 deletions acispy/load_review.py
@@ -1,14 +1,14 @@
 import os
 from acispy.thermal_models import ThermalModelFromLoad
 from acispy.plots import DatePlot
-from acispy.utils import get_time, mylog, find_load, \
+from acispy.utils import mylog, find_load, \
     lr_root, cti_simodes
 from collections import defaultdict
-from Chandra.Time import date2secs, secs2date
 from Ska.Matplotlib import cxctime2plotdate
 import numpy as np
 from datetime import datetime, timezone
 import bisect
+from cxotime import CxoTime
 
 lr_file = "ACIS-LoadReview.txt"
 
@@ -190,7 +190,7 @@ def _populate_event_times(self):
                     if event not in self.events:
                         self.events[event] = {"times": []}
                     if event == "comm_ends":
-                        time = secs2date(date2secs(words[0])-1800.0)
+                        time = CxoTime(CxoTime(words[0]).secs-1800.0).date
                     self.events[event]["times"].append(time)
                     if state is not None:
                         if "state" not in self.events[event]:
@@ -212,7 +212,7 @@ def _populate_event_times(self):
                         "==> DITHER" in line:
                     lines.append(line)
                     line_times.append(time)
-        line_times = date2secs(line_times)
+        line_times = CxoTime(line_times).secs
         if len(self.events["comm_begins"]) > 0:
             lines, line_times = self._fix_comm_times(lines, line_times, comm_durations)
         return lines, line_times
@@ -242,8 +242,8 @@ def get_updated_dsn_comms(self):
         if os.path.getsize(dsnfile) == 0:
             mylog.warning("DSN summary file is empty. Ignoring.")
             return
-        tstart = date2secs(self.first_time)
-        tstop = date2secs(self.last_time)
+        tstart = CxoTime(self.first_time).secs
+        tstop = CxoTime(self.last_time).secs
         bots = []
         eots = []
         new_durations = []
@@ -252,15 +252,15 @@ def get_updated_dsn_comms(self):
             words = line.strip().split()
             bot = datetime.strptime("%s:%s:00:00:00" % (words[-4], words[-3].split(".")[0]), "%Y:%j:%H:%M:%S")
             eot = datetime.strptime("%s:%s:00:00:00" % (words[-2], words[-1].split(".")[0]), "%Y:%j:%H:%M:%S")
-            time_bot = date2secs(bot.strftime("%Y:%j:%H:%M:%S"))+86400.0*(float(words[-3]) % 1)
-            time_eot = date2secs(eot.strftime("%Y:%j:%H:%M:%S"))+86400.0*(float(words[-1]) % 1)
+            time_bot = CxoTime(bot.strftime("%Y:%j:%H:%M:%S")).secs+86400.0*(float(words[-3]) % 1)
+            time_eot = CxoTime(eot.strftime("%Y:%j:%H:%M:%S")).secs+86400.0*(float(words[-1]) % 1)
             new_durations.append((time_eot-time_bot)/60.0)
             if tstart <= time_bot <= tstop:
                 bots.append(time_bot)
             if tstart <= time_eot <= tstop:
                 eots.append(time_eot)
-        self.events["comm_begins"]["times"] = secs2date(bots)
-        self.events["comm_ends"]["times"] = secs2date(eots)
+        self.events["comm_begins"]["times"] = CxoTime(bots).date
+        self.events["comm_ends"]["times"] = CxoTime(eots).date
         self.lines, self.line_times = self._fix_comm_times(self.lines, self.line_times, new_durations)
 
     def _fix_comm_times(self, lines, line_times, comm_durations):
@@ -272,13 +272,13 @@ def _fix_comm_times(self, lines, line_times, comm_durations):
             new_times.append(line_times[i])
         for time in self.events["comm_begins"]["times"]:
             local_time = datetime.strptime(time, "%Y:%j:%H:%M:%S.%f").replace(tzinfo=timezone.utc).astimezone(tz=None)
-            t = date2secs(time)
+            t = CxoTime(time).secs
             idx = bisect.bisect_right(new_times, t)
             new_times.insert(idx, t)
             new_lines.insert(idx, "%s REAL-TIME COMM BEGINS %s EDT" % (time, local_time.strftime("%Y:%j:%H:%M:%S")))
         for i, time in enumerate(self.events["comm_ends"]["times"]):
             local_time = datetime.strptime(time, "%Y:%j:%H:%M:%S.%f").replace(tzinfo=timezone.utc).astimezone(tz=None)
-            t = date2secs(time)
+            t = CxoTime(time).secs
             idx = bisect.bisect_right(new_times, t)
             new_times.insert(idx, t)
             new_lines.insert(idx, "%s REAL-TIME COMM ENDS %s EDT" % (time, local_time.strftime("%Y:%j:%H:%M:%S")))
@@ -311,15 +311,15 @@ def _add_annotations(self, plot, annotations, tbegin, tend):
             color = colors[key]
             ls = styles[key]
             for i, t in enumerate(self.events[key]["times"]):
-                tt = date2secs(t)
+                tt = CxoTime(t).secs
                 if tt < tbegin or tt > tend:
                     continue
                 plot.add_vline(t, color=color, ls=ls)
                 if "state" in self.events[key] and key in offsets:
                     text = self.events[key]["state"][i]
                     if isinstance(text, tuple):
                         text = text[-1]
-                    tdt = secs2date(tt + 1800.0)
+                    tdt = CxoTime(tt + 1800.0).date
                     ymin, ymax = plot.ax.get_ylim()
                     y = (1.0-offsets[key])*ymin+offsets[key]*ymax
                     plot.add_text(tdt, y, text, fontsize=15,
@@ -343,8 +343,8 @@ def _plot_bands(self, tbegin, tend, plot, events, color, alpha=1.0):
         if tc_start[-1] > tc_end[-1]:
             tc_end.append(self.last_time)
         assert len(tc_start) == len(tc_end)
-        tc_start = date2secs(tc_start)
-        tc_end = date2secs(tc_end)
+        tc_start = CxoTime(tc_start).secs
+        tc_end = CxoTime(tc_end).secs
         ybot, ytop = plot.ax.get_ylim()
         t = np.linspace(tbegin, tend, 500)
         tplot = cxctime2plotdate(t)
@@ -425,9 +425,9 @@ def plot(self, fields, field2=None, lw=1.5, fontsize=18,
             tbegin = self.first_time
         if tend is None:
             tend = self.last_time
-        tbegin = get_time(tbegin, 'secs')
-        tend = get_time(tend, 'secs')
+        tbegin = CxoTime(tbegin).secs
+        tend = CxoTime(tend).secs
         if annotations is not None:
             self._add_annotations(dp, annotations.copy(), tbegin, tend)
-        dp.set_xlim(secs2date(tbegin), secs2date(tend))
+        dp.set_xlim(CxoTime(tbegin).date, CxoTime(tend).date)
         return dp
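
Two CxoTime behaviors carry most of the weight in load_review.py: it vectorizes over lists (so secs2date(bots) collapses to CxoTime(bots).date), and it round-trips between date strings and seconds for offset arithmetic, as in the comm_ends handler. A minimal sketch (illustrative values):

    from cxotime import CxoTime

    bots = [742996800.0, 743083200.0]  # CXC seconds for two comm starts
    print(CxoTime(bots).date)          # array of Year:DOY date strings

    # Shift a date string back 30 minutes, as in the comm_ends handler above:
    word = "2021:189:02:15:00"
    earlier = CxoTime(CxoTime(word).secs - 1800.0).date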
6 changes: 4 additions & 2 deletions acispy/model.py
@@ -1,11 +1,13 @@
 import requests
 from astropy.io import ascii
 import Ska.Numpy
-from acispy.utils import get_time, mylog, find_load
+from acispy.utils import mylog, find_load
 from acispy.units import APQuantity, Quantity, get_units
 from acispy.utils import ensure_list
 from acispy.time_series import TimeSeriesData
 import numpy as np
+from cxotime import CxoTime
 
 
 comp_map = {"1deamzt": "dea",
             "1dpamzt": "dpa",
@@ -121,7 +123,7 @@ def from_load_file(cls, temps_file, esa_file=None):
         return cls(table=data)
 
     def get_values(self, time):
-        time = get_time(time, fmt='secs')
+        time = CxoTime(time).secs
         t = Quantity(time, "s")
         values = {}
         for key in self.keys():
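
After this change, get_values() accepts anything CxoTime can parse and normalizes it to seconds before interpolating. A short sketch of the normalization (input values are illustrative):

    from cxotime import CxoTime

    # A date string and its equivalent float both normalize to CXC seconds:
    time = CxoTime("2021:100:12:00:00").secs  # string -> float seconds
    time = CxoTime(time).secs                 # float passes through unchanged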
29 changes: 14 additions & 15 deletions acispy/msids.py
@@ -1,16 +1,15 @@
-from acispy.utils import get_time, mit_trans_table, ensure_list, \
+from acispy.utils import mit_trans_table, ensure_list, \
     get_state_codes
 from acispy.units import get_units, APQuantity, APStringArray, \
     Quantity
 import Ska.engarchive.fetch_sci as fetch
 from astropy.io import ascii
 import numpy as np
 from acispy.time_series import TimeSeriesData
-from Chandra.Time import date2secs, DateTime
 import Ska.Numpy
 from acispy.fields import builtin_deps
 from astropy.table import Table
 
+from cxotime import CxoTime
 
 def check_depends(msids):
     output_msids = []
@@ -67,12 +66,12 @@ def from_mit_file(cls, filename, tbegin=None, tend=None):
             tbegin = -1.0e22
         else:
             if isinstance(tbegin, str):
-                tbegin = date2secs(tbegin)
+                tbegin = CxoTime(tbegin).secs
         if tend is None:
             tend = 1.0e22
         else:
             if isinstance(tend, str):
-                tend = date2secs(tend)
+                tend = CxoTime(tend).secs
         f = open(filename, 'r')
         line = f.readline()
         f.close()
@@ -95,7 +94,7 @@ def from_mit_file(cls, filename, tbegin=None, tend=None):
                     for y, d, h, m, s in zip(data[year].data,
                                              data["DOY"].data,
                                              hours, mins, secs)]
-        tsecs = date2secs(time_arr)
+        tsecs = CxoTime(time_arr).secs
         idxs = np.logical_and(tsecs >= tbegin, tsecs <= tend)
         table = {}
         times = {}
@@ -132,12 +131,12 @@ def from_tracelog(cls, filename, tbegin=None, tend=None):
             tbegin = -1.0e22
         else:
             if isinstance(tbegin, str):
-                tbegin = date2secs(tbegin)
+                tbegin = CxoTime(tbegin).secs
         if tend is None:
             tend = 1.0e22
         else:
             if isinstance(tend, str):
-                tend = date2secs(tend)
+                tend = CxoTime(tend).secs
         f = open(filename, "r")
         header = f.readline().split()
         dtype = []
@@ -168,8 +167,8 @@ def from_tracelog(cls, filename, tbegin=None, tend=None):
     @classmethod
     def from_database(cls, msids, tstart, tstop=None, filter_bad=False,
                       stat='5min', interpolate=None, interpolate_times=None):
-        tstart = get_time(tstart)
-        tstop = get_time(tstop)
+        tstart = CxoTime(tstart).date
+        tstop = CxoTime(tstop).date
         msids = ensure_list(msids)
         msids, derived_msids = check_depends(msids)
         msids = [msid.lower() for msid in msids]
Expand All @@ -185,13 +184,13 @@ def from_database(cls, msids, tstart, tstop=None, filter_bad=False,
max_fetch_tstart = max(msid.times[0] for msid in data.values())
min_fetch_tstop = min(msid.times[-1] for msid in data.values())
dt = 328.0
start = DateTime(tstart).secs if tstart else data.tstart
stop = DateTime(tstop).secs if tstop else data.tstop
start = CxoTime(tstart).secs if tstart else data.tstart
stop = CxoTime(tstop).secs if tstop else data.tstop
start = max(start, max_fetch_tstart)
stop = min(stop, min_fetch_tstop)
interpolate_times = np.arange((stop - start) // dt + 1) * dt + start
else:
interpolate_times = DateTime(interpolate_times).secs
interpolate_times = CxoTime(interpolate_times).secs
for k, msid in data.items():
if interpolate is not None:
indexes = Ska.Numpy.interpolate(np.arange(len(msid.times)),
@@ -212,8 +211,8 @@ def from_database(cls, msids, tstart, tstop=None, filter_bad=False,
     @classmethod
     def from_maude(cls, msids, tstart, tstop=None, user=None, password=None):
         import maude
-        tstart = get_time(tstart)
-        tstop = get_time(tstop)
+        tstart = CxoTime(tstart).date
+        tstop = CxoTime(tstop).date
         msids = ensure_list(msids)
         msids, derived_msids = check_depends(msids)
         table = {}
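
One subtlety the msids.py changes preserve: in from_mit_file() and from_tracelog(), None means an unbounded time range, so the isinstance checks are kept rather than passing None to CxoTime. By contrast, the tstop=None defaults in from_database() and from_maude() pass None straight through, relying on CxoTime resolving it to the current time as Chandra.Time's DateTime did. A minimal sketch of the guard pattern, using a hypothetical clip_window() helper:

    from cxotime import CxoTime

    def clip_window(tbegin, tend):
        # Strings become CXC seconds; None stays an unbounded sentinel.
        if tbegin is None:
            tbegin = -1.0e22
        elif isinstance(tbegin, str):
            tbegin = CxoTime(tbegin).secs
        if tend is None:
            tend = 1.0e22
        elif isinstance(tend, str):
            tend = CxoTime(tend).secs
        return tbegin, tend

    print(clip_window("2021:001:00:00:00", None))  # (CXC seconds float, 1e+22)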
