sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
---|---|---|
def checkUpdate(self, *args):
"""
Updates values after first checking instrument parameters are OK.
This is not integrated within update to prevent ifinite recursion
since update gets called from ipars.
"""
g = get_root(self).globals
if not self.check():
g.clog.warn('Current observing parameters are not valid.')
return False
if not g.ipars.check():
g.clog.warn('Current instrument parameters are not valid.')
return False | Updates values after first checking instrument parameters are OK.
This is not integrated within update to prevent ifinite recursion
since update gets called from ipars. | entailment |
def check(self):
"""
Checks values
"""
status = True
g = get_root(self).globals
if self.mag.ok():
self.mag.config(bg=g.COL['main'])
else:
self.mag.config(bg=g.COL['warn'])
status = False
if self.airmass.ok():
self.airmass.config(bg=g.COL['main'])
else:
self.airmass.config(bg=g.COL['warn'])
status = False
if self.seeing.ok():
self.seeing.config(bg=g.COL['main'])
else:
self.seeing.config(bg=g.COL['warn'])
status = False
return status | Checks values | entailment |
def update(self, *args):
"""
Updates values. You should run a check on the instrument and
target parameters before calling this.
"""
g = get_root(self).globals
expTime, deadTime, cycleTime, dutyCycle, frameRate = g.ipars.timing()
total, peak, peakSat, peakWarn, ston, ston3 = \
self.counts(expTime, cycleTime)
if cycleTime < 0.01:
self.cadence.config(text='{0:7.5f} s'.format(cycleTime))
elif cycleTime < 0.1:
self.cadence.config(text='{0:6.4f} s'.format(cycleTime))
elif cycleTime < 1.:
self.cadence.config(text='{0:5.3f} s'.format(cycleTime))
elif cycleTime < 10.:
self.cadence.config(text='{0:4.2f} s'.format(cycleTime))
elif cycleTime < 100.:
self.cadence.config(text='{0:4.1f} s'.format(cycleTime))
elif cycleTime < 1000.:
self.cadence.config(text='{0:4.0f} s'.format(cycleTime))
else:
self.cadence.config(text='{0:5.0f} s'.format(cycleTime))
if expTime < 0.01:
self.exposure.config(text='{0:7.5f} s'.format(expTime))
elif expTime < 0.1:
self.exposure.config(text='{0:6.4f} s'.format(expTime))
elif expTime < 1.:
self.exposure.config(text='{0:5.3f} s'.format(expTime))
elif expTime < 10.:
self.exposure.config(text='{0:4.2f} s'.format(expTime))
elif expTime < 100.:
self.exposure.config(text='{0:4.1f} s'.format(expTime))
elif expTime < 1000.:
self.exposure.config(text='{0:4.0f} s'.format(expTime))
else:
self.exposure.config(text='{0:5.0f} s'.format(expTime))
self.duty.config(text='{0:4.1f} %'.format(dutyCycle))
self.peak.config(text='{0:d} cts'.format(int(round(peak))))
if peakSat:
self.peak.config(bg=g.COL['error'])
elif peakWarn:
self.peak.config(bg=g.COL['warn'])
else:
self.peak.config(bg=g.COL['main'])
self.total.config(text='{0:d} cts'.format(int(round(total))))
self.ston.config(text='{0:.1f}'.format(ston))
self.ston3.config(text='{0:.1f}'.format(ston3)) | Updates values. You should run a check on the instrument and
target parameters before calling this. | entailment |
def counts(self, expTime, cycleTime, ap_scale=1.6, ndiv=5):
"""
Computes counts per pixel, total counts, sky counts
etc given current magnitude, seeing etc. You should
run a check on the instrument parameters before calling
this.
expTime : exposure time per frame (seconds)
cycleTime : sampling, cadence (seconds)
ap_scale : aperture radius as multiple of seeing
Returns: (total, peak, peakSat, peakWarn, ston, ston3)
total -- total number of object counts in aperture
peak -- peak counts in a pixel
peakSat -- flag to indicate saturation
peakWarn -- flag to indication level approaching saturation
ston -- signal-to-noise per exposure
ston3 -- signal-to-noise after 3 hours on target
"""
# code directly translated from Java equivalent.
g = get_root(self).globals
# Set the readout speed
readSpeed = g.ipars.readSpeed()
if readSpeed == 'Fast':
gain = GAIN_FAST
read = RNO_FAST
elif readSpeed == 'Slow':
gain = GAIN_SLOW
read = RNO_SLOW
else:
raise DriverError('CountsFrame.counts: readout speed = ' +
readSpeed + ' not recognised.')
xbin, ybin = g.ipars.wframe.xbin.value(), g.ipars.wframe.ybin.value()
# calculate SN info.
zero, sky, skyTot, darkTot = 0., 0., 0., 0.
total, peak, correct, signal, readTot, seeing = 0., 0., 0., 0., 0., 0.
noise, narcsec, npix, signalToNoise3 = 1., 0., 0., 0.
tinfo = g.TINS[g.cpars['telins_name']]
filtnam = self.filter.value()
zero = tinfo['zerop'][filtnam]
mag = self.mag.value()
seeing = self.seeing.value()
sky = g.SKY[self.moon.value()][filtnam]
airmass = self.airmass.value()
plateScale = tinfo['plateScale']
# calculate expected electrons
total = 10.**((zero-mag-airmass*g.EXTINCTION[filtnam])/2.5)*expTime
# compute fraction that fall in central pixel
# assuming target exactly at its centre. Do this
# by splitting each pixel of the central (potentially
# binned) pixel into ndiv * ndiv points at
# which the seeing profile is added. sigma is the
# RMS seeing in terms of pixels.
sigma = seeing/g.EFAC/plateScale
sum = 0.
for iyp in range(ybin):
yoff = -ybin/2.+iyp
for ixp in range(xbin):
xoff = -xbin/2.+ixp
for iys in range(ndiv):
y = (yoff + (iys+0.5)/ndiv)/sigma
for ixs in range(ndiv):
x = (xoff + (ixs+0.5)/ndiv)/sigma
sum += math.exp(-(x*x+y*y)/2.)
peak = total*sum/(2.*math.pi*sigma**2*ndiv**2)
# Work out fraction of flux in aperture with radius AP_SCALE*seeing
correct = 1. - math.exp(-(g.EFAC*ap_scale)**2/2.)
# expected sky e- per arcsec
skyPerArcsec = 10.**((zero-sky)/2.5)*expTime
# skyPerPixel = skyPerArcsec*plateScale**2*xbin*ybin
narcsec = math.pi*(ap_scale*seeing)**2
skyTot = skyPerArcsec*narcsec
npix = math.pi*(ap_scale*seeing/plateScale)**2/xbin/ybin
signal = correct*total # in electrons
darkTot = npix*DARK_E*expTime # in electrons
readTot = npix*read**2 # in electrons
# noise, in electrons
noise = math.sqrt(readTot + darkTot + skyTot + signal)
# Now compute signal-to-noise in 3 hour seconds run
signalToNoise3 = signal/noise*math.sqrt(3*3600./cycleTime)
# convert from electrons to counts
total /= gain
peak /= gain
warn = 25000
sat = 60000
peakSat = peak > sat
peakWarn = peak > warn
return (total, peak, peakSat, peakWarn, signal/noise, signalToNoise3) | Computes counts per pixel, total counts, sky counts
etc given current magnitude, seeing etc. You should
run a check on the instrument parameters before calling
this.
expTime : exposure time per frame (seconds)
cycleTime : sampling, cadence (seconds)
ap_scale : aperture radius as multiple of seeing
Returns: (total, peak, peakSat, peakWarn, ston, ston3)
total -- total number of object counts in aperture
peak -- peak counts in a pixel
peakSat -- flag to indicate saturation
peakWarn -- flag to indication level approaching saturation
ston -- signal-to-noise per exposure
ston3 -- signal-to-noise after 3 hours on target | entailment |
def disable(self):
"""
Disable the button, if in non-expert mode.
"""
w.ActButton.disable(self)
g = get_root(self).globals
if self._expert:
self.config(bg=g.COL['start'])
else:
self.config(bg=g.COL['startD']) | Disable the button, if in non-expert mode. | entailment |
def setExpert(self):
"""
Turns on 'expert' status whereby the button is always enabled,
regardless of its activity status.
"""
w.ActButton.setExpert(self)
g = get_root(self).globals
self.config(bg=g.COL['start']) | Turns on 'expert' status whereby the button is always enabled,
regardless of its activity status. | entailment |
def act(self):
"""
Carries out action associated with start button
"""
g = get_root(self).globals
# check binning against overscan
msg = """
HiperCAM has an o/scan of 50 pixels.
Your binning does not fit into this
region. Some columns will contain a
mix of o/scan and data.
Click OK if you wish to continue."""
if g.ipars.oscan():
xbin, ybin = g.ipars.wframe.xbin.value(), g.ipars.wframe.ybin.value()
if xbin not in (1, 2, 5, 10) or ybin not in (1, 2, 5, 10):
if not messagebox.askokcancel('Binning alert', msg):
return False
# Check instrument pars are OK
if not g.ipars.check():
g.clog.warn('Invalid instrument parameters; save failed.')
return False
# create JSON to post
data = createJSON(g)
# POST
try:
success = postJSON(g, data)
if not success:
raise Exception('postJSON returned False')
except Exception as err:
g.clog.warn("Failed to post data to servers")
g.clog.warn(str(err))
return False
# Is nod enabled? Should we start GTC offsetter?
try:
success = startNodding(g, data)
if not success:
raise Exception('Failed to start dither: response was false')
except Exception as err:
g.clog.warn("Failed to start GTC offsetter")
g.clog.warn(str(err))
return False
# START
try:
success = execCommand(g, 'start')
if not success:
raise Exception("Start command failed: check server response")
except Exception as err:
g.clog.warn('Failed to start run')
g.clog.warn(str(err))
return False
# Send first offset if nodding enabled.
# Initial trigger is sent after first offset, otherwise we'll hang indefinitely
try:
success = forceNod(g, data)
if not success:
raise Exception('Failed to send intitial offset and trigger - exposure will be paused indefinitely')
except Exception as err:
g.clog.warn('Run is paused indefinitely')
g.clog.warn('use "ngcbCmd seq start" to fix')
g.clog.warn(str(err))
# Run successfully started.
# enable stop button, disable Start
# also make inactive until RunType select box makes active again
# start run timer
# finally, clear table which stores TCS info during this run
self.disable()
self.run_type_set = False
g.observe.stop.enable()
g.info.timer.start()
g.info.clear_tcs_table()
return True | Carries out action associated with start button | entailment |
def act(self):
"""
Carries out the action associated with the Load button
"""
g = get_root(self).globals
fname = filedialog.askopenfilename(
defaultextension='.json',
filetypes=[('json files', '.json'), ('fits files', '.fits')],
initialdir=g.cpars['app_directory'])
if not fname:
g.clog.warn('Aborted load from disk')
return False
# load json
if fname.endswith('.json'):
with open(fname) as ifname:
json_string = ifname.read()
else:
json_string = jsonFromFits(fname)
# load up the instrument settings
g.ipars.loadJSON(json_string)
# load up the run parameters
g.rpars.loadJSON(json_string)
return True | Carries out the action associated with the Load button | entailment |
def act(self):
"""
Carries out the action associated with the Save button
"""
g = get_root(self).globals
g.clog.info('\nSaving current application to disk')
# check instrument parameters
if not g.ipars.check():
g.clog.warn('Invalid instrument parameters; save failed.')
return False
# check run parameters
rok, msg = g.rpars.check()
if not rok:
g.clog.warn('Invalid run parameters; save failed.')
g.clog.warn(msg)
return False
# Get data to save
data = createJSON(g, full=False)
# Save to disk
if saveJSON(g, data):
# modify buttons
g.observe.load.enable()
g.observe.unfreeze.disable()
# unfreeze the instrument and run params
g.ipars.unfreeze()
g.rpars.unfreeze()
return True
else:
return False | Carries out the action associated with the Save button | entailment |
def act(self):
"""
Carries out the action associated with the Unfreeze button
"""
g = get_root(self).globals
g.ipars.unfreeze()
g.rpars.unfreeze()
g.observe.load.enable()
self.disable() | Carries out the action associated with the Unfreeze button | entailment |
def setExpertLevel(self):
"""
Set expert level
"""
g = get_root(self).globals
level = g.cpars['expert_level']
# now set whether buttons are permanently enabled or not
if level == 0 or level == 1:
self.load.setNonExpert()
self.save.setNonExpert()
self.unfreeze.setNonExpert()
self.start.setNonExpert()
self.stop.setNonExpert()
elif level == 2:
self.load.setExpert()
self.save.setExpert()
self.unfreeze.setExpert()
self.start.setExpert()
self.stop.setExpert() | Set expert level | entailment |
def process_factory_meta_options(
mcs_args: McsArgs,
default_factory_class: Type[MetaOptionsFactory] = MetaOptionsFactory,
factory_attr_name: str = META_OPTIONS_FACTORY_CLASS_ATTR_NAME) \
-> MetaOptionsFactory:
"""
Main entry point for consumer metaclasses. Usage::
from py_meta_utils import (AbstractMetaOption, McsArgs, MetaOptionsFactory,
process_factory_meta_options)
class YourMetaOptionsFactory(MetaOptionsFactory):
_options = [AbstractMetaOption]
class YourMetaclass(type):
def __new__(mcs, name, bases, clsdict):
mcs_args = McsArgs(mcs, name, bases, clsdict)
# process_factory_meta_options must come *before* super().__new__()
process_factory_meta_options(mcs_args, YourMetaOptionsFactory)
return super().__new__(*mcs_args)
class YourClass(metaclass=YourMetaclass):
pass
Subclasses of ``YourClass`` may set their ``_meta_options_factory_class``
attribute to a subclass of ``YourMetaOptionsFactory`` to customize
their own supported meta options::
from py_meta_utils import MetaOption
class FooMetaOption(MetaOption):
def __init__(self):
super().__init__(name='foo', default=None, inherit=True)
class FooMetaOptionsFactory(YourMetaOptionsFactory):
_options = YourMetaOptionsFactory._options + [
FooMetaOption,
]
class FooClass(YourClass):
_meta_options_factory_class = FooMetaOptionsFactory
class Meta:
foo = 'bar'
:param mcs_args: The :class:`McsArgs` for the class-under-construction
:param default_factory_class: The default MetaOptionsFactory class to use, if
the ``factory_attr_name`` attribute is not set on
the class-under-construction
:param factory_attr_name: The attribute name to look for an overridden factory
meta options class on the class-under-construction
:return: The populated instance of the factory class
"""
factory_cls = mcs_args.getattr(
factory_attr_name or META_OPTIONS_FACTORY_CLASS_ATTR_NAME,
default_factory_class)
options_factory = factory_cls()
options_factory._contribute_to_class(mcs_args)
return options_factory | Main entry point for consumer metaclasses. Usage::
from py_meta_utils import (AbstractMetaOption, McsArgs, MetaOptionsFactory,
process_factory_meta_options)
class YourMetaOptionsFactory(MetaOptionsFactory):
_options = [AbstractMetaOption]
class YourMetaclass(type):
def __new__(mcs, name, bases, clsdict):
mcs_args = McsArgs(mcs, name, bases, clsdict)
# process_factory_meta_options must come *before* super().__new__()
process_factory_meta_options(mcs_args, YourMetaOptionsFactory)
return super().__new__(*mcs_args)
class YourClass(metaclass=YourMetaclass):
pass
Subclasses of ``YourClass`` may set their ``_meta_options_factory_class``
attribute to a subclass of ``YourMetaOptionsFactory`` to customize
their own supported meta options::
from py_meta_utils import MetaOption
class FooMetaOption(MetaOption):
def __init__(self):
super().__init__(name='foo', default=None, inherit=True)
class FooMetaOptionsFactory(YourMetaOptionsFactory):
_options = YourMetaOptionsFactory._options + [
FooMetaOption,
]
class FooClass(YourClass):
_meta_options_factory_class = FooMetaOptionsFactory
class Meta:
foo = 'bar'
:param mcs_args: The :class:`McsArgs` for the class-under-construction
:param default_factory_class: The default MetaOptionsFactory class to use, if
the ``factory_attr_name`` attribute is not set on
the class-under-construction
:param factory_attr_name: The attribute name to look for an overridden factory
meta options class on the class-under-construction
:return: The populated instance of the factory class | entailment |
def deep_getattr(clsdict: Dict[str, Any],
bases: Tuple[Type[object], ...],
name: str,
default: Any = _missing) -> Any:
"""
Acts just like ``getattr`` would on a constructed class object, except this operates
on the pre-construction class dictionary and base classes. In other words, first we
look for the attribute in the class dictionary, and then we search all the base
classes (in method resolution order), finally returning the default value if the
attribute was not found in any of the class dictionary or base classes (or it raises
``AttributeError`` if no default was given).
"""
value = clsdict.get(name, _missing)
if value != _missing:
return value
for base in bases:
value = getattr(base, name, _missing)
if value != _missing:
return value
if default != _missing:
return default
raise AttributeError(name) | Acts just like ``getattr`` would on a constructed class object, except this operates
on the pre-construction class dictionary and base classes. In other words, first we
look for the attribute in the class dictionary, and then we search all the base
classes (in method resolution order), finally returning the default value if the
attribute was not found in any of the class dictionary or base classes (or it raises
``AttributeError`` if no default was given). | entailment |
def getattr(self, name, default: Any = _missing):
"""
Convenience method equivalent to
``deep_getattr(mcs_args.clsdict, mcs_args.bases, 'attr_name'[, default])``
"""
return deep_getattr(self.clsdict, self.bases, name, default) | Convenience method equivalent to
``deep_getattr(mcs_args.clsdict, mcs_args.bases, 'attr_name'[, default])`` | entailment |
def qualname(self) -> str:
"""
Returns the fully qualified name of the class-under-construction, if possible,
otherwise just the class name.
"""
if self.module:
return self.module + '.' + self.name
return self.name | Returns the fully qualified name of the class-under-construction, if possible,
otherwise just the class name. | entailment |
def is_abstract(self) -> bool:
"""
Whether or not the class-under-construction was declared as abstract (**NOTE:**
this property is usable even *before* the :class:`MetaOptionsFactory` has run)
"""
meta_value = getattr(self.clsdict.get('Meta'), 'abstract', False)
return self.clsdict.get(ABSTRACT_ATTR, meta_value) is True | Whether or not the class-under-construction was declared as abstract (**NOTE:**
this property is usable even *before* the :class:`MetaOptionsFactory` has run) | entailment |
def get_value(self, Meta: Type[object], base_classes_meta, mcs_args: McsArgs) -> Any:
"""
Returns the value for ``self.name`` given the class-under-construction's class
``Meta``. If it's not found there, and ``self.inherit == True`` and there is a
base class that has a class ``Meta``, use that value, otherwise ``self.default``.
:param Meta: the class ``Meta`` (if any) from the class-under-construction
(**NOTE:** this will be an ``object`` or ``None``, NOT an instance
of :class:`MetaOptionsFactory`)
:param base_classes_meta: the :class:`MetaOptionsFactory` instance (if any) from
the base class of the class-under-construction
:param mcs_args: the :class:`McsArgs` for the class-under-construction
"""
value = self.default
if self.inherit and base_classes_meta is not None:
value = getattr(base_classes_meta, self.name, value)
if Meta is not None:
value = getattr(Meta, self.name, value)
return value | Returns the value for ``self.name`` given the class-under-construction's class
``Meta``. If it's not found there, and ``self.inherit == True`` and there is a
base class that has a class ``Meta``, use that value, otherwise ``self.default``.
:param Meta: the class ``Meta`` (if any) from the class-under-construction
(**NOTE:** this will be an ``object`` or ``None``, NOT an instance
of :class:`MetaOptionsFactory`)
:param base_classes_meta: the :class:`MetaOptionsFactory` instance (if any) from
the base class of the class-under-construction
:param mcs_args: the :class:`McsArgs` for the class-under-construction | entailment |
def _get_meta_options(self) -> List[MetaOption]:
"""
Returns a list of :class:`MetaOption` instances that this factory supports.
"""
return [option if isinstance(option, MetaOption) else option()
for option in self._options] | Returns a list of :class:`MetaOption` instances that this factory supports. | entailment |
def _contribute_to_class(self, mcs_args: McsArgs):
"""
Where the magic happens. Takes one parameter, the :class:`McsArgs` of the
class-under-construction, and processes the declared ``class Meta`` from
it (if any). We fill ourself with the declared meta options' name/value pairs,
give the declared meta options a chance to also contribute to the class-under-
construction, and finally replace the class-under-construction's ``class Meta``
with this populated factory instance (aka ``self``).
"""
self._mcs_args = mcs_args
Meta = mcs_args.clsdict.pop('Meta', None) # type: Type[object]
base_classes_meta = mcs_args.getattr('Meta', None) # type: MetaOptionsFactory
mcs_args.clsdict['Meta'] = self # must come before _fill_from_meta, because
# some meta options may depend upon having
# access to the values of earlier meta options
self._fill_from_meta(Meta, base_classes_meta, mcs_args)
for option in self._get_meta_options():
option_value = getattr(self, option.name, None)
option.contribute_to_class(mcs_args, option_value) | Where the magic happens. Takes one parameter, the :class:`McsArgs` of the
class-under-construction, and processes the declared ``class Meta`` from
it (if any). We fill ourself with the declared meta options' name/value pairs,
give the declared meta options a chance to also contribute to the class-under-
construction, and finally replace the class-under-construction's ``class Meta``
with this populated factory instance (aka ``self``). | entailment |
def _fill_from_meta(self, Meta: Type[object], base_classes_meta, mcs_args: McsArgs):
"""
Iterate over our supported meta options, and set attributes on the factory
instance (self) for each meta option's name/value. Raises ``TypeError`` if
we discover any unsupported meta options on the class-under-construction's
``class Meta``.
"""
# Exclude private/protected fields from the Meta
meta_attrs = {} if not Meta else {k: v for k, v in vars(Meta).items()
if not k.startswith('_')}
for option in self._get_meta_options():
existing = getattr(self, option.name, None)
if existing and not (existing in self._allowed_properties
and not isinstance(existing, property)):
raise RuntimeError("Can't override field {name}."
"".format(name=option.name))
value = option.get_value(Meta, base_classes_meta, mcs_args)
option.check_value(value, mcs_args)
meta_attrs.pop(option.name, None)
if option.name != '_':
setattr(self, option.name, value)
if meta_attrs:
# Only allow attributes on the Meta that have a respective MetaOption
raise TypeError(
'`class Meta` for {cls} got unknown attribute(s) {attrs}'.format(
cls=mcs_args.name,
attrs=', '.join(sorted(meta_attrs.keys())))) | Iterate over our supported meta options, and set attributes on the factory
instance (self) for each meta option's name/value. Raises ``TypeError`` if
we discover any unsupported meta options on the class-under-construction's
``class Meta``. | entailment |
def get_object(self, binding_name, cls):
"""
Get a reference to a remote object using CORBA
"""
return self._state.get_object(self, binding_name, cls) | Get a reference to a remote object using CORBA | entailment |
def get_object(conn, binding_name, object_cls):
"""
Get a reference to a remote object using CORBA
"""
try:
obj = conn.rootContext.resolve(binding_name)
narrowed = obj._narrow(object_cls)
except CORBA.TRANSIENT:
raise IOError('Attempt to retrieve object failed')
if narrowed is None:
raise IOError('Attempt to retrieve object got a different class of object')
return narrowed | Get a reference to a remote object using CORBA | entailment |
def set(self, num):
"""
Sets the current value equal to num
"""
self._value = str(int(num))
self._variable.set(self._value) | Sets the current value equal to num | entailment |
def on_key_release_repeat(self, *dummy):
"""
Avoid repeated trigger of callback.
When holding a key down, multiple key press and release events
are fired in succession. Debouncing is implemented to squash these.
"""
self.has_prev_key_release = self.after_idle(self.on_key_release, dummy) | Avoid repeated trigger of callback.
When holding a key down, multiple key press and release events
are fired in succession. Debouncing is implemented to squash these. | entailment |
def set_bind(self):
"""
Sets key bindings.
"""
# Arrow keys and enter
self.bind('<Up>', lambda e: self.on_key_press_repeat('Up'))
self.bind('<Down>', lambda e: self.on_key_press_repeat('Down'))
self.bind('<Shift-Up>', lambda e: self.on_key_press_repeat('Shift-Up'))
self.bind('<Shift-Down>', lambda e: self.on_key_press_repeat('Shift-Down'))
self.bind('<Control-Up>', lambda e: self.on_key_press_repeat('Control-Up'))
self.bind('<Control-Down>', lambda e: self.on_key_press_repeat('Control-Down'))
self.bind('<KeyRelease>', lambda e: self.on_key_release_repeat())
# Mouse buttons: bit complex since they don't automatically
# run in continuous mode like the arrow keys
self.bind('<ButtonPress-1>', self._leftMouseDown)
self.bind('<ButtonRelease-1>', self._leftMouseUp)
self.bind('<Shift-ButtonPress-1>', self._shiftLeftMouseDown)
self.bind('<Shift-ButtonRelease-1>', self._shiftLeftMouseUp)
self.bind('<Control-Button-1>', lambda e: self.add(100))
self.bind('<ButtonPress-3>', self._rightMouseDown)
self.bind('<ButtonRelease-3>', self._rightMouseUp)
self.bind('<Shift-ButtonPress-3>', self._shiftRightMouseDown)
self.bind('<Shift-ButtonRelease-3>', self._shiftRightMouseUp)
self.bind('<Control-Button-3>', lambda e: self.sub(100))
self.bind('<Double-Button-1>', self._dadd1)
self.bind('<Double-Button-3>', self._dsub1)
self.bind('<Shift-Double-Button-1>', self._dadd10)
self.bind('<Shift-Double-Button-3>', self._dsub10)
self.bind('<Control-Double-Button-1>', self._dadd100)
self.bind('<Control-Double-Button-3>', self._dsub100)
self.bind('<Enter>', self._enter) | Sets key bindings. | entailment |
def _pollMouse(self):
"""
Polls @10Hz, with a slight delay at the
start.
"""
if self._mouseJustPressed:
delay = 300
self._mouseJustPressed = False
else:
delay = 100
if self._leftMousePressed:
self.add(1)
self.after_id = self.after(delay, self._pollMouse)
if self._shiftLeftMousePressed:
self.add(10)
self.after_id = self.after(delay, self._pollMouse)
if self._rightMousePressed:
self.sub(1)
self.after_id = self.after(delay, self._pollMouse)
if self._shiftRightMousePressed:
self.sub(10)
self.after_id = self.after(delay, self._pollMouse) | Polls @10Hz, with a slight delay at the
start. | entailment |
def _callback(self, *dummy):
"""
This gets called on any attempt to change the value
"""
# retrieve the value from the Entry
value = self._variable.get()
# run the validation. Returns None if no good
newvalue = self.validate(value)
if newvalue is None:
# Invalid: restores previously stored value
# no checker run.
self._variable.set(self._value)
elif newvalue != value:
# If the value is different update appropriately
# Store new value.
self._value = newvalue
self._variable.set(self.newvalue)
else:
# Store new value
self._value = value | This gets called on any attempt to change the value | entailment |
def set_bind(self):
"""
Sets key bindings -- we need this more than once
"""
IntegerEntry.set_bind(self)
self.bind('<Next>', lambda e: self.set(0)) | Sets key bindings -- we need this more than once | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes.
"""
try:
# trap blank fields here
if not self.blank or value:
v = int(value)
if v < 0:
return None
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes. | entailment |
def add(self, num):
"""
Adds num to the current value
"""
try:
val = self.value() + num
except:
val = num
self.set(max(0, val)) | Adds num to the current value | entailment |
def sub(self, num):
"""
Subtracts num from the current value
"""
try:
val = self.value() - num
except:
val = -num
self.set(max(0, val)) | Subtracts num from the current value | entailment |
def ok(self):
"""
Returns True if OK to use, else False
"""
try:
v = int(self._value)
if v < 0:
return False
else:
return True
except:
return False | Returns True if OK to use, else False | entailment |
def set_bind(self):
"""
Sets key bindings -- we need this more than once
"""
IntegerEntry.set_bind(self)
self.bind('<Next>', lambda e: self.set(self.imin))
self.bind('<Prior>', lambda e: self.set(self.imax)) | Sets key bindings -- we need this more than once | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes.
"""
try:
# trap blank fields here
if not self.blank or value:
v = int(value)
if v < self.imin or v > self.imax:
return None
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes. | entailment |
def add(self, num):
"""
Adds num to the current value
"""
try:
val = self.value() + num
except:
val = num
self.set(min(self.imax, max(self.imin, val))) | Adds num to the current value | entailment |
def ok(self):
"""
Returns True if OK to use, else False
"""
try:
v = int(self._value)
if v < self.imin or v > self.imax:
return False
else:
return True
except:
return False | Returns True if OK to use, else False | entailment |
def set_bind(self):
"""
Sets key bindings -- we need this more than once
"""
RangedInt.set_bind(self)
self.unbind('<Next>')
self.unbind('<Prior>')
self.bind('<Next>', lambda e: self.set(self._min()))
self.bind('<Prior>', lambda e: self.set(self._max())) | Sets key bindings -- we need this more than once | entailment |
def add(self, num):
"""
Adds num to the current value, jumping up the next
multiple of mfac if the result is not a multiple already
"""
try:
val = self.value() + num
except:
val = num
chunk = self.mfac.value()
if val % chunk > 0:
if num > 0:
val = chunk*(val // chunk + 1)
elif num < 0:
val = chunk*(val // chunk)
val = max(self._min(), min(self._max(), val))
self.set(val) | Adds num to the current value, jumping up the next
multiple of mfac if the result is not a multiple already | entailment |
def ok(self):
"""
Returns True if OK to use, else False
"""
try:
v = int(self._value)
chunk = self.mfac.value()
if v < self.imin or v > self.imax or (v % chunk != 0):
return False
else:
return True
except:
return False | Returns True if OK to use, else False | entailment |
def set_bind(self):
"""
Sets key bindings -- we need this more than once
"""
IntegerEntry.set_bind(self)
self.unbind('<Shift-Up>')
self.unbind('<Shift-Down>')
self.unbind('<Control-Up>')
self.unbind('<Control-Down>')
self.unbind('<Double-Button-1>')
self.unbind('<Double-Button-3>')
self.unbind('<Shift-Button-1>')
self.unbind('<Shift-Button-3>')
self.unbind('<Control-Button-1>')
self.unbind('<Control-Button-3>')
self.bind('<Button-1>', lambda e: self.add(1))
self.bind('<Button-3>', lambda e: self.sub(1))
self.bind('<Up>', lambda e: self.add(1))
self.bind('<Down>', lambda e: self.sub(1))
self.bind('<Enter>', self._enter)
self.bind('<Next>', lambda e: self.set(self.allowed[0]))
self.bind('<Prior>', lambda e: self.set(self.allowed[-1])) | Sets key bindings -- we need this more than once | entailment |
def set_unbind(self):
"""
Unsets key bindings -- we need this more than once
"""
IntegerEntry.set_unbind(self)
self.unbind('<Button-1>')
self.unbind('<Button-3>')
self.unbind('<Up>')
self.unbind('<Down>')
self.unbind('<Enter>')
self.unbind('<Next>')
self.unbind('<Prior>') | Unsets key bindings -- we need this more than once | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes.
"""
try:
v = int(value)
if v not in self.allowed:
return None
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes. | entailment |
def set(self, num):
"""
Sets current value to num
"""
if self.validate(num) is not None:
self.index = self.allowed.index(num)
IntegerEntry.set(self, num) | Sets current value to num | entailment |
def add(self, num):
"""
Adds num to the current value
"""
self.index = max(0, min(len(self.allowed)-1, self.index+num))
self.set(self.allowed[self.index]) | Adds num to the current value | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes.
"""
try:
# trap blank fields here
if not self.blank or value:
float(value)
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes. | entailment |
def set(self, num):
"""
Sets the current value equal to num
"""
self._value = str(round(float(num), self.nplaces))
self._variable.set(self._value) | Sets the current value equal to num | entailment |
def set_bind(self):
"""
Sets key bindings.
"""
self.bind('<Button-1>', lambda e: self.add(0.1))
self.bind('<Button-3>', lambda e: self.sub(0.1))
self.bind('<Up>', lambda e: self.add(0.1))
self.bind('<Down>', lambda e: self.sub(0.1))
self.bind('<Shift-Up>', lambda e: self.add(1))
self.bind('<Shift-Down>', lambda e: self.sub(1))
self.bind('<Control-Up>', lambda e: self.add(10))
self.bind('<Control-Down>', lambda e: self.sub(10))
self.bind('<Double-Button-1>', self._dadd)
self.bind('<Double-Button-3>', self._dsub)
self.bind('<Shift-Button-1>', lambda e: self.add(1))
self.bind('<Shift-Button-3>', lambda e: self.sub(1))
self.bind('<Control-Button-1>', lambda e: self.add(10))
self.bind('<Control-Button-3>', lambda e: self.sub(10))
self.bind('<Enter>', self._enter) | Sets key bindings. | entailment |
def set_unbind(self):
"""
Unsets key bindings.
"""
self.unbind('<Button-1>')
self.unbind('<Button-3>')
self.unbind('<Up>')
self.unbind('<Down>')
self.unbind('<Shift-Up>')
self.unbind('<Shift-Down>')
self.unbind('<Control-Up>')
self.unbind('<Control-Down>')
self.unbind('<Double-Button-1>')
self.unbind('<Double-Button-3>')
self.unbind('<Shift-Button-1>')
self.unbind('<Shift-Button-3>')
self.unbind('<Control-Button-1>')
self.unbind('<Control-Button-3>')
self.unbind('<Enter>') | Unsets key bindings. | entailment |
def set_bind(self):
"""
Sets key bindings -- we need this more than once
"""
FloatEntry.set_bind(self)
self.bind('<Next>', lambda e: self.set(self.fmin))
self.bind('<Prior>', lambda e: self.set(self.fmax)) | Sets key bindings -- we need this more than once | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes.
"""
try:
# trap blank fields here
if not self.blank or value:
v = float(value)
if (self.allowzero and v != 0 and v < self.fmin) or \
(not self.allowzero and v < self.fmin) or v > self.fmax:
return None
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid.
Overload this in derived classes. | entailment |
def add(self, num):
"""
Adds num to the current value
"""
try:
val = self.value() + num
except:
val = num
self.set(min(self.fmax, max(self.fmin, val))) | Adds num to the current value | entailment |
def ok(self):
"""
Returns True if OK to use, else False
"""
try:
v = float(self._value)
if v < self.fmin or v > self.fmax:
return False
else:
return True
except:
return False | Returns True if OK to use, else False | entailment |
def validate(self, value):
"""
This prevents setting any value more precise than 0.00001
"""
try:
# trap blank fields here
if value:
v = float(value)
if (v != 0 and v < self.fmin) or v > self.fmax:
return None
if abs(round(100000*v)-100000*v) > 1.e-12:
return None
return value
except ValueError:
return None | This prevents setting any value more precise than 0.00001 | entailment |
def set_min(self, fmin):
"""
Updates minimum value
"""
if round(100000*fmin) != 100000*fmin:
raise DriverError('utils.widgets.Expose.set_min: ' +
'fmin must be a multiple of 0.00001')
self.fmin = fmin
self.set(self.fmin) | Updates minimum value | entailment |
def disable(self):
"""
Disable the button, if in non-expert mode;
unset its activity flag come-what-may.
"""
if not self._expert:
self.config(state='disable')
self._active = False | Disable the button, if in non-expert mode;
unset its activity flag come-what-may. | entailment |
def setNonExpert(self):
"""
Turns off 'expert' status whereby to allow a button to be disabled
"""
self._expert = False
if self._active:
self.enable()
else:
self.disable() | Turns off 'expert' status whereby to allow a button to be disabled | entailment |
def validate(self, value):
"""
Applies the validation criteria.
Returns value, new value, or None if invalid.
"""
try:
coord.Angle(value, unit=self.unit)
return value
except ValueError:
return None | Applies the validation criteria.
Returns value, new value, or None if invalid. | entailment |
def set(self, num):
"""
Sets the current value equal to num
"""
self._value = coord.Angle(num, unit=u.deg)
self._variable.set(self.as_string()) | Sets the current value equal to num | entailment |
def add(self, quantity):
"""
Adds an angle to the value
"""
newvalue = self._value + quantity
self.set(newvalue.deg) | Adds an angle to the value | entailment |
def sub(self, quantity):
"""
Subtracts an angle from the value
"""
newvalue = self._value - quantity
self.set(newvalue.deg) | Subtracts an angle from the value | entailment |
def ok(self):
"""
Returns True if OK to use, else False
"""
try:
coord.Angle(self._value, unit=u.deg)
return True
except ValueError:
return False | Returns True if OK to use, else False | entailment |
def _callback(self, *dummy):
"""
This gets called on any attempt to change the value
"""
# retrieve the value from the Entry
value = self._variable.get()
# run the validation. Returns None if no good
newvalue = self.validate(value)
if newvalue is None:
# Invalid: restores previously stored value
# no checker run.
self._variable.set(self.as_string())
else:
# Store new value
self._value = coord.Angle(value, unit=self.unit)
if self.checker:
self.checker(*dummy) | This gets called on any attempt to change the value | entailment |
def act(self):
"""
Carries out the action associated with Stop button
"""
g = get_root(self).globals
g.clog.debug('Stop pressed')
# Stop exposure meter
# do this first, so timer doesn't also try to enable idle mode
g.info.timer.stop()
def stop_in_background():
try:
self.stopping = True
if execCommand(g, 'abort'):
self.stopped_ok = True
else:
g.clog.warn('Failed to stop run')
self.stopped_ok = False
self.stopping = False
except Exception as err:
g.clog.warn('Failed to stop run. Error = ' + str(err))
self.stopping = False
self.stopped_ok = False
# stopping can take a while during which the GUI freezes so run in
# background.
t = threading.Thread(target=stop_in_background)
t.daemon = True
t.start()
self.after(500, self.check) | Carries out the action associated with Stop button | entailment |
def check(self):
"""
Checks the status of the stop exposure command
This is run in background and can take a few seconds
"""
g = get_root(self).globals
if self.stopped_ok:
# Exposure stopped OK; modify buttons
self.disable()
# try and write FITS table before enabling start button, otherwise
# a new start will clear table
try:
insertFITSHDU(g)
except Exception as err:
g.clog.warn('Could not add FITS Table to run')
g.clog.warn(str(err))
g.observe.start.enable()
g.setup.powerOn.disable()
g.setup.powerOff.enable()
# Report that run has stopped
g.clog.info('Run stopped')
# enable idle mode now run has stopped
g.clog.info('Setting chips to idle')
idle = {'appdata': {'app': 'Idle'}}
try:
success = postJSON(g, idle)
if not success:
raise Exception('postJSON returned false')
except Exception as err:
g.clog.warn('Failed to enable idle mode')
g.clog.warn(str(err))
g.clog.info('Stopping offsets (if running')
try:
success = stopNodding(g)
if not success:
raise Exception('Failed to stop dithering: response was false')
except Exception as err:
g.clog.warn('Failed to stop GTC offset script')
g.clog.warn(str(err))
return True
elif self.stopping:
# Exposure in process of stopping
# Disable lots of buttons
self.disable()
g.observe.start.disable()
g.setup.powerOn.disable()
g.setup.powerOff.disable()
# wait a second before trying again
self.after(500, self.check)
else:
self.enable()
g.observe.start.disable()
g.setup.powerOn.disable()
g.setup.powerOff.disable()
# Start exposure meter
g.info.timer.start()
return False | Checks the status of the stop exposure command
This is run in background and can take a few seconds | entailment |
def modver(self, *args):
"""
Switches colour of verify button
"""
g = get_root(self).globals
if self.ok():
tname = self.val.get()
if tname in self.successes:
# known to be in simbad
self.verify.config(bg=g.COL['start'])
elif tname in self.failures:
# known not to be in simbad
self.verify.config(bg=g.COL['stop'])
else:
# not known whether in simbad
self.verify.config(bg=g.COL['main'])
self.verify.config(state='normal')
else:
self.verify.config(bg=g.COL['main'])
self.verify.config(state='disable')
if self.callback is not None:
self.callback() | Switches colour of verify button | entailment |
def act(self):
"""
Carries out the action associated with Verify button
"""
tname = self.val.get()
g = get_root(self).globals
g.clog.info('Checking ' + tname + ' in simbad')
try:
ret = checkSimbad(g, tname)
if len(ret) == 0:
self.verify.config(bg=g.COL['stop'])
g.clog.warn('No matches to "' + tname + '" found.')
if tname not in self.failures:
self.failures.append(tname)
elif len(ret) == 1:
self.verify.config(bg=g.COL['start'])
g.clog.info(tname + ' verified OK in simbad')
g.clog.info('Primary simbad name = ' + ret[0]['Name'])
if tname not in self.successes:
self.successes.append(tname)
else:
g.clog.warn('More than one match to "' + tname + '" found')
self.verify.config(bg=g.COL['stop'])
if tname not in self.failures:
self.failures.append(tname)
except urllib.error.URLError:
g.clog.warn('Simbad lookup timed out')
except socket.timeout:
g.clog.warn('Simbad lookup timed out') | Carries out the action associated with Verify button | entailment |
def act(self):
"""
Power on action
"""
g = get_root(self).globals
g.clog.debug('Power on pressed')
if execCommand(g, 'online'):
g.clog.info('ESO server online')
g.cpars['eso_server_online'] = True
if not isPoweredOn(g):
success = execCommand(g, 'pon')
if not success:
g.clog.warn('Unable to power on CLDC')
return False
# change other buttons
self.disable()
g.observe.start.enable()
g.observe.stop.disable()
g.setup.powerOff.enable()
success = execCommand(g, 'seqStart')
if not success:
g.clog.warn('Failed to start sequencer after Power On.')
try:
g.info.run.configure(text='{0:03d}'.format(getRunNumber(g)))
except Exception as err:
g.clog.warn('Failed to determine run number at start of run')
g.clog.warn(str(err))
g.info.run.configure(text='UNDEF')
return True
else:
g.clog.warn('Failed to bring server online')
return False | Power on action | entailment |
def setExpertLevel(self):
"""
Set expert level
"""
g = get_root(self).globals
level = g.cpars['expert_level']
# first define which buttons are visible
if level == 0:
# simple layout
for button in self.all_buttons:
button.grid_forget()
# then re-grid the two simple ones
self.powerOn.grid(row=0, column=0)
self.powerOff.grid(row=0, column=1)
elif level == 1 or level == 2:
# first remove all possible buttons
for button in self.all_buttons:
button.grid_forget()
# restore detailed layout
self.cldcOn.grid(row=0, column=1)
self.cldcOff.grid(row=1, column=1)
self.seqStart.grid(row=2, column=1)
self.seqStop.grid(row=3, column=1)
self.ngcOnline.grid(row=0, column=0)
self.ngcOff.grid(row=1, column=0)
self.ngcStandby.grid(row=2, column=0)
self.ngcReset.grid(row=3, column=0)
# now set whether buttons are permanently enabled or not
if level == 0 or level == 1:
for button in self.all_buttons:
button.setNonExpert()
elif level == 2:
for button in self.all_buttons:
button.setExpert() | Set expert level | entailment |
def setExpertLevel(self):
"""
Modifies widget according to expertise level, which in this
case is just matter of hiding or revealing the button to
set CCD temps
"""
g = get_root(self).globals
level = g.cpars['expert_level']
if level == 0:
if self.val.get() == 'CCD TECs':
self.val.set('Observe')
self._changed()
self.tecs.grid_forget()
else:
self.tecs.grid(row=0, column=3, sticky=tk.W) | Modifies widget according to expertise level, which in this
case is just matter of hiding or revealing the button to
set CCD temps | entailment |
def start(self):
"""
Starts the timer from zero
"""
self.startTime = time.time()
self.configure(text='{0:<d} s'.format(0))
self.update() | Starts the timer from zero | entailment |
def update(self):
"""
Updates @ 10Hz to give smooth running clock, checks
run status @0.2Hz to reduce load on servers.
"""
g = get_root(self).globals
try:
self.count += 1
delta = int(round(time.time() - self.startTime))
self.configure(text='{0:<d} s'.format(delta))
if self.count % 50 == 0:
if not isRunActive(g):
# try and write FITS table before enabling start button, otherwise
# a new start will clear table
try:
insertFITSHDU(g)
except Exception as err:
g.clog.warn('Could not add FITS Table to run')
g.clog.warn(str(err))
g.observe.start.enable()
g.observe.stop.disable()
g.setup.ngcReset.enable()
g.setup.powerOn.disable()
g.setup.powerOff.enable()
g.clog.info('Timer detected stopped run')
warn_cmd = '/usr/bin/ssh [email protected] spd-say "\'exposure finished\'"'
subprocess.check_output(warn_cmd, shell=True, stderr=subprocess.PIPE)
# enable idle mode now run has stopped
g.clog.info('Setting chips to idle')
idle = {'appdata': {'app': 'Idle'}}
try:
success = postJSON(g, idle)
if not success:
raise Exception('postJSON returned false')
except Exception as err:
g.clog.warn('Failed to enable idle mode')
g.clog.warn(str(err))
g.clog.info('Stopping offsets (if running')
try:
success = stopNodding(g)
if not success:
raise Exception('failed to stop dithering')
except Exception as err:
g.clog.warn('Failed to stop GTC offset script')
g.clog.warn(str(err))
self.stop()
return
except Exception as err:
if self.count % 100 == 0:
g.clog.warn('Timer.update: error = ' + str(err))
self.id = self.after(100, self.update) | Updates @ 10Hz to give smooth running clock, checks
run status @0.2Hz to reduce load on servers. | entailment |
def dumpJSON(self):
"""
Return dictionary of data for FITS headers.
"""
g = get_root(self).globals
return dict(
RA=self.ra['text'],
DEC=self.dec['text'],
tel=g.cpars['telins_name'],
alt=self._getVal(self.alt),
az=self._getVal(self.az),
secz=self._getVal(self.airmass),
pa=self._getVal(self.pa),
foc=self._getVal(self.focus),
mdist=self._getVal(self.mdist)
) | Return dictionary of data for FITS headers. | entailment |
def update_tcs_table(self):
"""
Periodically update a table of info from the TCS.
Only works at GTC
"""
g = get_root(self).globals
if not g.cpars['tcs_on'] or not g.cpars['telins_name'].lower() == 'gtc':
self.after(60000, self.update_tcs_table)
return
try:
tel_server = tcs.get_telescope_server()
telpars = tel_server.getTelescopeParams()
add_gtc_header_table_row(self.tcs_table, telpars)
except Exception as err:
g.clog.warn('Could not update table of TCS info')
# schedule next call for 60s later
self.after(60000, self.update_tcs_table) | Periodically update a table of info from the TCS.
Only works at GTC | entailment |
def update_tcs(self):
"""
Periodically update TCS info.
A long running process, so run in a thread and fill a queue
"""
g = get_root(self).globals
if not g.cpars['tcs_on']:
self.after(20000, self.update_tcs)
return
if g.cpars['telins_name'] == 'WHT':
tcsfunc = tcs.getWhtTcs
elif g.cpars['telins_name'] == 'GTC':
tcsfunc = tcs.getGtcTcs
else:
g.clog.debug('TCS error: could not recognise ' +
g.cpars['telins_name'])
return
def tcs_threaded_update():
try:
ra, dec, pa, focus = tcsfunc()
self.tcs_data_queue.put((ra, dec, pa, focus))
except Exception as err:
t, v, tb = sys.exc_info()
error = traceback.format_exception_only(t, v)[0].strip()
tback = 'TCS Traceback (most recent call last):\n' + \
''.join(traceback.format_tb(tb))
g.FIFO.put(('TCS', error, tback))
t = threading.Thread(target=tcs_threaded_update)
t.start()
self.after(20000, self.update_tcs) | Periodically update TCS info.
A long running process, so run in a thread and fill a queue | entailment |
def update_slidepos(self):
"""
Periodically update the slide position.
Also farmed out to a thread to avoid hanging GUI main thread
"""
g = get_root(self).globals
if not g.cpars['focal_plane_slide_on']:
self.after(20000, self.update_slidepos)
return
def slide_threaded_update():
try:
(pos_ms, pos_mm, pos_px), msg = g.fpslide.slide.return_position()
self.slide_pos_queue.put((pos_ms, pos_mm, pos_px))
except Exception as err:
t, v, tb = sys.exc_info()
error = traceback.format_exception_only(t, v)[0].strip()
tback = 'Slide Traceback (most recent call last):\n' + \
''.join(traceback.format_tb(tb))
g.FIFO.put(('Slide', error, tback))
t = threading.Thread(target=slide_threaded_update)
t.start()
self.after(20000, self.update_slidepos) | Periodically update the slide position.
Also farmed out to a thread to avoid hanging GUI main thread | entailment |
def update(self):
"""
Updates run & tel status window. Runs
once every 2 seconds.
"""
g = get_root(self).globals
if g.astro is None or g.fpslide is None:
self.after(100, self.update)
return
try:
if g.cpars['tcs_on']:
try:
# Poll TCS for ra,dec etc.
ra, dec, pa, focus = self.tcs_data_queue.get(block=False)
# format ra, dec as HMS
coo = coord.SkyCoord(ra, dec, unit=(u.deg, u.deg))
ratxt = coo.ra.to_string(sep=':', unit=u.hour, precision=0)
dectxt = coo.dec.to_string(sep=':', unit=u.deg,
alwayssign=True,
precision=0)
self.ra.configure(text=ratxt)
self.dec.configure(text=dectxt)
# wrap pa from 0 to 360
pa = coord.Longitude(pa*u.deg)
self.pa.configure(text='{0:6.2f}'.format(pa.value))
# set focus
self.focus.configure(text='{0:+5.2f}'.format(focus))
# Calculate most of the
# stuff that we don't get from the telescope
now = Time.now()
with warnings.catch_warnings():
warnings.simplefilter('ignore')
# ignore astropy deprecation warnings
lon = g.astro.obs.longitude
lst = now.sidereal_time(kind='mean',
longitude=lon)
ha = coo.ra.hourangle*u.hourangle - lst
hatxt = ha.wrap_at(12*u.hourangle).to_string(sep=':', precision=0)
self.ha.configure(text=hatxt)
altaz_frame = coord.AltAz(obstime=now, location=g.astro.obs)
altaz = coo.transform_to(altaz_frame)
self.alt.configure(text='{0:<4.1f}'.format(altaz.alt.value))
self.az.configure(text='{0:<5.1f}'.format(altaz.az.value))
# set airmass
self.airmass.configure(text='{0:<4.2f}'.format(altaz.secz))
# distance to the moon. Warn if too close
# (configurable) to it.
md = coord.get_moon(now, g.astro.obs).separation(coo)
self.mdist.configure(text='{0:<7.2f}'.format(md.value))
if md < g.cpars['mdist_warn']*u.deg:
self.mdist.configure(bg=g.COL['warn'])
else:
self.mdist.configure(bg=g.COL['main'])
except Empty:
# silently do nothing if queue is empty
pass
except Exception as err:
self.ra.configure(text='UNDEF')
self.dec.configure(text='UNDEF')
self.pa.configure(text='UNDEF')
self.ha.configure(text='UNDEF')
self.alt.configure(text='UNDEF')
self.az.configure(text='UNDEF')
self.airmass.configure(text='UNDEF')
self.mdist.configure(text='UNDEF')
g.clog.warn('TCS error: ' + str(err))
if g.cpars['hcam_server_on'] and \
g.cpars['eso_server_online']:
# get run number (set by the 'Start' button')
try:
# get run number from hipercam server
run = getRunNumber(g)
self.run.configure(text='{0:03d}'.format(run))
# Find the number of frames in this run
try:
frame_no = getFrameNumber(g)
self.frame.configure(text='{0:04d}'.format(frame_no))
except Exception as err:
if err.code == 404:
self.frame.configure(text='0')
else:
g.clog.debug('Error occurred trying to set frame')
self.frame.configure(text='UNDEF')
except Exception as err:
g.clog.debug('Error trying to set run: ' + str(err))
# get the slide position
# poll at 5x slower rate than the frame
if self.count % 5 == 0 and g.cpars['focal_plane_slide_on']:
try:
pos_ms, pos_mm, pos_px = self.slide_pos_queue.get(block=False)
self.fpslide.configure(text='{0:d}'.format(
int(round(pos_px))))
if pos_px < 1050.:
self.fpslide.configure(bg=g.COL['warn'])
else:
self.fpslide.configure(bg=g.COL['main'])
except Exception as err:
pass
# get the CCD temperature poll at 5x slower rate than the frame
if self.count % 5 == 0:
try:
if g.ccd_hw is not None and g.ccd_hw.ok:
self.ccd_temps.configure(text='OK')
self.ccd_temps.configure(bg=g.COL['main'])
else:
self.ccd_temps.configure(text='ERR')
self.ccd_temps.configure(bg=g.COL['warn'])
except Exception as err:
g.clog.warn(str(err))
self.ccd_temps.configure(text='UNDEF')
self.ccd_temps.configure(bg=g.COL['warn'])
except Exception as err:
# this is a safety catchall trap as it is important
# that this routine keeps going
g.clog.warn('Unexpected error: ' + str(err))
# run every 2 seconds
self.count += 1
self.after(2000, self.update) | Updates run & tel status window. Runs
once every 2 seconds. | entailment |
def update(self):
"""
Updates @ 10Hz to give smooth running clock.
"""
try:
# update counter
self.counter += 1
g = get_root(self).globals
# current time
now = Time.now()
# configure times
self.utc.configure(text=now.datetime.strftime('%H:%M:%S'))
self.mjd.configure(text='{0:11.5f}'.format(now.mjd))
with warnings.catch_warnings():
warnings.simplefilter('ignore')
# ignore astropy deprecation warnings
lon = self.obs.longitude
lst = now.sidereal_time(kind='mean', longitude=lon)
self.lst.configure(text=lst.to_string(sep=':', precision=0))
if self.counter % 600 == 1:
# only re-compute Sun & Moon info once every 600 calls
altaz_frame = coord.AltAz(obstime=now, location=self.obs)
sun = coord.get_sun(now)
sun_aa = sun.transform_to(altaz_frame)
moon = coord.get_moon(now, self.obs)
moon_aa = moon.transform_to(altaz_frame)
elongation = sun.separation(moon)
moon_phase_angle = np.arctan2(sun.distance*np.sin(elongation),
moon.distance - sun.distance*np.cos(elongation))
moon_phase = (1 + np.cos(moon_phase_angle))/2.0
self.sunalt.configure(
text='{0:+03d} deg'.format(int(sun_aa.alt.deg))
)
self.moonra.configure(
text=moon.ra.to_string(unit='hour', sep=':', precision=0)
)
self.moondec.configure(
text=moon.dec.to_string(unit='deg', sep=':', precision=0)
)
self.moonalt.configure(text='{0:+03d} deg'.format(
int(moon_aa.alt.deg)
))
self.moonphase.configure(text='{0:02d} %'.format(
int(100.*moon_phase.value)
))
if (now > self.lastRiset and now > self.lastAstro):
# Only re-compute rise and setting times when necessary,
# and only re-compute when both rise/set and astro
# twilight times have gone by
# For sunrise and set we set the horizon down to match a
# standard amount of refraction at the horizon and subtract size of disc
horizon = -64*u.arcmin
sunset = calc_riseset(now, 'sun', self.obs, 'next', 'setting', horizon)
sunrise = calc_riseset(now, 'sun', self.obs, 'next', 'rising', horizon)
# Astro twilight: geometric centre at -18 deg
horizon = -18*u.deg
astroset = calc_riseset(now, 'sun', self.obs, 'next', 'setting', horizon)
astrorise = calc_riseset(now, 'sun', self.obs, 'next', 'rising', horizon)
if sunrise > sunset:
# In the day time we report the upcoming sunset and
# end of evening twilight
self.lriset.configure(text='Sets:', font=g.DEFAULT_FONT)
self.lastRiset = sunset
self.lastAstro = astroset
elif astrorise > astroset and astrorise < sunrise:
# During evening twilight, we report the sunset just
# passed and end of evening twilight
self.lriset.configure(text='Sets:', font=g.DEFAULT_FONT)
horizon = -64*u.arcmin
self.lastRiset = calc_riseset(now, 'sun', self.obs, 'previous', 'setting', horizon)
self.lastAstro = astroset
elif astrorise > astroset and astrorise < sunrise:
# During night, report upcoming start of morning
# twilight and sunrise
self.lriset.configure(text='Rises:',
font=g.DEFAULT_FONT)
horizon = -64*u.arcmin
self.lastRiset = sunrise
self.lastAstro = astrorise
else:
# During morning twilight report start of twilight
# just passed and upcoming sunrise
self.lriset.configure(text='Rises:',
font=g.DEFAULT_FONT)
horizon = -18*u.deg
self.lastRiset = sunrise
self.lastAstro = calc_riseset(now, 'sun', self.obs, 'previous', 'rising', horizon)
# Configure the corresponding text fields
self.riset.configure(
text=self.lastRiset.datetime.strftime("%H:%M:%S")
)
self.astro.configure(
text=self.lastAstro.datetime.strftime("%H:%M:%S")
)
except Exception as err:
# catchall
g.clog.warn('AstroFrame.update: error = ' + str(err))
# run again after 100 milli-seconds
self.after(100, self.update) | Updates @ 10Hz to give smooth running clock. | entailment |
def check(self):
"""
Checks the values of the window pairs. If any problems are found, it
flags them by changing the background colour.
Returns (status, synced)
status : flag for whether parameters are viable at all
synced : flag for whether the windows are synchronised.
"""
status = True
synced = True
xbin = self.xbin.value()
ybin = self.ybin.value()
npair = self.npair.value()
g = get_root(self).globals
# individual pair checks
for xslw, xsrw, ysw, nxw, nyw in zip(self.xsl[:npair], self.xsr[:npair],
self.ys[:npair], self.nx[:npair],
self.ny[:npair]):
xslw.config(bg=g.COL['main'])
xsrw.config(bg=g.COL['main'])
ysw.config(bg=g.COL['main'])
nxw.config(bg=g.COL['main'])
nyw.config(bg=g.COL['main'])
status = status if xslw.ok() else False
status = status if xsrw.ok() else False
status = status if ysw.ok() else False
status = status if nxw.ok() else False
status = status if nyw.ok() else False
xsl = xslw.value()
xsr = xsrw.value()
ys = ysw.value()
nx = nxw.value()
ny = nyw.value()
# Are unbinned dimensions consistent with binning factors?
if nx is None or nx % xbin != 0:
nxw.config(bg=g.COL['error'])
status = False
elif (nx // xbin) % 4 != 0:
"""
The NGC collects pixel data in chunks before transmission.
As a result, to avoid loss of data from frames, the binned
x-size must be a multiple of 4.
"""
nxw.config(bg=g.COL['error'])
status = False
if ny is None or ny % ybin != 0:
nyw.config(bg=g.COL['error'])
status = False
# overlap checks
if xsl is None or xsr is None or xsl >= xsr:
xsrw.config(bg=g.COL['error'])
status = False
if xsl is None or xsr is None or nx is None or xsl + nx > xsr:
xsrw.config(bg=g.COL['error'])
status = False
# Are the windows synchronised? This means that they would
# be consistent with the pixels generated were the whole CCD
# to be binned by the same factors. If relevant values are not
# set, we count that as "synced" because the purpose of this is
# to enable / disable the sync button and we don't want it to be
# enabled just because xs or ys are not set.
perform_check = all([param is not None for param in (xsl, xsr, ys, nx, ny)])
if (perform_check and
((xsl - 1) % xbin != 0 or (xsr - 1025) % xbin != 0 or
(ys - 1) % ybin != 0)):
synced = False
# Range checks
if xsl is None or nx is None or xsl + nx - 1 > xslw.imax:
xslw.config(bg=g.COL['error'])
status = False
if xsr is None or nx is None or xsr + nx - 1 > xsrw.imax:
xsrw.config(bg=g.COL['error'])
status = False
if ys is None or ny is None or ys + ny - 1 > ysw.imax:
ysw.config(bg=g.COL['error'])
status = False
# Pair overlap checks. Compare one pair with the next one in the
# same quadrant (if there is one). Only bother if we have survived
# so far, which saves a lot of checks
if status:
for index in range(npair-2):
ys1 = self.ys[index].value()
ny1 = self.ny[index].value()
ysw2 = self.ys[index+2]
ys2 = ysw2.value()
if ys1 + ny1 > ys2:
ysw2.config(bg=g.COL['error'])
status = False
if synced:
self.sbutt.config(bg=g.COL['main'])
self.sbutt.disable()
else:
if not self.frozen:
self.sbutt.enable()
self.sbutt.config(bg=g.COL['warn'])
return status | Checks the values of the window pairs. If any problems are found, it
flags them by changing the background colour.
Returns (status, synced)
status : flag for whether parameters are viable at all
synced : flag for whether the windows are synchronised. | entailment |
def sync(self):
"""
Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factors == 1.
"""
# needs some mods for ultracam ??
xbin = self.xbin.value()
ybin = self.ybin.value()
n = 0
for xsl, xsr, ys, nx, ny in self:
if xbin > 1:
xsl = xbin*((xsl-1)//xbin)+1
self.xsl[n].set(xsl)
xsr = xbin*((xsr-1025)//xbin)+1025
self.xsr[n].set(xsr)
if ybin > 1:
ys = ybin*((ys-1)//ybin)+1
self.ys[n].set(ys)
n += 1
g = get_root(self).globals
self.sbutt.config(bg=g.COL['main'])
self.sbutt.config(state='disable') | Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factors == 1. | entailment |
def freeze(self):
"""
Freeze (disable) all settings so they can't be altered
"""
for xsl, xsr, ys, nx, ny in \
zip(self.xsl, self.xsr,
self.ys, self.nx, self.ny):
xsl.disable()
xsr.disable()
ys.disable()
nx.disable()
ny.disable()
self.npair.disable()
self.xbin.disable()
self.ybin.disable()
self.sbutt.disable()
self.frozen = True | Freeze (disable) all settings so they can't be altered | entailment |
def disable(self, everything=False):
"""
Disable all but possibly not binning, which is needed for FF apps
Parameters
---------
everything : bool
disable binning as well
"""
self.freeze()
if not everything:
self.xbin.enable()
self.ybin.enable()
self.frozen = False | Disable all but possibly not binning, which is needed for FF apps
Parameters
---------
everything : bool
disable binning as well | entailment |
def enable(self):
"""
Enables WinPair settings
"""
npair = self.npair.value()
for label, xsl, xsr, ys, nx, ny in \
zip(self.label[:npair], self.xsl[:npair], self.xsr[:npair],
self.ys[:npair], self.nx[:npair], self.ny[:npair]):
label.config(state='normal')
xsl.enable()
xsr.enable()
ys.enable()
nx.enable()
ny.enable()
for label, xsl, xsr, ys, nx, ny in \
zip(self.label[npair:], self.xsl[npair:], self.xsr[npair:],
self.ys[npair:], self.nx[npair:], self.ny[npair:]):
label.config(state='disable')
xsl.disable()
xsr.disable()
ys.disable()
nx.disable()
ny.disable()
self.npair.enable()
self.xbin.enable()
self.ybin.enable()
self.sbutt.enable() | Enables WinPair settings | entailment |
def check(self):
"""
Checks the values of the window quads. If any problems are found it
flags the offending window by changing the background colour.
Returns:
status : bool
"""
status = synced = True
xbin = self.xbin.value()
ybin = self.ybin.value()
nquad = self.nquad.value()
g = get_root(self).globals
# individual window checks
for (xsllw, xsulw, xslrw, xsurw, ysw, nxw, nyw) in zip(
self.xsll[:nquad],
self.xsul[:nquad], self.xslr[:nquad],
self.xsur[:nquad], self.ys[:nquad], self.nx[:nquad], self.ny[:nquad]):
all_fields = (xsllw, xsulw, xslrw, xsurw, ysw, nxw, nyw)
for field in all_fields:
field.config(bg=g.COL['main'])
status = status if field.ok() else False
xsll = xsllw.value()
xsul = xsulw.value()
xslr = xslrw.value()
xsur = xsurw.value()
ys = ysw.value()
nx = nxw.value()
ny = nyw.value()
# Are unbinned dimensions consistent with binning factors?
if nx is None or nx % xbin != 0:
nxw.config(bg=g.COL['error'])
status = False
elif (nx // xbin) % 4 != 0:
"""
The NGC collects pixel data in chunks before transmission.
As a result, to avoid loss of data from frames, the binned
x-size must be a multiple of 4.
"""
nxw.config(bg=g.COL['error'])
status = False
if ny is None or ny % ybin != 0:
nyw.config(bg=g.COL['error'])
status = False
# overlap checks in x direction
if xsll is None or xslr is None or xsll >= xslr:
xslrw.config(bg=g.COL['error'])
status = False
if xsul is None or xsur is None or xsul >= xsur:
xsurw.config(bg=g.COL['error'])
status = False
if nx is None or xsll is None or xsll + nx > xslr:
xslrw.config(bg=g.COL['error'])
status = False
if xsul is None or nx is None or xsul + nx > xsur:
xsurw.config(bg=g.COL['error'])
status = False
# Are the windows synchronised? This means that they would
# be consistent with the pixels generated were the whole CCD
# to be binned by the same factors. If relevant values are not
# set, we count that as "synced" because the purpose of this is
# to enable / disable the sync button and we don't want it to be
# enabled just because xs or ys are not set.
perform_check = all([param is not None for param in (
xsll, xslr, ys, nx, ny
)])
if (perform_check and ((xsll - 1) % xbin != 0 or (xslr - 1025) % xbin != 0 or
(ys - 1) % ybin != 0)):
synced = False
perform_check = all([param is not None for param in (
xsul, xsur, ys, nx, ny
)])
if (perform_check and ((xsul - 1) % xbin != 0 or (xsur - 1025) % xbin != 0 or
(ys - 1) % ybin != 0)):
synced = False
# Range checks
rchecks = ((xsll, nx, xsllw), (xslr, nx, xslrw),
(xsul, nx, xsulw), (xsur, nx, xsurw),
(ys, ny, ysw))
for check in rchecks:
val, size, widg = check
if val is None or size is None or val + size - 1 > widg.imax:
widg.config(bg=g.COL['error'])
status = False
# Quad overlap checks. Compare one quad with the next one
# in the same quadrant if there is one. Only bother if we
# have survived so far, which saves a lot of checks.
if status:
for index in range(nquad-1):
ys1 = self.ys[index].value()
ny1 = self.ny[index].value()
ysw2 = self.ys[index+1]
ys2 = ysw2.value()
if any([thing is None for thing in (ys1, ny1, ys2)]) or ys1 + ny1 > ys2:
ysw2.config(bg=g.COL['error'])
status = False
if synced:
self.sbutt.config(bg=g.COL['main'])
self.sbutt.disable()
else:
if not self.frozen:
self.sbutt.enable()
self.sbutt.config(bg=g.COL['warn'])
return status | Checks the values of the window quads. If any problems are found it
flags the offending window by changing the background colour.
Returns:
status : bool | entailment |
def sync(self):
"""
Synchronise the settings.
This routine changes the window settings so that the pixel start
values are shifted downwards until they are synchronised with a
full-frame binned version. This does nothing if the binning factor
is 1.
"""
xbin = self.xbin.value()
ybin = self.ybin.value()
if xbin == 1 and ybin == 1:
self.sbutt.config(state='disable')
return
for n, (xsll, xsul, xslr, xsur, ys, nx, ny) in enumerate(self):
if (xsll-1) % xbin != 0:
xsll = xbin * ((xsll-1)//xbin)+1
self.xsll[n].set(xsll)
if (xsul-1) % xbin != 0:
xsul = xbin * ((xsul-1)//xbin)+1
self.xsul[n].set(xsul)
if (xslr-1025) % xbin != 0:
xslr = xbin * ((xslr-1025)//xbin)+1025
self.xslr[n].set(xslr)
if (xsur-1025) % xbin != 0:
xsur = xbin * ((xsur-1025)//xbin)+1025
self.xsur[n].set(xsur)
if ybin > 1 and (ys-1) % ybin != 0:
ys = ybin*((ys-1)//ybin)+1
self.ys[n].set(ys)
self.sbutt.config(bg=g.COL['main'])
self.sbutt.config(state='disable') | Synchronise the settings.
This routine changes the window settings so that the pixel start
values are shifted downwards until they are synchronised with a
full-frame binned version. This does nothing if the binning factor
is 1. | entailment |
def freeze(self):
"""
Freeze (disable) all settings
"""
for fields in zip(self.xsll, self.xsul, self.xslr, self.xsur,
self.ys, self.nx, self.ny):
for field in fields:
field.disable()
self.nquad.disable()
self.xbin.disable()
self.ybin.disable()
self.sbutt.disable()
self.frozen = True | Freeze (disable) all settings | entailment |
def enable(self):
"""
Enables WinQuad setting
"""
nquad = self.nquad.value()
for label, xsll, xsul, xslr, xsur, ys, nx, ny in \
zip(self.label[:nquad], self.xsll[:nquad], self.xsul[:nquad],
self.xslr[:nquad], self.xsur[:nquad], self.ys[:nquad],
self.nx[:nquad], self.ny[:nquad]):
label.config(state='normal')
for thing in (xsll, xsul, xslr, xsur, ys, nx, ny):
thing.enable()
for label, xsll, xsul, xslr, xsur, ys, nx, ny in \
zip(self.label[nquad:], self.xsll[nquad:], self.xsul[nquad:],
self.xslr[nquad:], self.xsur[nquad:], self.ys[nquad:],
self.nx[nquad:], self.ny[nquad:]):
label.config(state='disable')
for thing in (xsll, xsul, xslr, xsur, ys, nx, ny):
thing.disable()
self.nquad.enable()
self.xbin.enable()
self.ybin.enable()
self.sbutt.enable() | Enables WinQuad setting | entailment |
def check(self):
"""
Checks the values of the windows. If any problems are found,
it flags them by changing the background colour. Only active
windows are checked.
Returns status, flag for whether parameters are viable.
"""
status = True
synced = True
xbin = self.xbin.value()
ybin = self.ybin.value()
nwin = self.nwin.value()
# individual window checks
g = get_root(self).globals
for xsw, ysw, nxw, nyw in \
zip(self.xs[:nwin], self.ys[:nwin],
self.nx[:nwin], self.ny[:nwin]):
xsw.config(bg=g.COL['main'])
ysw.config(bg=g.COL['main'])
nxw.config(bg=g.COL['main'])
nyw.config(bg=g.COL['main'])
status = status if xsw.ok() else False
status = status if ysw.ok() else False
status = status if nxw.ok() else False
status = status if nyw.ok() else False
xs = xsw.value()
ys = ysw.value()
nx = nxw.value()
ny = nyw.value()
# Are unbinned dimensions consistent with binning factors?
if nx is None or nx % xbin != 0:
nxw.config(bg=g.COL['error'])
status = False
elif (nx // xbin) % 4 != 0:
"""
The NGC collects pixel data in chunks before transmission.
As a result, to avoid loss of data from frames, the binned
x-size must be a multiple of 4.
"""
nxw.config(bg=g.COL['error'])
status = False
if ny is None or ny % ybin != 0:
nyw.config(bg=g.COL['error'])
status = False
# Are the windows synchronised? This means that they
# would be consistent with the pixels generated were
# the whole CCD to be binned by the same factors
# If relevant values are not set, we count that as
# "synced" because the purpose of this is to enable
# / disable the sync button and we don't want it to be
# enabled just because xs or ys are not set.
if (xs is not None and ys is not None and nx is not None and
ny is not None):
if (xs < 1025 and ((xs - 1) % xbin != 0 or (ys - 1) % ybin != 0)
or ((xs-1025) % xbin != 0 or (ys - 1) % ybin != 0)):
synced = False
# Range checks
if xs is None or nx is None or xs + nx - 1 > xsw.imax:
xsw.config(bg=g.COL['error'])
status = False
if ys is None or ny is None or ys + ny - 1 > ysw.imax:
ysw.config(bg=g.COL['error'])
status = False
# Overlap checks. Compare each window with the next one, requiring
# no y overlap and that the second is higher than the first
if status:
n1 = 0
for ysw1, nyw1 in zip(self.ys[:nwin-1], self.ny[:nwin-1]):
ys1 = ysw1.value()
ny1 = nyw1.value()
n1 += 1
ysw2 = self.ys[n1]
ys2 = ysw2.value()
if ys2 < ys1 + ny1:
ysw2.config(bg=g.COL['error'])
status = False
if synced:
self.sbutt.config(bg=g.COL['main'])
self.sbutt.disable()
else:
if not self.frozen:
self.sbutt.enable()
self.sbutt.config(bg=g.COL['warn'])
return status | Checks the values of the windows. If any problems are found,
it flags them by changing the background colour. Only active
windows are checked.
Returns status, flag for whether parameters are viable. | entailment |
def sync(self, *args):
"""
Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factor == 1
"""
xbin = self.xbin.value()
ybin = self.ybin.value()
n = 0
for xs, ys, nx, ny in self:
if xbin > 1 and xs % xbin != 1:
if xs < 1025:
xs = xbin*((xs-1)//xbin)+1
else:
xs = xbin*((xs-1025)//xbin)+1025
self.xs[n].set(xs)
if ybin > 1 and ys % ybin != 1:
ys = ybin*((ys-1)//ybin)+1
self.ys[n].set(ys)
n += 1
self.sbutt.config(bg=g.COL['main'])
self.sbutt.config(state='disable') | Synchronise the settings. This means that the pixel start
values are shifted downwards so that they are synchronised
with a full-frame binned version. This does nothing if the
binning factor == 1 | entailment |
def freeze(self):
"""
Freeze all settings so they can't be altered
"""
for xs, ys, nx, ny in \
zip(self.xs, self.ys, self.nx, self.ny):
xs.disable()
ys.disable()
nx.disable()
ny.disable()
self.nwin.disable()
self.xbin.disable()
self.ybin.disable()
self.sbutt.disable()
self.frozen = True | Freeze all settings so they can't be altered | entailment |
def enable(self):
"""
Enables all settings
"""
nwin = self.nwin.value()
for label, xs, ys, nx, ny in \
zip(self.label[:nwin], self.xs[:nwin], self.ys[:nwin],
self.nx[:nwin], self.ny[:nwin]):
label.config(state='normal')
xs.enable()
ys.enable()
nx.enable()
ny.enable()
for label, xs, ys, nx, ny in \
zip(self.label[nwin:], self.xs[nwin:], self.ys[nwin:],
self.nx[nwin:], self.ny[nwin:]):
label.config(state='disable')
xs.disable()
ys.disable()
nx.disable()
ny.disable()
self.nwin.enable()
self.xbin.enable()
self.ybin.enable()
self.sbutt.enable() | Enables all settings | entailment |
def check_download(self, link_item_dict: Dict[str, LinkItem], folder: Path, log: bool = True) -> Tuple[
Dict[str, LinkItem], Dict[str, LinkItem]]:
"""
Check if the download of the given dict was successful. No proving if the content of the file is correct too.
:param link_item_dict: dict which to check
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param folder: folder where the downloads are saved
:type folder: ~pathlib.Path
:param log: if the lost items should be logged
:type log: bool
:return: succeeded and lost dicts
:rtype: Tuple[Dict[str, ~unidown.plugin.link_item.LinkItem], Dict[str, ~unidown.plugin.link_item.LinkItem]]
"""
succeed = {link: item for link, item in link_item_dict.items() if folder.joinpath(item.name).is_file()}
lost = {link: item for link, item in link_item_dict.items() if link not in succeed}
if lost and log:
for link, item in lost.items():
self.log.error(f"Not downloaded: {self.info.host+link} - {item.name}")
return succeed, lost | Check if the download of the given dict was successful. No proving if the content of the file is correct too.
:param link_item_dict: dict which to check
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param folder: folder where the downloads are saved
:type folder: ~pathlib.Path
:param log: if the lost items should be logged
:type log: bool
:return: succeeded and lost dicts
:rtype: Tuple[Dict[str, ~unidown.plugin.link_item.LinkItem], Dict[str, ~unidown.plugin.link_item.LinkItem]] | entailment |
def delete_data(self):
"""
Delete everything which is related to the plugin. **Do not use if you do not know what you do!**
"""
self.clean_up()
tools.delete_dir_rec(self._download_path)
if self._save_state_file.exists():
self._save_state_file.unlink() | Delete everything which is related to the plugin. **Do not use if you do not know what you do!** | entailment |
def download_as_file(self, url: str, folder: Path, name: str, delay: float = 0) -> str:
"""
Download the given url to the given target folder.
:param url: link
:type url: str
:param folder: target folder
:type folder: ~pathlib.Path
:param name: target file name
:type name: str
:param delay: after download wait in seconds
:type delay: float
:return: url
:rtype: str
:raises ~urllib3.exceptions.HTTPError: if the connection has an error
"""
while folder.joinpath(name).exists(): # TODO: handle already existing files
self.log.warning('already exists: ' + name)
name = name + '_d'
with self._downloader.request('GET', url, preload_content=False, retries=urllib3.util.retry.Retry(3)) as reader:
if reader.status == 200:
with folder.joinpath(name).open(mode='wb') as out_file:
out_file.write(reader.data)
else:
raise HTTPError(f"{url} | {reader.status}")
if delay > 0:
time.sleep(delay)
return url | Download the given url to the given target folder.
:param url: link
:type url: str
:param folder: target folder
:type folder: ~pathlib.Path
:param name: target file name
:type name: str
:param delay: after download wait in seconds
:type delay: float
:return: url
:rtype: str
:raises ~urllib3.exceptions.HTTPError: if the connection has an error | entailment |
def download(self, link_item_dict: Dict[str, LinkItem], folder: Path, desc: str, unit: str, delay: float = 0) -> \
List[str]:
"""
.. warning::
The parameters may change in future versions. (e.g. change order and accept another host)
Download the given LinkItem dict from the plugins host, to the given path. Proceeded with multiple connections
:attr:`~unidown.plugin.a_plugin.APlugin._simul_downloads`. After
:func:`~unidown.plugin.a_plugin.APlugin.check_download` is recommend.
This function don't use an internal `link_item_dict`, `delay` or `folder` directly set in options or instance
vars, because it can be used aside of the normal download routine inside the plugin itself for own things.
As of this it still needs access to the logger, so a staticmethod is not possible.
:param link_item_dict: data which gets downloaded
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param folder: target download folder
:type folder: ~pathlib.Path
:param desc: description of the progressbar
:type desc: str
:param unit: unit of the download, shown in the progressbar
:type unit: str
:param delay: delay between the downloads in seconds
:type delay: float
:return: list of urls of downloads without errors
:rtype: List[str]
"""
if 'delay' in self._options:
delay = self._options['delay']
# TODO: add other optional host?
if not link_item_dict:
return []
job_list = []
with ThreadPoolExecutor(max_workers=self._simul_downloads) as executor:
for link, item in link_item_dict.items():
job = executor.submit(self.download_as_file, link, folder, item.name, delay)
job_list.append(job)
pbar = tqdm(as_completed(job_list), total=len(job_list), desc=desc, unit=unit, leave=True, mininterval=1,
ncols=100, disable=dynamic_data.DISABLE_TQDM)
for _ in pbar:
pass
download_without_errors = []
for job in job_list:
try:
download_without_errors.append(job.result())
except HTTPError as ex:
self.log.warning("Failed to download: " + str(ex))
# Todo: connection lost handling (check if the connection to the server itself is lost)
return download_without_errors | .. warning::
The parameters may change in future versions. (e.g. change order and accept another host)
Download the given LinkItem dict from the plugins host, to the given path. Proceeded with multiple connections
:attr:`~unidown.plugin.a_plugin.APlugin._simul_downloads`. After
:func:`~unidown.plugin.a_plugin.APlugin.check_download` is recommend.
This function don't use an internal `link_item_dict`, `delay` or `folder` directly set in options or instance
vars, because it can be used aside of the normal download routine inside the plugin itself for own things.
As of this it still needs access to the logger, so a staticmethod is not possible.
:param link_item_dict: data which gets downloaded
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param folder: target download folder
:type folder: ~pathlib.Path
:param desc: description of the progressbar
:type desc: str
:param unit: unit of the download, shown in the progressbar
:type unit: str
:param delay: delay between the downloads in seconds
:type delay: float
:return: list of urls of downloads without errors
:rtype: List[str] | entailment |
def _create_save_state(self, link_item_dict: Dict[str, LinkItem]) -> SaveState:
"""
Create protobuf savestate of the module and the given data.
:param link_item_dict: data
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:return: the savestate
:rtype: ~unidown.plugin.save_state.SaveState
"""
return SaveState(dynamic_data.SAVE_STATE_VERSION, self.info, self.last_update, link_item_dict) | Create protobuf savestate of the module and the given data.
:param link_item_dict: data
:type link_item_dict: Dict[str, ~unidown.plugin.link_item.LinkItem]
:return: the savestate
:rtype: ~unidown.plugin.save_state.SaveState | entailment |
def save_save_state(self, data_dict: Dict[str, LinkItem]): # TODO: add progressbar
"""
Save meta data about the downloaded things and the plugin to file.
:param data_dict: data
:type data_dict: Dict[link, ~unidown.plugin.link_item.LinkItem]
"""
json_data = json_format.MessageToJson(self._create_save_state(data_dict).to_protobuf())
with self._save_state_file.open(mode='w', encoding="utf8") as writer:
writer.write(json_data) | Save meta data about the downloaded things and the plugin to file.
:param data_dict: data
:type data_dict: Dict[link, ~unidown.plugin.link_item.LinkItem] | entailment |
def load_save_state(self) -> SaveState:
"""
Load the savestate of the plugin.
:return: savestate
:rtype: ~unidown.plugin.save_state.SaveState
:raises ~unidown.plugin.exceptions.PluginException: broken savestate json
:raises ~unidown.plugin.exceptions.PluginException: different savestate versions
:raises ~unidown.plugin.exceptions.PluginException: different plugin versions
:raises ~unidown.plugin.exceptions.PluginException: different plugin names
:raises ~unidown.plugin.exceptions.PluginException: could not parse the protobuf
"""
if not self._save_state_file.exists():
self.log.info("No savestate file found.")
return SaveState(dynamic_data.SAVE_STATE_VERSION, self.info, datetime(1970, 1, 1), {})
savestat_proto = ""
with self._save_state_file.open(mode='r', encoding="utf8") as data_file:
try:
savestat_proto = json_format.Parse(data_file.read(), SaveStateProto(), ignore_unknown_fields=False)
except ParseError:
raise PluginException(
f"Broken savestate json. Please fix or delete (you may lose data in this case) the file: {self._save_state_file}")
try:
save_state = SaveState.from_protobuf(savestat_proto)
except ValueError as ex:
raise PluginException(f"Could not parse the protobuf {self._save_state_file}: {ex}")
else:
del savestat_proto
if save_state.version != dynamic_data.SAVE_STATE_VERSION:
raise PluginException("Different save state version handling is not implemented yet.")
if save_state.plugin_info.version != self.info.version:
raise PluginException("Different plugin version handling is not implemented yet.")
if save_state.plugin_info.name != self.name:
raise PluginException("Save state plugin ({name}) does not match the current ({cur_name}).".format(
name=save_state.plugin_info.name, cur_name=self.name))
return save_state | Load the savestate of the plugin.
:return: savestate
:rtype: ~unidown.plugin.save_state.SaveState
:raises ~unidown.plugin.exceptions.PluginException: broken savestate json
:raises ~unidown.plugin.exceptions.PluginException: different savestate versions
:raises ~unidown.plugin.exceptions.PluginException: different plugin versions
:raises ~unidown.plugin.exceptions.PluginException: different plugin names
:raises ~unidown.plugin.exceptions.PluginException: could not parse the protobuf | entailment |
def get_updated_data(self, old_data: Dict[str, LinkItem]) -> Dict[str, LinkItem]:
"""
Get links who needs to be downloaded by comparing old and the new data.
:param old_data: old data
:type old_data: Dict[str, ~unidown.plugin.link_item.LinkItem]
:return: data which is newer or dont exist in the old one
:rtype: Dict[str, ~unidown.plugin.link_item.LinkItem]
"""
if not self.download_data:
return {}
new_link_item_dict = {}
for link, link_item in tqdm(self.download_data.items(), desc="Compare with save", unit="item", leave=True,
mininterval=1, ncols=100, disable=dynamic_data.DISABLE_TQDM):
# TODO: add methode to log lost items, which are in old but not in new
# if link in new_link_item_dict: # TODO: is ever false, since its the key of a dict: move to the right place
# self.log.warning("Duplicate: " + link + " - " + new_link_item_dict[link] + " : " + link_item)
# if the new_data link does not exists in old_data or new_data time is newer
if (link not in old_data) or (link_item.time > old_data[link].time):
new_link_item_dict[link] = link_item
return new_link_item_dict | Get links who needs to be downloaded by comparing old and the new data.
:param old_data: old data
:type old_data: Dict[str, ~unidown.plugin.link_item.LinkItem]
:return: data which is newer or dont exist in the old one
:rtype: Dict[str, ~unidown.plugin.link_item.LinkItem] | entailment |
def update_dict(self, base: Dict[str, LinkItem], new: Dict[str, LinkItem]):
"""
Use for updating save state dicts and get the new save state dict. Provides a debug option at info level.
Updates the base dict. Basically executes `base.update(new)`.
:param base: base dict **gets overridden!**
:type base: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param new: data which updates the base
:type new: Dict[str, ~unidown.plugin.link_item.LinkItem]
"""
if logging.INFO >= logging.getLevelName(dynamic_data.LOG_LEVEL): # TODO: logging here or outside
for link, item in new.items():
if link in base:
self.log.info('Actualize item: ' + link + ' | ' + str(base[link]) + ' -> ' + str(item))
base.update(new) | Use for updating save state dicts and get the new save state dict. Provides a debug option at info level.
Updates the base dict. Basically executes `base.update(new)`.
:param base: base dict **gets overridden!**
:type base: Dict[str, ~unidown.plugin.link_item.LinkItem]
:param new: data which updates the base
:type new: Dict[str, ~unidown.plugin.link_item.LinkItem] | entailment |
def _get_options_dic(self, options: List[str]) -> Dict[str, str]:
"""
Convert the option list to a dictionary where the key is the option and the value is the related option.
Is called in the init.
:param options: options given to the plugin.
:type options: List[str]
:return: dictionary which contains the option key as str related to the option string
:rtype Dict[str, str]
"""
options_dic = {}
for option in options:
cur_option = option.split("=")
if len(cur_option) != 2:
self.log.warning(f"'{option}' is not valid and will be ignored.")
options_dic[cur_option[0]] = cur_option[1]
return options_dic | Convert the option list to a dictionary where the key is the option and the value is the related option.
Is called in the init.
:param options: options given to the plugin.
:type options: List[str]
:return: dictionary which contains the option key as str related to the option string
:rtype Dict[str, str] | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.