def members(group_id):
    """List user group members."""
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 5, type=int)
    q = request.args.get('q', '')
    s = request.args.get('s', '')

    group = Group.query.get_or_404(group_id)

    if group.can_see_members(current_user):
        members = Membership.query_by_group(group_id, with_invitations=True)

        if q:
            members = Membership.search(members, q)
        if s:
            members = Membership.order(members, Membership.state, s)

        members = members.paginate(page, per_page=per_page)

        return render_template(
            "invenio_groups/members.html",
            group=group,
            members=members,
            page=page,
            per_page=per_page,
            q=q,
            s=s,
        )

    flash(
        _(
            'You are not allowed to see members of this group %(group_name)s.',
            group_name=group.name
        ),
        'error'
    )
    return redirect(url_for('.index'))

def leave(group_id):
    """Leave group."""
    group = Group.query.get_or_404(group_id)

    if group.can_leave(current_user):
        try:
            group.remove_member(current_user)
        except Exception as e:
            flash(str(e), "error")
            return redirect(url_for('.index'))

        flash(
            _(
                'You have successfully left %(group_name)s group.',
                group_name=group.name
            ),
            'success'
        )
        return redirect(url_for('.index'))

    flash(
        _(
            'You cannot leave the group %(group_name)s',
            group_name=group.name
        ),
        'error'
    )
    return redirect(url_for('.index'))

def approve(group_id, user_id):
    """Approve a user."""
    membership = Membership.query.get_or_404((user_id, group_id))
    group = membership.group

    if group.can_edit(current_user):
        try:
            membership.accept()
        except Exception as e:
            flash(str(e), 'error')
            return redirect(url_for('.requests', group_id=membership.group.id))

        flash(_('%(user)s accepted to %(name)s group.',
                user=membership.user.email,
                name=membership.group.name), 'success')
        return redirect(url_for('.requests', group_id=membership.group.id))

    flash(
        _(
            'You cannot approve memberships for the group %(group_name)s',
            group_name=group.name
        ),
        'error'
    )
    return redirect(url_for('.index'))

def remove(group_id, user_id):
    """Remove user from a group."""
    group = Group.query.get_or_404(group_id)
    user = User.query.get_or_404(user_id)

    if group.can_edit(current_user):
        try:
            group.remove_member(user)
        except Exception as e:
            flash(str(e), "error")
            return redirect(urlparse(request.referrer).path)

        flash(_('User %(user_email)s was removed from %(group_name)s group.',
                user_email=user.email,
                group_name=group.name), 'success')
        return redirect(urlparse(request.referrer).path)

    flash(
        _(
            'You cannot delete users of the group %(group_name)s',
            group_name=group.name
        ),
        'error'
    )
    return redirect(url_for('.index'))

def accept(group_id):
    """Accept a pending invitation."""
    membership = Membership.query.get_or_404((current_user.get_id(), group_id))

    # No permission check here, because permissions are checked when the
    # membership is created.
    try:
        membership.accept()
    except Exception as e:
        flash(str(e), 'error')
        return redirect(url_for('.invitations', group_id=membership.group.id))

    flash(_('You are now part of %(name)s group.',
            name=membership.group.name), 'success')
    return redirect(url_for('.invitations', group_id=membership.group.id))

def new_member(group_id):
    """Add (invite) new member."""
    group = Group.query.get_or_404(group_id)

    if group.can_invite_others(current_user):
        form = NewMemberForm()

        if form.validate_on_submit():
            emails = filter(None, form.data['emails'].splitlines())
            group.invite_by_emails(emails)
            flash(_('Requests sent!'), 'success')
            return redirect(url_for('.members', group_id=group.id))

        return render_template(
            "invenio_groups/new_member.html",
            group=group,
            form=form
        )

    flash(
        _(
            'You cannot invite users or yourself (i.e. join) to the group '
            '%(group_name)s',
            group_name=group.name
        ),
        'error'
    )
    return redirect(url_for('.index'))

def locate_spheres(image, feature_rad, dofilter=False, order=(3, 3, 3),
                   trim_edge=True, **kwargs):
    """
    Get an initial featuring of sphere positions in an image.

    Parameters
    ----------
    image : :class:`peri.util.Image` object
        Image object which defines the image file as well as the region.
    feature_rad : float
        Radius of objects to find, in pixels. This is a featuring radius
        and not a real radius, so a better value is frequently smaller
        than the real radius (half the actual radius is good). If
        ``use_tp`` is True, then twice ``feature_rad`` is passed as
        trackpy's ``diameter`` keyword.
    dofilter : boolean, optional
        Whether to remove the background before featuring. Doing so can
        often greatly increase the success of initial featuring and
        decrease later optimization time. Filtering functions by fitting
        the image to a low-order polynomial and featuring the residuals.
        In doing so, this will change the mean intensity of the featured
        image and hence the good value of ``minmass`` will change when
        ``dofilter`` is True. Default is False.
    order : 3-element tuple, optional
        If ``dofilter``, the 2+1D Leg Poly approximation to the background
        illumination field. Default is (3, 3, 3).

    Other Parameters
    ----------------
    invert : boolean, optional
        Whether to invert the image for featuring. Set to True if the
        image is dark particles on a bright background. Default is True.
    minmass : Float or None, optional
        The minimum mass/masscut of a particle. Default is None, which
        calculates internally.
    use_tp : Bool, optional
        Whether or not to use trackpy. Default is False, since trackpy
        cuts out particles at the edge.

    Returns
    -------
    positions : np.ndarray [N,3]
        Positions of the particles in order (z,y,x) in image pixel units.

    Notes
    -----
    Optionally filters the image by fitting the image I(x,y,z) to a
    polynomial, then subtracts this fitted intensity variation and uses
    centroid methods to find the particles.
    """
    # We just want a smoothed field model of the image so that the residuals
    # are simply the particles without other complications
    m = models.SmoothFieldModel()
    I = ilms.LegendrePoly2P1D(order=order, constval=image.get_image().mean())
    s = states.ImageState(image, [I], pad=0, mdl=m)
    if dofilter:
        opt.do_levmarq(s, s.params)
    pos = addsub.feature_guess(s, feature_rad, trim_edge=trim_edge, **kwargs)[0]
    return pos

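# A minimal usage sketch (illustrative, not from the source; assumes peri is
# installed and 'confocal_stack.tif' is a hypothetical image file):
#
#     from peri import util
#
#     im = util.RawImage('confocal_stack.tif')
#     pos = locate_spheres(im, feature_rad=5.0, dofilter=True, invert=True)
#     # pos is an (N, 3) array of (z, y, x) positions in pixel units
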
def get_initial_featuring(statemaker, feature_rad, actual_rad=None,
                          im_name=None, tile=None, invert=True, desc='',
                          use_full_path=False, featuring_params={},
                          statemaker_kwargs={}, **kwargs):
    """
    Completely optimizes a state from an image of roughly monodisperse
    particles.

    The user can interactively select the image. The state is periodically
    saved during optimization, with a different filename for different
    stages of the optimization.

    Parameters
    ----------
    statemaker : Function
        A statemaker function. Given arguments `im` (a
        :class:`~peri.util.Image`), `pos` (numpy.ndarray), `rad` (ndarray),
        and any additional `statemaker_kwargs`, must return a
        :class:`~peri.states.ImageState`. There is an example function in
        scripts/statemaker_example.py
    feature_rad : Int, odd
        The particle radius for featuring, as passed to locate_spheres.
    actual_rad : Float, optional
        The actual radius of the particles. Default is feature_rad
    im_name : string, optional
        The file name of the image to load. If not set, it is selected
        interactively through Tk.
    tile : :class:`peri.util.Tile`, optional
        The tile of the raw image to be analyzed. Default is None, the
        entire image.
    invert : Bool, optional
        Whether to invert the image for featuring, as passed to trackpy.
        Default is True.
    desc : String, optional
        A description to be inserted in saved state. The save name will
        be, e.g., '0.tif-peri-' + desc + 'initial-burn.pkl'. Default is ''
    use_full_path : Bool, optional
        Set to True to use the full path name for the image. Default is
        False.
    featuring_params : Dict, optional
        kwargs-like dict of any additional keyword arguments to pass to
        ``locate_spheres``, such as ``'use_tp'`` or ``'minmass'``.
        Default is ``{}``.
    statemaker_kwargs : Dict, optional
        kwargs-like dict of any additional keyword arguments to pass to
        the statemaker function. Default is ``{}``.

    Other Parameters
    ----------------
    max_mem : Numeric
        The maximum additional memory to use for the optimizers, as
        passed to optimize.burn. Default is 1e9.
    min_rad : Float, optional
        The minimum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius smaller than this are identified as
        fake and removed. Default is 0.5 * actual_rad.
    max_rad : Float, optional
        The maximum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius larger than this are identified as
        fake and removed. Default is 1.5 * actual_rad, however you may find
        better results if you make this more stringent.
    rz_order : int, optional
        If nonzero, the order of an additional augmented rscl(z) parameter
        for optimization. Default is 0; i.e. no rscl(z) optimization.
    zscale : Float, optional
        The zscale of the image. Default is 1.0

    Returns
    -------
    s : :class:`peri.states.ImageState`
        The optimized state.

    See Also
    --------
    feature_from_pos_rad : Using a previous state's globals and
        user-provided positions and radii as an initial guess, completely
        optimizes a state.

    get_particles_featuring : Using a previous state's globals and
        positions as an initial guess, completely optimizes a state.

    translate_featuring : Use a previous state's globals and centroids
        methods for an initial particle guess, completely optimizes a
        state.

    Notes
    -----
    Proceeds by centroid-featuring the image for an initial guess of
    particle positions, then optimizing the globals + positions until
    termination as called in optimize_from_initial.

    The ``Other Parameters`` are passed to optimize_from_initial.
    """
    if actual_rad is None:
        actual_rad = feature_rad

    _, im_name = _pick_state_im_name('', im_name, use_full_path=use_full_path)
    im = util.RawImage(im_name, tile=tile)

    pos = locate_spheres(im, feature_rad, invert=invert, **featuring_params)
    if np.size(pos) == 0:
        msg = 'No particles found. Try using a smaller `feature_rad`.'
        raise ValueError(msg)

    rad = np.ones(pos.shape[0], dtype='float') * actual_rad
    s = statemaker(im, pos, rad, **statemaker_kwargs)
    RLOG.info('State Created.')
    if desc is not None:
        states.save(s, desc=desc + 'initial')
    optimize_from_initial(s, invert=invert, desc=desc, **kwargs)
    return s

def feature_from_pos_rad(statemaker, pos, rad, im_name=None, tile=None,
                         desc='', use_full_path=False, statemaker_kwargs={},
                         **kwargs):
    """
    Gets a completely-optimized state from an image and an initial guess of
    particle positions and radii.

    The state is periodically saved during optimization, with a different
    filename for different stages of the optimization. The user can select
    the image.

    Parameters
    ----------
    statemaker : Function
        A statemaker function. Given arguments `im` (a
        :class:`~peri.util.Image`), `pos` (numpy.ndarray), `rad` (ndarray),
        and any additional `statemaker_kwargs`, must return a
        :class:`~peri.states.ImageState`. There is an example function in
        scripts/statemaker_example.py
    pos : [N,3] element numpy.ndarray.
        The initial guess for the N particle positions.
    rad : N element numpy.ndarray.
        The initial guess for the N particle radii.
    im_name : string or None, optional
        The filename of the image to feature. Default is None, in which
        case the user selects the image.
    tile : :class:`peri.util.Tile`, optional
        A tile of the sub-region of the image to feature. Default is
        None, i.e. the entire image.
    desc : String, optional
        A description to be inserted in saved state. The save name will
        be, e.g., '0.tif-peri-' + desc + 'initial-burn.pkl'. Default is ''
    use_full_path : Bool, optional
        Set to True to use the full path name for the image. Default is
        False.
    statemaker_kwargs : Dict, optional
        kwargs-like dict of any additional keyword arguments to pass to
        the statemaker function. Default is ``{}``.

    Other Parameters
    ----------------
    max_mem : Numeric
        The maximum additional memory to use for the optimizers, as
        passed to optimize.burn. Default is 1e9.
    min_rad : Float, optional
        The minimum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius smaller than this are identified as
        fake and removed. Default is 0.5 * actual_rad.
    max_rad : Float, optional
        The maximum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius larger than this are identified as
        fake and removed. Default is 1.5 * actual_rad, however you may find
        better results if you make this more stringent.
    invert : {'guess', True, False}
        Whether to invert the image for featuring, as passed to
        addsubtract.add_subtract. Default is to guess from the current
        state's particle positions.
    rz_order : int, optional
        If nonzero, the order of an additional augmented rscl(z) parameter
        for optimization. Default is 0; i.e. no rscl(z) optimization.
    zscale : Float, optional
        The zscale of the image. Default is 1.0

    Returns
    -------
    s : :class:`peri.states.ImageState`
        The optimized state.

    See Also
    --------
    get_initial_featuring : Features an image from scratch, using centroid
        methods as initial particle locations.

    get_particles_featuring : Using a previous state's globals and
        positions as an initial guess, completely optimizes a state.

    translate_featuring : Use a previous state's globals and centroids
        methods for an initial particle guess, completely optimizes a
        state.

    Notes
    -----
    The ``Other Parameters`` are passed to optimize_from_initial.
    """
    if np.size(pos) == 0:
        raise ValueError('`pos` is an empty array.')
    elif np.shape(pos)[1] != 3:
        raise ValueError('`pos` must be an [N,3] element numpy.ndarray.')

    _, im_name = _pick_state_im_name('', im_name, use_full_path=use_full_path)
    im = util.RawImage(im_name, tile=tile)

    s = statemaker(im, pos, rad, **statemaker_kwargs)
    RLOG.info('State Created.')
    if desc is not None:
        states.save(s, desc=desc + 'initial')
    optimize_from_initial(s, desc=desc, **kwargs)
    return s

def optimize_from_initial(s, max_mem=1e9, invert='guess', desc='',
                          rz_order=3, min_rad=None, max_rad=None):
    """
    Optimizes a state from an initial set of positions and radii, without
    any known microscope parameters.

    Parameters
    ----------
    s : :class:`peri.states.ImageState`
        The state to optimize. It is modified internally and returned.
    max_mem : Numeric, optional
        The maximum memory for the optimizer to use. Default is 1e9 (bytes)
    invert : Bool or `'guess'`, optional
        Set to True if the image is dark particles on a bright background,
        False otherwise. Used for add-subtract. The default is to guess
        from the state's current particles.
    desc : String, optional
        An additional description to infix for periodic saving along the
        way. Default is the null string ``''``.
    rz_order : int, optional
        ``rz_order`` as passed to opt.burn. Default is 3
    min_rad : Float or None, optional
        The minimum radius to identify a particle as bad, as passed to
        add-subtract. Default is None, which picks half the median radii.
        If your sample is not monodisperse you should pick a different
        value.
    max_rad : Float or None, optional
        The maximum radius to identify a particle as bad, as passed to
        add-subtract. Default is None, which picks 1.5x the median radii.
        If your sample is not monodisperse you should pick a different
        value.

    Returns
    -------
    s : :class:`peri.states.ImageState`
        The optimized state, which is the same as the input ``s`` but
        modified in-place.
    """
    RLOG.info('Initial burn:')
    if desc is not None:
        desc_burn = desc + 'initial-burn'
        desc_polish = desc + 'addsub-polish'
    else:
        desc_burn, desc_polish = [None] * 2

    opt.burn(s, mode='burn', n_loop=3, fractol=0.1, desc=desc_burn,
             max_mem=max_mem, include_rad=False, dowarn=False)
    opt.burn(s, mode='burn', n_loop=3, fractol=0.1, desc=desc_burn,
             max_mem=max_mem, include_rad=True, dowarn=False)

    RLOG.info('Start add-subtract')
    rad = s.obj_get_radii()
    if min_rad is None:
        min_rad = 0.5 * np.median(rad)
    if max_rad is None:
        max_rad = 1.5 * np.median(rad)
    addsub.add_subtract(s, tries=30, min_rad=min_rad, max_rad=max_rad,
                        invert=invert)
    if desc is not None:
        states.save(s, desc=desc + 'initial-addsub')

    RLOG.info('Final polish:')
    d = opt.burn(s, mode='polish', n_loop=8, fractol=3e-4, desc=desc_polish,
                 max_mem=max_mem, rz_order=rz_order, dowarn=False)
    if not d['converged']:
        RLOG.warn('Optimization did not converge; consider re-running')
    return s

def translate_featuring(state_name=None, im_name=None, use_full_path=False,
                        **kwargs):
    """
    Translates one optimized state into another image where the particles
    have moved by a small amount (~1 particle radius).

    Returns a completely-optimized state. The user can interactively select
    the initial state and the second raw image. The state is periodically
    saved during optimization, with a different filename for different
    stages of the optimization.

    Parameters
    ----------
    state_name : String or None, optional
        The name of the initially-optimized state. Default is None, which
        prompts the user to select the name interactively through a Tk
        window.
    im_name : String or None, optional
        The name of the new image to optimize. Default is None, which
        prompts the user to select the name interactively through a Tk
        window.
    use_full_path : Bool, optional
        Set to True to use the full path of the state instead of partial
        path names (e.g. /full/path/name/state.pkl vs state.pkl). Default
        is False

    Other Parameters
    ----------------
    max_mem : Numeric
        The maximum additional memory to use for the optimizers, as
        passed to optimize.burn. Default is 1e9.
    desc : String, optional
        A description to be inserted in saved state. The save name will
        be, e.g., '0.tif-peri-' + desc + 'initial-burn.pkl'. Default is ''
    min_rad : Float, optional
        The minimum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius smaller than this are identified as
        fake and removed. Default is 0.5 * actual_rad.
    max_rad : Float, optional
        The maximum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius larger than this are identified as
        fake and removed. Default is 1.5 * actual_rad, however you may find
        better results if you make this more stringent.
    invert : {True, False, 'guess'}
        Whether to invert the image for featuring, as passed to
        addsubtract.add_subtract. Default is to guess from the state's
        current particles.
    rz_order : int, optional
        If nonzero, the order of an additional augmented rscl(z) parameter
        for optimization. Default is 0; i.e. no rscl(z) optimization.
    do_polish : Bool, optional
        Set to False to only optimize the particles and add-subtract.
        Default is True, which then runs a polish afterwards.

    Returns
    -------
    s : :class:`peri.states.ImageState`
        The optimized state.

    See Also
    --------
    get_initial_featuring : Features an image from scratch, using centroid
        methods as initial particle locations.

    feature_from_pos_rad : Using a previous state's globals and
        user-provided positions and radii as an initial guess, completely
        optimizes a state.

    get_particles_featuring : Using a previous state's globals and
        positions as an initial guess, completely optimizes a state.

    Notes
    -----
    The ``Other Parameters`` are passed to _translate_particles.
    Proceeds by:
        1. Optimize particle positions only.
        2. Optimize particle positions and radii only.
        3. Add-subtract missing and bad particles.
        4. If polish, optimize the illumination, background, and particles.
        5. If polish, optimize everything.
    """
    state_name, im_name = _pick_state_im_name(
        state_name, im_name, use_full_path=use_full_path)

    s = states.load(state_name)
    im = util.RawImage(im_name, tile=s.image.tile)

    s.set_image(im)
    _translate_particles(s, **kwargs)
    return s

def get_particles_featuring(feature_rad, state_name=None, im_name=None,
                            use_full_path=False, actual_rad=None,
                            invert=True, featuring_params={}, **kwargs):
    """
    Combines centroid featuring with the globals from a previous state.

    Runs trackpy.locate on an image, sets the globals from a previous
    state, and calls _translate_particles.

    Parameters
    ----------
    feature_rad : Int, odd
        The particle radius for featuring, as passed to locate_spheres.
    state_name : String or None, optional
        The name of the initially-optimized state. Default is None, which
        prompts the user to select the name interactively through a Tk
        window.
    im_name : String or None, optional
        The name of the new image to optimize. Default is None, which
        prompts the user to select the name interactively through a Tk
        window.
    use_full_path : Bool, optional
        Set to True to use the full path of the state instead of partial
        path names (e.g. /full/path/name/state.pkl vs state.pkl). Default
        is False
    actual_rad : Float or None, optional
        The initial guess for the particle radii. Default is the median
        of the previous state.
    invert : Bool
        Whether to invert the image for featuring, as passed to
        addsubtract.add_subtract and locate_spheres. Set to False if the
        image is bright particles on a dark background. Default is True
        (dark particles on bright background).
    featuring_params : Dict, optional
        kwargs-like dict of any additional keyword arguments to pass to
        ``locate_spheres``, such as ``'use_tp'`` or ``'minmass'``.
        Default is ``{}``.

    Other Parameters
    ----------------
    max_mem : Numeric
        The maximum additional memory to use for the optimizers, as
        passed to optimize.burn. Default is 1e9.
    desc : String, optional
        A description to be inserted in saved state. The save name will
        be, e.g., '0.tif-peri-' + desc + 'initial-burn.pkl'. Default is ''
    min_rad : Float, optional
        The minimum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius smaller than this are identified as
        fake and removed. Default is 0.5 * actual_rad.
    max_rad : Float, optional
        The maximum particle radius, as passed to addsubtract.add_subtract.
        Particles with a fitted radius larger than this are identified as
        fake and removed. Default is 1.5 * actual_rad, however you may find
        better results if you make this more stringent.
    rz_order : int, optional
        If nonzero, the order of an additional augmented rscl(z) parameter
        for optimization. Default is 0; i.e. no rscl(z) optimization.
    do_polish : Bool, optional
        Set to False to only optimize the particles and add-subtract.
        Default is True, which then runs a polish afterwards.

    Returns
    -------
    s : :class:`peri.states.ImageState`
        The optimized state.

    See Also
    --------
    get_initial_featuring : Features an image from scratch, using centroid
        methods as initial particle locations.

    feature_from_pos_rad : Using a previous state's globals and
        user-provided positions and radii as an initial guess, completely
        optimizes a state.

    translate_featuring : Use a previous state's globals and centroids
        methods for an initial particle guess, completely optimizes a
        state.

    Notes
    -----
    The ``Other Parameters`` are passed to _translate_particles.
    Proceeds by:
        1. Find a guess of the particle positions through centroid methods.
        2. Optimize particle positions only.
        3. Optimize particle positions and radii only.
        4. Add-subtract missing and bad particles.
        5. If polish, optimize the illumination, background, and particles.
        6. If polish, optimize everything.
    """
    state_name, im_name = _pick_state_im_name(
        state_name, im_name, use_full_path=use_full_path)
    s = states.load(state_name)

    if actual_rad is None:
        actual_rad = np.median(s.obj_get_radii())

    im = util.RawImage(im_name, tile=s.image.tile)
    pos = locate_spheres(im, feature_rad, invert=invert, **featuring_params)
    _ = s.obj_remove_particle(np.arange(s.obj_get_radii().size))
    s.obj_add_particle(pos, np.ones(pos.shape[0]) * actual_rad)

    s.set_image(im)
    _translate_particles(s, invert=invert, **kwargs)
    return s

def _pick_state_im_name(state_name, im_name, use_full_path=False):
    """
    If state_name or im_name is None, picks them interactively through Tk,
    and then sets with or without the full path.

    Parameters
    ----------
    state_name : {string, None}
        The name of the state. If None, selected through Tk.
    im_name : {string, None}
        The name of the image. If None, selected through Tk.
    use_full_path : Bool, optional
        Set to True to return the names as full paths rather than relative
        paths. Default is False (relative path).
    """
    initial_dir = os.getcwd()
    if (state_name is None) or (im_name is None):
        wid = tk.Tk()
        wid.withdraw()
    if state_name is None:
        state_name = tkfd.askopenfilename(
            initialdir=initial_dir, title='Select pre-featured state')
        os.chdir(os.path.dirname(state_name))

    if im_name is None:
        im_name = tkfd.askopenfilename(
            initialdir=initial_dir, title='Select new image')

    if (not use_full_path) and (os.path.dirname(im_name) != ''):
        im_path = os.path.dirname(im_name)
        os.chdir(im_path)
        im_name = os.path.basename(im_name)
    else:
        os.chdir(initial_dir)
    return state_name, im_name

def _translate_particles(s, max_mem=1e9, desc='', min_rad='calc',
                         max_rad='calc', invert='guess', rz_order=0,
                         do_polish=True):
    """
    Workhorse for translating particles. See get_particles_featuring for
    docs.
    """
    if desc is not None:
        desc_trans = desc + 'translate-particles'
        desc_burn = desc + 'addsub_burn'
        desc_polish = desc + 'addsub_polish'
    else:
        desc_trans, desc_burn, desc_polish = [None] * 3

    RLOG.info('Translate Particles:')
    opt.burn(s, mode='do-particles', n_loop=4, fractol=0.1, desc=desc_trans,
             max_mem=max_mem, include_rad=False, dowarn=False)
    opt.burn(s, mode='do-particles', n_loop=4, fractol=0.05, desc=desc_trans,
             max_mem=max_mem, include_rad=True, dowarn=False)

    RLOG.info('Start add-subtract')
    addsub.add_subtract(s, tries=30, min_rad=min_rad, max_rad=max_rad,
                        invert=invert)
    if desc is not None:
        states.save(s, desc=desc + 'translate-addsub')

    if do_polish:
        RLOG.info('Final Burn:')
        opt.burn(s, mode='burn', n_loop=3, fractol=3e-4, desc=desc_burn,
                 max_mem=max_mem, rz_order=rz_order, dowarn=False)
        RLOG.info('Final Polish:')
        d = opt.burn(s, mode='polish', n_loop=4, fractol=3e-4,
                     desc=desc_polish, max_mem=max_mem, rz_order=rz_order,
                     dowarn=False)
        if not d['converged']:
            RLOG.warn('Optimization did not converge; consider re-running')

def link_zscale(st):
    """Links the state ``st`` psf zscale with the global zscale."""
    # FIXME should be made more generic to other parameters and categories
    psf = st.get('psf')
    psf.param_dict['zscale'] = psf.param_dict['psf-zscale']
    psf.params[psf.params.index('psf-zscale')] = 'zscale'
    psf.global_zscale = True
    psf.param_dict.pop('psf-zscale')

    st.trigger_parameter_change()
    st.reset()

def finish_state(st, desc='finish-state', invert='guess'):
    """
    Final optimization for the best-possible state.

    Runs a local add-subtract to capture any difficult-to-feature particles,
    then does another set of optimization designed to get to the best
    possible fit.

    Parameters
    ----------
    st : :class:`peri.states.ImageState`
        The state to finish
    desc : String, optional
        Description to intermittently save the state as, as passed to
        state.save. Default is `'finish-state'`.
    invert : {'guess', True, False}
        Whether to invert the image for featuring, as passed to
        addsubtract.add_subtract. Default is to guess from the state's
        current particles.

    See Also
    --------
    `peri.opt.addsubtract.add_subtract_locally`
    `peri.opt.optimize.finish`
    """
    for minmass in [None, 0]:
        for _ in range(3):
            npart, poses = addsub.add_subtract_locally(
                st, region_depth=7, minmass=minmass, invert=invert)
            if npart == 0:
                break

    opt.finish(st, n_loop=1, separate_psf=True, desc=desc, dowarn=False)
    opt.burn(st, mode='polish', desc=desc, n_loop=2, dowarn=False)

    d = opt.finish(st, desc=desc, n_loop=4, dowarn=False)
    if not d['converged']:
        RLOG.warn('Optimization did not converge; consider re-running')

def optimize_particle(state, index, method='gn', doradius=True):
    """
    Optimize a single particle via a local Newton step.

    Methods available are:
        gn : Gauss-Newton with JTJ (recommended)
        nr : Newton-Raphson with hessian

    if doradius, also optimize the radius.
    """
    blocks = state.param_particle(index)

    if not doradius:
        blocks = blocks[:-1]

    g = state.gradloglikelihood(blocks=blocks)
    if method == 'gn':
        h = state.jtj(blocks=blocks)
    elif method == 'nr':
        h = state.hessloglikelihood(blocks=blocks)
    step = np.linalg.solve(h, g)

    h = np.zeros_like(g)
    for i in range(len(g)):
        state.update(blocks[i], state.state[blocks[i]] - step[i])
    return g, h

def makestate(im, pos, rad, slab=None, mem_level='hi'):
    """
    Workhorse for creating & optimizing states with an initial centroid
    guess.

    This is an example function that works for a particular microscope. For
    your own microscope, you'll need to change particulars such as the psf
    type and the orders of the background and illumination.

    Parameters
    ----------
    im : :class:`~peri.util.RawImage`
        A RawImage of the data.
    pos : [N,3] element numpy.ndarray.
        The initial guess for the N particle positions.
    rad : N element numpy.ndarray.
        The initial guess for the N particle radii.
    slab : :class:`peri.comp.objs.Slab` or None, optional
        If not None, a slab corresponding to that in the image. Default
        is None.
    mem_level : {'lo', 'med-lo', 'med', 'med-hi', 'hi'}, optional
        A valid memory level for the state to control the memory overhead
        at the expense of accuracy. Default is `'hi'`

    Returns
    -------
    :class:`~peri.states.ImageState`
        An ImageState with a linked z-scale, a ConfocalImageModel, and all
        the necessary components with orders that are useful for my
        particular test case.
    """
    # ``zscale`` is assumed to be defined at module scope in the original
    # script (e.g. ``zscale = 1.0``); it is not a parameter of this function.
    if slab is not None:
        o = comp.ComponentCollection(
            [
                objs.PlatonicSpheresCollection(pos, rad, zscale=zscale),
                slab
            ],
            category='obj'
        )
    else:
        o = objs.PlatonicSpheresCollection(pos, rad, zscale=zscale)

    p = exactpsf.FixedSSChebLinePSF()
    npts, iorder = _calc_ilm_order(im.get_image().shape)
    i = ilms.BarnesStreakLegPoly2P1D(npts=npts, zorder=iorder)
    b = ilms.LegendrePoly2P1D(order=(9, 3, 5), category='bkg')
    c = comp.GlobalScalar('offset', 0.0)
    s = states.ImageState(im, [o, i, b, c, p])
    runner.link_zscale(s)
    if mem_level != 'hi':
        s.set_mem_level(mem_level)

    opt.do_levmarq(s, ['ilm-scale'], max_iter=1, run_length=6, max_mem=1e4)
    return s

def _calc_ilm_order(imshape):
    """
    Calculates an ilm order based on the shape of an image. This is based
    on something that works for our particular images. Your mileage will
    vary.

    Parameters
    ----------
    imshape : 3-element list-like
        The shape of the image.

    Returns
    -------
    npts : tuple
        The number of points to use for the ilm.
    zorder : int
        The order of the z-polynomial.
    """
    zorder = int(imshape[0] / 6.25) + 1
    l_npts = int(imshape[1] / 42.5) + 1
    npts = ()
    for a in range(l_npts):
        if a < 5:
            npts += (int(imshape[2] * [59, 39, 29, 19, 14][a] / 512.) + 1,)
        else:
            npts += (int(imshape[2] * 11 / 512.) + 1,)
    return npts, zorder

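# A worked example (illustrative): for a 50x256x512 image the helper above
# gives zorder = int(50 / 6.25) + 1 = 9 and seven Barnes point counts, the
# first five scaled from the [59, 39, 29, 19, 14] table and the rest fixed.
assert _calc_ilm_order((50, 256, 512)) == ((60, 40, 30, 20, 15, 12, 12), 9)
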
def _check_for_inception(self, root_dict):
    '''
    Used to check if there is a dict in a dict
    '''
    for key in root_dict:
        if isinstance(root_dict[key], dict):
            root_dict[key] = ResponseObject(root_dict[key])

    return root_dict

def randomize_parameters(self, ptp=0.2, fourier=False, vmin=None, vmax=None):
    """
    Create random parameters for this ILM that mimic experiments as closely
    as possible without real assumptions.
    """
    if vmin is not None and vmax is not None:
        ptp = vmax - vmin
    elif vmax is not None and vmin is None:
        vmin = vmax - ptp
    elif vmin is not None and vmax is None:
        vmax = vmin + ptp
    else:
        vmax = 1.0
        vmin = vmax - ptp

    self.set_values(self.category + '-scale', 1.0)
    self.set_values(self.category + '-off', 0.0)

    for k, v in iteritems(self.poly_params):
        norm = (self.zorder + 1.0) * 2
        self.set_values(k, ptp * (np.random.rand() - 0.5) / norm)

    for i, p in enumerate(self.barnes_params):
        N = len(p)
        if fourier:
            t = ((np.random.rand(N) - 0.5) +
                 1.j * (np.random.rand(N) - 0.5)) / (np.arange(N) + 1)
            q = np.real(np.fft.ifftn(t)) / (i + 1)
        else:
            t = ptp * np.sqrt(N) * (np.random.rand(N) - 0.5)
            q = np.cumsum(t) / (i + 1)

        q = ptp * q / q.ptp() / len(self.barnes_params)
        q -= q.mean()
        self.set_values(p, q)

    self._norm_stat = [ptp, vmin]

    if self.shape:
        self.initialize()

    if self._parent:
        param = self.category + '-scale'
        self.trigger_update(param, self.get_values(param))

def _barnes(self, pos):
    """Creates a barnes interpolant & calculates its values."""
    b_in = self.b_in
    dist = lambda x: np.sqrt(np.dot(x, x))

    # we take a filter size as the max distance between the grids along
    # x or y:
    sz = self.npts[1]

    coeffs = self.get_values(self.barnes_params)

    b = BarnesInterpolationND(
        b_in, coeffs, filter_size=self.filtsize, damp=0.9, iterations=3,
        clip=self.local_updates, clipsize=self.barnes_clip_size,
        blocksize=100  # FIXME magic blocksize
    )
    return b(pos)

def schedules(self):
    '''
    Returns details of the posting schedules associated with a social media
    profile.
    '''
    url = PATHS['GET_SCHEDULES'] % self.id

    self.__schedules = self.api.get(url=url)

    return self.__schedules

def schedules(self, schedules):
    '''
    Set the posting schedules for the specified social media profile.
    '''
    url = PATHS['UPDATE_SCHEDULES'] % self.id

    data_format = "schedules[0][%s][]=%s&"
    post_data = ""

    for format_type, values in schedules.iteritems():
        for value in values:
            post_data += data_format % (format_type, value)

    self.api.post(url=url, data=post_data)

def moment(p, v, order=1):
    """Calculates the moments of the probability distribution p with
    vector v."""
    if order == 1:
        return (v * p).sum()
    elif order == 2:
        return np.sqrt(((v**2) * p).sum() - (v * p).sum()**2)

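# A worked example (illustrative): for a uniform distribution over four
# points, the first moment is the mean and the second is the standard
# deviation. Assumes numpy.
import numpy as np

p = np.array([0.25, 0.25, 0.25, 0.25])
v = np.arange(4.0)
assert moment(p, v, order=1) == 1.5                         # mean
assert abs(moment(p, v, order=2) - np.sqrt(1.25)) < 1e-12   # std dev
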
def psf_slice(self, zint, size=11, zoffset=0., getextent=False):
    """
    Calculates the 3D psf at a particular z pixel height.

    Parameters
    ----------
    zint : float
        z pixel height in image coordinates, converted to 1/k by the
        function using the slab position as well
    size : int, list, tuple
        The size over which to calculate the psf, can be 1 or 3 elements
        for the different axes in image pixel coordinates
    zoffset : float
        Offset in pixel units to use in the calculation of the psf
    cutval : float
        If not None, the psf will be cut along a curve corresponding to
        p(r) == 0 with exponential damping exp(-d^4)
    getextent : boolean
        If True, also return the extent of the psf in pixels for example
        to get the support size. Can only be used with cutval.
    """
    # calculate the current pixel value in 1/k, making sure we are above
    # the slab
    zint = max(self._p2k(self._tz(zint)), 0)
    offset = np.array([zoffset * (zint > 0), 0, 0])
    scale = [self.param_dict[self.zscale], 1.0, 1.0]

    # create the coordinate vectors for where to actually calculate the psf
    tile = util.Tile(left=0, size=size, centered=True)
    vecs = tile.coords(form='flat')
    vecs = [self._p2k(s * i + o) for i, s, o in zip(vecs, scale, offset)]

    psf = self.psffunc(*vecs[::-1], zint=zint, **self.pack_args()).T
    vec = tile.coords(form='meshed')

    # create a smoothly varying point spread function by cutting off the
    # psf at a certain value and smoothly taking it to zero
    if self.cutoffval is not None and not self.cutbyval:
        # find the edges of the PSF
        edge = psf > psf.max() * self.cutoffval
        dd = nd.morphology.distance_transform_edt(~edge)

        # calculate the new PSF and normalize it to the new support
        psf = psf * np.exp(-dd**4)
        psf /= psf.sum()

        if getextent:
            # the size is determined by the edge plus a 2 pad for the
            # exponential damping to zero at the edge
            size = np.array([
                (vec * edge).min(axis=(1, 2, 3)) - 2,
                (vec * edge).max(axis=(1, 2, 3)) + 2,
            ]).T
            return psf, vec, size
        return psf, vec

    # perform a cut by value instead
    if self.cutoffval is not None and self.cutbyval:
        cutval = self.cutoffval * psf.max()

        dd = (psf - cutval) / cutval
        dd[dd > 0] = 0.

        # calculate the new PSF and normalize it to the new support
        psf = psf * np.exp(-(dd / self.cutfallrate)**4)
        psf /= psf.sum()

        # let the small values determine the edges
        edge = psf > cutval * self.cutedgeval
        if getextent:
            # the size is determined by the edge plus a 2 pad for the
            # exponential damping to zero at the edge
            size = np.array([
                (vec * edge).min(axis=(1, 2, 3)) - 2,
                (vec * edge).max(axis=(1, 2, 3)) + 2,
            ]).T
            return psf, vec, size
        return psf, vec

    return psf, vec

def _p2k(self, v):
    r"""Convert from pixel to 1/k_incoming (laser_wavelength/(2\pi)) units."""
    return 2 * np.pi * self.pxsize * v / self.param_dict['psf-laser-wavelength']

def _tz(self, z):
    """Transform z to real-space coordinates from tile coordinates."""
    return (z - self.param_dict['psf-zslab']) * self.param_dict[self.zscale]

def measure_size_drift(self, z, size=31, zoffset=0.):
    """Returns the 'size' of the psf in each direction at a particular
    z (px)."""
    drift = 0.0
    for i in range(self.measurement_iterations):
        psf, vec = self.psf_slice(z, size=size, zoffset=zoffset + drift)
        psf = psf / psf.sum()

        drift += moment(psf, vec[0], order=1)
        psize = [moment(psf, j, order=2) for j in vec]
    return np.array(psize), drift

def characterize_psf(self):
    """Get support size and drift polynomial for current set of params."""
    # there may be an issue with the support and characterization --
    # it might be best to do the characterization with the same support
    # as the calculated psf.
    l, u = max(self.zrange[0], self.param_dict['psf-zslab']), self.zrange[1]

    size_l, drift_l = self.measure_size_drift(l)
    size_u, drift_u = self.measure_size_drift(u)

    # must be odd for now or have a better system for getting the center
    self.support = util.oddify(2 * self.support_factor * size_u.astype('int'))
    self.drift_poly = np.polyfit([l, u], [drift_l, drift_u], 1)

    if self.cutoffval is not None:
        psf, vec, size_l = self.psf_slice(l, size=51, zoffset=drift_l,
                                          getextent=True)
        psf, vec, size_u = self.psf_slice(u, size=51, zoffset=drift_u,
                                          getextent=True)

        ss = [np.abs(i).sum(axis=-1) for i in [size_l, size_u]]
        self.support = util.oddify(util.amax(*ss))

def _kpad(self, field, finalshape, zpad=False, norm=True):
    """
    fftshift and pad the field with zeros until it has size finalshape.
    if zpad is off, then no padding is put on the z direction. returns
    the fourier transform of the field
    """
    currshape = np.array(field.shape)

    if any(finalshape < currshape):
        raise IndexError("PSF tile size is less than minimum support size")

    d = finalshape - currshape

    # fix off-by-one issues when going odd to even tile sizes
    o = d % 2
    d = np.floor_divide(d, 2)

    if not zpad:
        o[0] = 0

    axes = None
    pad = tuple((d[i] + o[i], d[i]) for i in [0, 1, 2])
    rpsf = np.pad(field, pad, mode='constant', constant_values=0)
    rpsf = np.fft.ifftshift(rpsf, axes=axes)
    kpsf = fft.rfftn(rpsf, **fftkwargs)

    if norm:
        kpsf /= kpsf[0, 0, 0]
    return kpsf

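# A stand-alone sketch of the off-by-one padding arithmetic used above
# (illustrative, pure numpy): each axis's pad pair sums to the total padding
# needed, with the odd leftover going on the leading side.
import numpy as np

currshape = np.array([5, 6, 7])
finalshape = np.array([8, 8, 8])
d = finalshape - currshape            # total padding needed: [3 2 1]
o = d % 2                             # odd leftovers: [1 0 1]
d = np.floor_divide(d, 2)             # symmetric halves: [1 1 0]
pad = tuple((d[i] + o[i], d[i]) for i in [0, 1, 2])
assert pad == ((2, 1), (1, 1), (1, 0))
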
def pack_args(self):
    """
    Pack the parameters into the form necessary for the integration
    routines above. For example, packs for calculate_linescan_psf.
    """
    mapper = {
        'psf-kfki': 'kfki',
        'psf-alpha': 'alpha',
        'psf-n2n1': 'n2n1',
        'psf-sigkf': 'sigkf',
        'psf-sph6-ab': 'sph6_ab',
        'psf-laser-wavelength': 'laser_wavelength',
        'psf-pinhole-width': 'pinhole_width'
    }
    bads = [self.zscale, 'psf-zslab']

    d = {}
    for k, v in iteritems(mapper):
        if k in self.param_dict:
            d[v] = self.param_dict[k]

    d.update({
        'polar_angle': self.polar_angle,
        'normalize': self.normalize,
        'include_K3_det': self.use_J1
    })

    if self.polychromatic:
        d.update({'nkpts': self.nkpts})
        d.update({'k_dist': self.k_dist})

    if self.do_pinhole:
        d.update({'nlpts': self.num_line_pts})
        d.update({'use_laggauss': True})
    return d

def psffunc(self, *args, **kwargs):
    """Calculates a linescan psf."""
    if self.polychromatic:
        func = psfcalc.calculate_polychrome_linescan_psf
    else:
        func = psfcalc.calculate_linescan_psf
    return func(*args, **kwargs)

def psffunc(self, x, y, z, **kwargs):
    """Calculates a pinhole psf."""
    # do_pinhole?? FIXME
    if self.polychromatic:
        func = psfcalc.calculate_polychrome_pinhole_psf
    else:
        func = psfcalc.calculate_pinhole_psf

    x0, y0 = [psfcalc.vec_to_halfvec(v) for v in [x, y]]
    vls = psfcalc.wrap_and_calc_psf(x0, y0, z, func, **kwargs)
    return vls / vls.sum()

def characterize_psf(self):
    """Get support size and drift polynomial for current set of params."""
    l, u = max(self.zrange[0], self.param_dict['psf-zslab']), self.zrange[1]

    size_l, drift_l = self.measure_size_drift(l, size=self.support)
    size_u, drift_u = self.measure_size_drift(u, size=self.support)
    self.drift_poly = np.polyfit([l, u], [drift_l, drift_u], 1)

def _req(self, url, method='GET', **kw):
    '''Make request and convert JSON response to python objects.'''
    send = requests.post if method == 'POST' else requests.get
    try:
        r = send(
            url,
            headers=self._token_header(),
            timeout=self.settings['timeout'],
            **kw)
    except requests.exceptions.Timeout:
        raise ApiError(
            'Request timed out (%s seconds)' % self.settings['timeout'])

    try:
        json = r.json()
    except ValueError:
        raise ApiError('Received non-JSON response from API')

    if json.get('status') != 'ok':
        raise ApiError(
            'API error: received unexpected json from API: %s' % json)
    return json

def get_active_bets(self, project_id=None):
    '''Returns all active bets.'''
    url = urljoin(
        self.settings['bets_url'],
        'bets?state=fresh,active,accept_end&page=1&page_size=100')
    if project_id is not None:
        url += '&kava_project_id={}'.format(project_id)

    bets = []
    has_next_page = True
    while has_next_page:
        res = self._req(url)
        bets.extend(res['bets']['results'])
        url = res['bets'].get('next')
        has_next_page = bool(url)
    return bets

def get_bets(self, type=None, order_by=None, state=None, project_id=None,
             page=None, page_size=None):
    """Return bets with given filters and ordering.

    :param type: return bets only with this type.
        Use None to include all (default).
    :param order_by: '-last_stake' or 'last_stake' to sort by stake's
        created date or None for default ordering.
    :param state: one of 'active', 'closed', 'all' (default 'active').
    :param project_id: return bets associated with given project id in kava
    :param page: default 1.
    :param page_size: page size (default 100).
    """
    if page is None:
        page = 1
    if page_size is None:
        page_size = 100

    if state == 'all':
        _states = []  # all states == no filter
    elif state == 'closed':
        _states = self.CLOSED_STATES
    else:
        _states = self.ACTIVE_STATES

    url = urljoin(
        self.settings['bets_url'],
        'bets?page={}&page_size={}'.format(page, page_size))
    url += '&state={}'.format(','.join(_states))
    if type is not None:
        url += '&type={}'.format(type)
    if order_by in ['-last_stake', 'last_stake']:
        url += '&order_by={}'.format(order_by)
    if project_id is not None:
        url += '&kava_project_id={}'.format(project_id)

    res = self._req(url)
    return res['bets']['results']

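# For example (illustrative), get_bets(type='event', state='closed', page=2)
# requests a URL of the form:
#
#   <bets_url>/bets?page=2&page_size=100&state=<CLOSED_STATES>&type=event
#
# where <CLOSED_STATES> is the comma-joined self.CLOSED_STATES list.
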
def get_project_slug(self, bet):
    '''Return slug of a project that given bet is associated with
    or None if bet is not associated with any project.
    '''
    if bet.get('form_params'):
        params = json.loads(bet['form_params'])
        return params.get('project')
    return None

def subscribe(self, event, bet_ids):
    '''Subscribe to event for given bet ids.'''
    if not self._subscriptions.get(event):
        self._subscriptions[event] = set()
    self._subscriptions[event] = self._subscriptions[event].union(bet_ids)

def preview(context):
    """Opens local preview of your blog website."""
    config = context.obj
    pelican(config, '--verbose', '--ignore-cache')

    server_proc = None
    os.chdir(config['OUTPUT_DIR'])
    try:
        try:
            command = 'python -m http.server ' + str(PORT)
            server_proc = run(command, bg=True)
            time.sleep(3)
            # launch on the same port the server was started on
            click.launch('http://localhost:{}'.format(PORT))
            time.sleep(5)
            pelican(config, '--autoreload')
        except Exception:
            if server_proc is not None:
                server_proc.kill()
            raise
    except KeyboardInterrupt:
        abort(context)

def get_collection_endpoint(cls):
    """
    Get the relative path to the API resource collection.

    If cls.Meta.collection_endpoint is not set, it will default to the
    lowercase name of the resource class plus an "s" and the terminating
    "/".

    :param cls: Resource class
    :return: Relative path to the resource collection
    """
    if cls.Meta.collection_endpoint is not None:
        return cls.Meta.collection_endpoint
    return cls.__name__.lower() + "s/"

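# A minimal sketch (hypothetical resource class): with no explicit
# collection_endpoint the path falls back to the lowercased class name
# plus "s/".
class Dog(object):
    class Meta(object):
        collection_endpoint = None

assert get_collection_endpoint(Dog) == "dogs/"
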
def send(self, url, http_method, **client_args):
    """
    Make the actual request to the API.

    :param url: URL
    :param http_method: The method used to make the request to the API
    :param client_args: Arguments to be sent to the auth client
    :return: requests' response object
    """
    return self.client.send(url, http_method, **client_args)

def get(self, resource_id):
    """
    Get one single resource from the API.

    :param resource_id: Id of the resource to be retrieved
    :return: Retrieved resource
    """
    response = self.send(self.get_resource_endpoint(resource_id), "get")

    try:
        resource = self.resource_class(self.client)
    except (ValueError, TypeError):
        return None
    else:
        resource.update_from_dict(
            self.client.get_response_data(response, self.Meta.parse_json))
        return resource

def filter(self, **search_args):
    """
    Get a filtered list of resources.

    :param search_args: To be translated into ?arg1=value1&arg2=value2...
    :return: A list of resources
    """
    search_args = search_args or {}
    raw_resources = []

    for url, paginator_params in self.paginator.get_urls(
            self.get_collection_endpoint()):
        search_args.update(paginator_params)
        response = self.paginator.process_response(
            self.send(url, "get", params=search_args))
        response_data = self.client.get_response_data(
            response, self.Meta.parse_json)
        if self.json_collection_attribute is not None:
            raw_resources += response_data[self.json_collection_attribute]
        else:
            raw_resources += response_data

    resources = []
    for raw_resource in raw_resources:
        try:
            resource = self.resource_class(self.client)
        except (ValueError, TypeError):
            continue
        else:
            resource.update_from_dict(raw_resource)
            resources.append(resource)

    return resources

def create(self, **kwargs):
    """
    Create a resource on the server.

    :param kwargs: Attributes (field names and values) of the new resource
    """
    resource = self.resource_class(self.client)
    resource.update_from_dict(kwargs)
    resource.save(force_create=True)
    return resource

def send(self, relative_path, http_method, **requests_args):
    """
    Subclasses must implement this method, that will be used to send API
    requests with proper auth.

    :param relative_path: URL path relative to self.base_url
    :param http_method: HTTP method
    :param requests_args: kwargs to be sent to requests
    :return:
    """
    url = urljoin(self.base_url, relative_path)
    return self.session.request(http_method, url, **requests_args)

def get_response_data(self, response, parse_json=True):
    """
    Get response data or throw an appropriate exception.

    :param response: requests response object
    :param parse_json: if True, response will be parsed as JSON
    :return: response data, either as json or as a regular
        response.content object
    """
    if response.status_code in (requests.codes.ok, requests.codes.created):
        if parse_json:
            return response.json()
        return response.content
    elif response.status_code == requests.codes.bad_request:
        response_json = response.json()
        raise BadRequestException(
            response_json.get("error", False) or
            response_json.get(
                "errors",
                _("Bad Request: {text}").format(text=response.text)))
    elif response.status_code == requests.codes.not_found:
        raise NotFoundException(
            _("Resource not found: {url}").format(url=response.url))
    elif response.status_code == requests.codes.internal_server_error:
        raise ServerErrorException(_("Internal server error"))
    elif response.status_code in (requests.codes.unauthorized,
                                  requests.codes.forbidden):
        raise AuthErrorException(_("Access denied"))
    elif response.status_code == requests.codes.too_many_requests:
        raise RateLimitException(_(response.text))
    else:
        raise ServerErrorException(_("Unknown error occurred"))

def send(self, relative_path, http_method, **requests_args):
    """
    Make an unauthenticated request.

    :param relative_path: URL path relative to self.base_url
    :param http_method: HTTP method
    :param requests_args: kwargs to be sent to requests
    :return: requests' response object
    """
    if http_method != "get":
        warnings.warn(
            _("You are using methods other than get with no "
              "authentication!!!"))
    return super(NoAuthClient, self).send(
        relative_path, http_method, **requests_args)

def write(context):
    """Starts a new article."""
    config = context.obj

    title = click.prompt('Title')
    author = click.prompt('Author', default=config.get('DEFAULT_AUTHOR'))

    slug = slugify(title)
    creation_date = datetime.now()
    basename = '{:%Y-%m-%d}_{}.md'.format(creation_date, slug)
    meta = (
        ('Title', title),
        ('Date', '{:%Y-%m-%d %H:%M}:00'.format(creation_date)),
        ('Modified', '{:%Y-%m-%d %H:%M}:00'.format(creation_date)),
        ('Author', author),
    )

    file_content = ''
    for key, value in meta:
        file_content += '{}: {}\n'.format(key, value)
    file_content += '\n\n'
    file_content += 'Text...\n\n'
    file_content += '![image description]({filename}/images/my-photo.jpg)\n\n'
    file_content += 'Text...\n\n'

    os.makedirs(config['CONTENT_DIR'], exist_ok=True)
    path = os.path.join(config['CONTENT_DIR'], basename)

    with click.open_file(path, 'w') as f:
        f.write(file_content)

    click.echo(path)
    click.launch(path)

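# The generated article stub (illustrative, with a hypothetical title and
# author) looks like:
#
#   Title: Hello World
#   Date: 2019-04-01 12:30:00
#   Modified: 2019-04-01 12:30:00
#   Author: Jane Doe
#
#
#   Text...
#
#   ![image description]({filename}/images/my-photo.jpg)
#
#   Text...
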
def lint(context):
    """Looks for errors in source code of your blog."""
    config = context.obj
    try:
        run('flake8 {dir} --exclude={exclude}'.format(
            dir=config['CWD'],
            exclude=','.join(EXCLUDE),
        ))
    except SubprocessError:
        context.exit(1)

def set_real_value_class(self):
    """
    value_class is initially a string with the import path to the resource
    class, but we need to get the actual class before doing any work.

    We do not expect the actual class to be in value_class from the
    beginning, to avoid nasty chicken-and-egg import errors.
    """
    if self.value_class is not None and isinstance(self.value_class, str):
        module_name, dot, class_name = self.value_class.rpartition(".")
        module = __import__(module_name, fromlist=[class_name])
        self.value_class = getattr(module, class_name)

    self._initialized = True

def publish(context):
    """Saves changes and sends them to GitHub."""
    header('Recording changes...')
    run('git add -A')

    header('Displaying changes...')
    run('git -c color.status=always status')

    if not click.confirm('\nContinue publishing'):
        run('git reset HEAD --')
        abort(context)

    header('Saving changes...')
    try:
        run('git commit -m "{message}"'.format(
            message='Publishing {}'.format(choose_commit_emoji())
        ), capture=True)
    except subprocess.CalledProcessError as e:
        if 'nothing to commit' not in e.stdout:
            raise
        else:
            click.echo('Nothing to commit.')

    header('Pushing to GitHub...')
    branch = get_branch()
    run('git push origin {branch}:{branch}'.format(branch=branch))

    pr_link = get_pr_link(branch)
    if pr_link:
        click.launch(pr_link)

def deploy(context):
    """Uploads new version of the blog website."""
    config = context.obj

    header('Generating HTML...')
    pelican(config, '--verbose', production=True)

    header('Removing unnecessary output...')
    unnecessary_paths = [
        'author', 'category', 'tag', 'feeds', 'tags.html',
        'authors.html', 'categories.html', 'archives.html',
    ]
    for path in unnecessary_paths:
        remove_path(os.path.join(config['OUTPUT_DIR'], path))

    if os.environ.get('TRAVIS'):  # Travis CI
        header('Setting up Git...')
        run(
            'git config user.name ' +
            run('git show --format="%cN" -s', capture=True)
        )
        run(
            'git config user.email ' +
            run('git show --format="%cE" -s', capture=True)
        )

        github_token = os.environ.get('GITHUB_TOKEN')
        repo_slug = os.environ.get('TRAVIS_REPO_SLUG')
        origin = 'https://{}@github.com/{}.git'.format(github_token, repo_slug)
        run('git remote set-url origin ' + origin)

    header('Rewriting gh-pages branch...')
    run('ghp-import -m "{message}" {dir}'.format(
        message='Deploying {}'.format(choose_commit_emoji()),
        dir=config['OUTPUT_DIR'],
    ))

    header('Pushing to GitHub...')
    run('git push origin gh-pages:gh-pages --force')

def signed_number(number, precision=2):
    """
    Return the given number as a string with a sign in front of it, i.e.
    `+` if the number is positive, `-` otherwise.
    """
    prefix = '' if number <= 0 else '+'
    number_str = '{}{:.{precision}f}'.format(prefix, number,
                                             precision=precision)
    return number_str

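# Behaviour sketch: positive values gain an explicit '+'; zero and negative
# values do not (the '-' comes from ordinary float formatting).
assert signed_number(1.5) == '+1.50'
assert signed_number(-0.5) == '-0.50'
assert signed_number(0) == '0.00'
assert signed_number(2.5, precision=1) == '+2.5'
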
def balance(ctx):
    """
    Show Zebra balance.

    Like the hours balance, vacation left, etc.
    """
    backend = plugins_registry.get_backends_by_class(ZebraBackend)[0]

    timesheet_collection = get_timesheet_collection_for_context(ctx, None)
    hours_to_be_pushed = timesheet_collection.get_hours(
        pushed=False, ignored=False, unmapped=False)

    today = datetime.date.today()
    user_info = backend.get_user_info()
    timesheets = backend.get_timesheets(
        get_first_dow(today), get_last_dow(today))
    total_duration = sum(
        [float(timesheet['time']) for timesheet in timesheets]
    )

    vacation = hours_to_days(user_info['vacation']['difference'])
    vacation_balance = '{} days, {:.2f} hours'.format(*vacation)

    hours_balance = user_info['hours']['hours']['balance']

    click.echo("Hours balance: {}".format(signed_number(hours_balance)))
    click.echo("Hours balance after push: {}".format(
        signed_number(hours_balance + hours_to_be_pushed)))
    click.echo("Hours done this week: {:.2f}".format(total_duration))
    click.echo("Vacation left: {}".format(vacation_balance))

def to_zebra_params(params):
    """
    Transforms the given `params` dict to values that are understood by
    Zebra (e.g. False is represented as 'false').
    """
    def to_zebra_value(value):
        transform_funcs = {
            bool: lambda v: 'true' if v else 'false',
        }
        return transform_funcs.get(type(value), lambda v: v)(value)

    return {param: to_zebra_value(value) for param, value in params.items()}

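# A worked example: only booleans are rewritten; everything else passes
# through untouched.
assert to_zebra_params({'admin': False, 'page': 2}) == \
    {'admin': 'false', 'page': 2}
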
def show_response_messages(response_json):
    """Show all messages in the `messages` key of the given dict."""
    message_type_kwargs = {
        'warning': {'fg': 'yellow'},
        'error': {'fg': 'red'},
    }

    for message in response_json.get('messages', []):
        click.secho(message['text'],
                    **message_type_kwargs.get(message['type'], {}))

def update_alias_mapping(settings, alias, new_mapping):
    """
    Override `alias` mapping in the user configuration file with the given
    `new_mapping`, which should be a tuple with 2 or 3 elements (in the
    form `(project_id, activity_id, role_id)`).
    """
    mapping = aliases_database[alias]
    new_mapping = Mapping(mapping=new_mapping, backend=mapping.backend)
    aliases_database[alias] = new_mapping

    settings.add_alias(alias, new_mapping)
    settings.write_config()

def photos(context, path):
    """Adds images to the last article."""
    config = context.obj

    header('Looking for the latest article...')
    article_filename = find_last_article(config['CONTENT_DIR'])
    if not article_filename:
        return click.secho('No articles.', fg='red')
    click.echo(os.path.basename(article_filename))

    header('Looking for images...')
    images = list(sorted(find_images(path)))
    if not images:
        return click.secho('Found no images.', fg='red')

    for filename in images:
        click.secho(filename, fg='green')
    if not click.confirm('\nAdd these images to the latest article'):
        abort(config)

    url_prefix = os.path.join('{filename}', IMAGES_PATH)
    images_dir = os.path.join(config['CONTENT_DIR'], IMAGES_PATH)
    os.makedirs(images_dir, exist_ok=True)

    header('Processing images...')
    urls = []
    for filename in images:
        image_basename = os.path.basename(filename).replace(' ', '-').lower()
        urls.append(os.path.join(url_prefix, image_basename))
        image_filename = os.path.join(images_dir, image_basename)
        print(filename, image_filename)
        import_image(filename, image_filename)

    content = '\n'
    for url in urls:
        url = url.replace('\\', '/')
        content += '\n![image description]({})\n'.format(url)

    header('Adding to article: {}'.format(article_filename))
    with click.open_file(article_filename, 'a') as f:
        f.write(content)

    click.launch(article_filename)

def _generate_circle(self):
    """Generates the circle."""
    total_weight = 0
    for node in self.nodes:
        total_weight += self.weights.get(node, 1)

    for node in self.nodes:
        weight = 1

        if node in self.weights:
            weight = self.weights.get(node)

        factor = math.floor((40 * len(self.nodes) * weight) / total_weight)

        for j in range(0, int(factor)):
            b_key = bytearray(self._hash_digest('%s-%s' % (node, j)))

            for i in range(0, 3):
                key = self._hash_val(b_key, lambda x: x + i * 4)
                self.ring[key] = node
                self._sorted_keys.append(key)

    self._sorted_keys.sort()

def get_node(self, string_key):
    """Given a string key a corresponding node in the hash ring is returned.

    If the hash ring is empty, `None` is returned.
    """
    pos = self.get_node_pos(string_key)
    if pos is None:
        return None
    return self.ring[self._sorted_keys[pos]]

def get_node_pos(self, string_key):
    """Given a string key, the position of the corresponding node in the
    hash ring is returned.

    If the hash ring is empty, `None` is returned.
    """
    if not self.ring:
        return None

    key = self.gen_key(string_key)

    nodes = self._sorted_keys
    pos = bisect(nodes, key)

    if pos == len(nodes):
        return 0
    else:
        return pos

def iterate_nodes(self, string_key, distinct=True):
    """Given a string key it returns the nodes as a generator that can hold
    the key.

    The generator iterates one time through the ring starting at the
    correct position.

    if `distinct` is set, then the nodes returned will be unique, i.e. no
    virtual copies will be returned.
    """
    if not self.ring:
        yield None, None
        # an empty ring has no position to iterate from, so stop here
        return

    returned_values = set()

    def distinct_filter(value):
        if str(value) not in returned_values:
            returned_values.add(str(value))
            return value

    pos = self.get_node_pos(string_key)
    for key in self._sorted_keys[pos:]:
        val = distinct_filter(self.ring[key])
        if val:
            yield val

    for i, key in enumerate(self._sorted_keys):
        if i < pos:
            val = distinct_filter(self.ring[key])
            if val:
                yield val

def gen_key(self, key):
    """Given a string key it returns a long value, this long value
    represents a place on the hash ring.

    md5 is currently used because it mixes well.
    """
    b_key = self._hash_digest(key)
    return self._hash_val(b_key, lambda x: x)

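# A stand-alone sketch of the md5-to-ring-position idea used above
# (illustrative; ``_demo_gen_key`` assumes a 4-byte little-endian packing,
# which may differ from the real _hash_val implementation).
import hashlib
from bisect import bisect

def _demo_gen_key(key):
    b_key = hashlib.md5(key.encode('utf-8')).digest()
    return (b_key[3] << 24) | (b_key[2] << 16) | (b_key[1] << 8) | b_key[0]

ring_keys = sorted(_demo_gen_key('node-%d' % i) for i in range(3))
pos = bisect(ring_keys, _demo_gen_key('my-cache-key')) % len(ring_keys)
# ring_keys[pos] is the ring position that owns 'my-cache-key'
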
def get_number_of_app_ports(app):
    """
    Get the number of ports for the given app JSON. This roughly follows the
    logic in marathon-lb for finding app IPs/ports, although we are only
    interested in the quantity of ports an app should have and don't
    consider the specific IPs/ports of individual tasks:
    https://github.com/mesosphere/marathon-lb/blob/v1.10.3/utils.py#L393-L415

    :param app: The app JSON from the Marathon API.
    :return: The number of ports for the app.
    """
    mode = _get_networking_mode(app)
    ports_list = None
    if mode == 'host':
        ports_list = _get_port_definitions(app)
    elif mode == 'container/bridge':
        ports_list = _get_port_definitions(app)
        if ports_list is None:
            ports_list = _get_container_port_mappings(app)
    elif mode == 'container':
        ports_list = _get_ip_address_discovery_ports(app)
        # Marathon 1.5+: the ipAddress field is missing -> ports_list is None
        # Marathon <1.5: the ipAddress field can be present, but ports_list
        # can still be empty while the container port mapping is not :-/
        if not ports_list:
            ports_list = _get_container_port_mappings(app)
    else:
        raise RuntimeError(
            "Unknown Marathon networking mode '{}'".format(mode))

    return len(ports_list)

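# For example (illustrative Marathon app JSON fragments):
#
#   {'networks': [{'mode': 'host'}],
#    'portDefinitions': [{'port': 0}, {'port': 0}]}            -> 2 ports
#
#   {'networks': [{'mode': 'container/bridge'}],
#    'container': {'portMappings': [{'containerPort': 80}]}}   -> 1 port
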
def _get_networking_mode(app): """ Get the Marathon networking mode for the app. """ # Marathon 1.5+: there is a `networks` field networks = app.get('networks') if networks: # Modes cannot be mixed, so assigning the last mode is fine return networks[-1].get('mode', 'container') # Older Marathon: determine equivalent network mode container = app.get('container') if container is not None and 'docker' in container: docker_network = container['docker'].get('network') if docker_network == 'USER': return 'container' elif docker_network == 'BRIDGE': return 'container/bridge' return 'container' if _is_legacy_ip_per_task(app) else 'host'
def _get_container_port_mappings(app): """ Get the ``portMappings`` field for the app container. """ container = app['container'] # Marathon 1.5+: container.portMappings field port_mappings = container.get('portMappings') # Older Marathon: container.docker.portMappings field if port_mappings is None and 'docker' in container: port_mappings = container['docker'].get('portMappings') return port_mappings
def sort_pem_objects(pem_objects): """ Given a list of pem objects, sort the objects into the private key, leaf certificate, and list of CA certificates in the trust chain. This function assumes that the list of pem objects will contain exactly one private key and exactly one leaf certificate and that only key and certificate type objects are provided. """ keys, certs, ca_certs = [], [], [] for pem_object in pem_objects: if isinstance(pem_object, pem.Key): keys.append(pem_object) else: # This assumes all pem objects provided are either of type pem.Key # or pem.Certificate. Technically, there are CSR and CRL types, but # we should never be passed those. if _is_ca(pem_object): ca_certs.append(pem_object) else: certs.append(pem_object) [key], [cert] = keys, certs return key, cert, ca_certs
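A short sketch of how this is typically fed, assuming a combined PEM bundle on disk; `pem.parse_file` and `as_bytes()` are real `pem` library APIs, while the file names are placeholders.

import pem

pem_objects = pem.parse_file('combined.pem')  # one key, one leaf, N CA certs
key, cert, ca_certs = sort_pem_objects(pem_objects)

# Reassemble the pieces in the order HAProxy expects them.
with open('haproxy.pem', 'wb') as f:
    f.write(key.as_bytes() + cert.as_bytes()
            + b''.join(ca.as_bytes() for ca in ca_certs))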
def _cert_data_to_pem_objects(cert_data): """ Given a non-None response from the Vault key/value store, convert the key/values into a list of PEM objects. """ pem_objects = [] for key in ['privkey', 'cert', 'chain']: pem_objects.extend(pem.parse(cert_data[key].encode('utf-8'))) return pem_objects
def raise_for_not_ok_status(response): """ Raises a `requests.exceptions.HTTPError` if the response has a non-200 status code. """ if response.code != OK: raise HTTPError('Non-200 response code (%s) for url: %s' % ( response.code, uridecode(response.request.absoluteURI))) return response
def _sse_content_with_protocol(response, handler, **sse_kwargs): """ Sometimes we need the protocol object so that we can manipulate the underlying transport in tests. """ protocol = SseProtocol(handler, **sse_kwargs) finished = protocol.when_finished() response.deliverBody(protocol) return finished, protocol
def sse_content(response, handler, **sse_kwargs): """ Callback to collect the Server-Sent Events content of a response. Callbacks passed will receive event data. :param response: The response from the SSE request. :param handler: The handler for the SSE protocol. """ # An SSE response must be 200/OK and have content-type 'text/event-stream' raise_for_not_ok_status(response) raise_for_header(response, 'Content-Type', 'text/event-stream') finished, _ = _sse_content_with_protocol(response, handler, **sse_kwargs) return finished
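A hedged wiring example using `treq` directly; the handler signature (event name, data string) is inferred from `get_events` further below, and the Marathon address is a placeholder.

import treq

def handler(event, data):
    print('SSE event %r: %s' % (event, data))

d = treq.get('http://marathon.mesos:8080/v2/events',
             headers={'Accept': 'text/event-stream'},
             unbuffered=True)
d.addCallback(sse_content, handler)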
def _request(self, failure, endpoints, *args, **kwargs): """ Recursively make requests to each endpoint in ``endpoints``. """ # We've run out of endpoints, fail if not endpoints: return failure endpoint = endpoints.pop(0) d = super(MarathonClient, self).request(*args, url=endpoint, **kwargs) # If something goes wrong, call ourselves again with the remaining # endpoints d.addErrback(self._request, endpoints, *args, **kwargs) return d
def get_json_field(self, field, **kwargs): """ Perform a GET request and get the contents of the JSON response. Marathon's JSON responses tend to contain an object with a single key which points to the actual data of the response. For example /v2/apps returns something like {"apps": [{"id": "/app1"}, {"id": "/app2"}]}. We're interested in the contents of "apps". This method will raise an error if: * There is an error response code * The field with the given name cannot be found """ d = self.request( 'GET', headers={'Accept': 'application/json'}, **kwargs) d.addCallback(raise_for_status) d.addCallback(raise_for_header, 'Content-Type', 'application/json') d.addCallback(json_content) d.addCallback(self._get_json_field, field) return d
def _get_json_field(self, response_json, field_name): """ Get a JSON field from the response JSON. :param response_json: The parsed JSON content of the response. :param field_name: The name of the field in the JSON to get. """ if field_name not in response_json: raise KeyError('Unable to get value for "%s" from Marathon ' 'response: "%s"' % ( field_name, json.dumps(response_json),)) return response_json[field_name]
def get_events(self, callbacks): """ Attach to Marathon's event stream using Server-Sent Events (SSE). :param callbacks: A dict mapping event types to functions that handle the event data """ d = self.request( 'GET', path='/v2/events', unbuffered=True, # The event_type parameter was added in Marathon 1.3.7. It can be # used to specify which event types we are interested in. On older # versions of Marathon it is ignored, and we ignore events we're # not interested in anyway. params={'event_type': sorted(callbacks.keys())}, headers={ 'Accept': 'text/event-stream', 'Cache-Control': 'no-store' }) def handler(event, data): callback = callbacks.get(event) # Deserialize JSON if a callback is present if callback is not None: callback(json.loads(data)) return d.addCallback( sse_content, handler, reactor=self._reactor, **self._sse_kwargs)
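For example, subscribing to two standard Marathon event types; `marathon_client` stands in for a constructed `MarathonClient` instance.

def on_api_post(data):
    print('App changed:', data.get('appDefinition', {}).get('id'))

def on_attached(data):
    print('Event stream attached:', data.get('remoteAddress'))

d = marathon_client.get_events({
    'api_post_event': on_api_post,
    'event_stream_attached': on_attached,
})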
def parse( self, value: str, type_: typing.Type[typing.Any] = str, subtype: typing.Type[typing.Any] = str, ) -> typing.Any: """ Parse value from string. Convert :code:`value` to .. code-block:: python >>> parser = Config() >>> parser.parse('12345', type_=int) <<< 12345 >>> >>> parser.parse('1,2,3,4', type_=list, subtype=int) <<< [1, 2, 3, 4] :param value: string :param type\\_: the type to return :param subtype: subtype for iterator types :return: the parsed config value """ if type_ is bool: return type_(value.lower() in self.TRUE_STRINGS) try: if isinstance(type_, type) and issubclass( type_, (list, tuple, set, frozenset) ): return type_( self.parse(v.strip(" "), subtype) for v in value.split(",") if value.strip(" ") ) return type_(value) except ValueError as e: raise ConfigError(*e.args)
def get( self, key: str, default: typing.Any = UNSET, type_: typing.Type[typing.Any] = str, subtype: typing.Type[typing.Any] = str, mapper: typing.Optional[typing.Callable[[object], object]] = None, ) -> typing.Any: """ Parse a value from an environment variable. .. code-block:: python >>> os.environ['FOO'] <<< '12345' >>> >>> os.environ['BAR'] <<< '1,2,3,4' >>> >>> 'BAZ' in os.environ <<< False >>> >>> parser = Config() >>> parser.get('FOO', type_=int) <<< 12345 >>> >>> parser.get('BAR', type_=list, subtype=int) <<< [1, 2, 3, 4] >>> >>> parser.get('BAZ', default='abc123') <<< 'abc123' >>> >>> parser.get('FOO', type_=int, mapper=lambda x: x*10) <<< 123450 :param key: the key to look up the value under :param default: default value to return when no value is present :param type\\_: the type to return :param subtype: subtype for iterator types :param mapper: a function to post-process the value with :return: the parsed config value """ value = self.environ.get(key, UNSET) if value is UNSET and default is UNSET: raise ConfigError("Unknown environment variable: {0}".format(key)) if value is UNSET: value = default else: value = self.parse(typing.cast(str, value), type_, subtype) if mapper: value = mapper(value) return value
def _request(self, endpoint, *args, **kwargs): """ Perform a request to a specific endpoint. Raise an error if the status code indicates a client or server error. """ kwargs['url'] = endpoint return (super(MarathonLbClient, self).request(*args, **kwargs) .addCallback(raise_for_status))
def _check_request_results(self, results): """ Check the result of each request that we made. If a failure occurred, but some requests succeeded, log and count the failures. If all requests failed, raise an error. :return: The list of responses, with a None value for any requests that failed. """ responses = [] failed_endpoints = [] for index, result_tuple in enumerate(results): success, result = result_tuple if success: responses.append(result) else: endpoint = self.endpoints[index] self.log.failure( 'Failed to make a request to a marathon-lb instance: ' '{endpoint}', result, LogLevel.error, endpoint=endpoint) responses.append(None) failed_endpoints.append(endpoint) if len(failed_endpoints) == len(self.endpoints): raise RuntimeError( 'Failed to make a request to all marathon-lb instances') if failed_endpoints: self.log.error( 'Failed to make a request to {x}/{y} marathon-lb instances: ' '{endpoints}', x=len(failed_endpoints), y=len(self.endpoints), endpoints=failed_endpoints) return responses
def maybe_key(pem_path): """ Set up a client key if one does not exist already. https://gist.github.com/glyph/27867a478bb71d8b6046fbfb176e1a33#file-local-certs-py-L32-L50 :type pem_path: twisted.python.filepath.FilePath :param pem_path: The path to the certificate directory to use. :rtype: twisted.internet.defer.Deferred """ acme_key_file = pem_path.child(u'client.key') if acme_key_file.exists(): key = _load_pem_private_key_bytes(acme_key_file.getContent()) else: key = generate_private_key(u'rsa') acme_key_file.setContent(_dump_pem_private_key_bytes(key)) return succeed(JWKRSA(key=key))
def maybe_key_vault(client, mount_path): """ Set up a client key in Vault if one does not exist already. :param client: The Vault API client to use. :param mount_path: The Vault key/value mount path to use. :rtype: twisted.internet.defer.Deferred """ d = client.read_kv2('client_key', mount_path=mount_path) def get_or_create_key(client_key): if client_key is not None: key_data = client_key['data']['data'] key = _load_pem_private_key_bytes(key_data['key'].encode('utf-8')) return JWKRSA(key=key) else: key = generate_private_key(u'rsa') key_data = { 'key': _dump_pem_private_key_bytes(key).decode('utf-8') } d = client.create_or_update_kv2( 'client_key', key_data, mount_path=mount_path) return d.addCallback(lambda _result: JWKRSA(key=key)) return d.addCallback(get_or_create_key)
def create_txacme_client_creator(key, reactor, url, alg=RS256): """ Create a creator for txacme clients to provide to the txacme service. See ``txacme.client.Client.from_url()``. We create the underlying JWSClient with a non-persistent pool to avoid https://github.com/mithrandi/txacme/issues/86. :return: a callable that returns a Deferred that fires with the client """ # Creating an Agent without specifying a pool gives us the default pool # which is non-persistent. jws_client = JWSClient(HTTPClient(agent=Agent(reactor)), key, alg) return partial(txacme_Client.from_url, reactor, url, key, alg, jws_client)
def generate_wildcard_pem_bytes(): """ Generate a wildcard (subject name '*') self-signed certificate valid for 10 years. https://cryptography.io/en/latest/x509/tutorial/#creating-a-self-signed-certificate :return: Bytes representation of the PEM certificate data """ key = generate_private_key(u'rsa') name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'*')]) cert = ( x509.CertificateBuilder() .issuer_name(name) .subject_name(name) .not_valid_before(datetime.today() - timedelta(days=1)) .not_valid_after(datetime.now() + timedelta(days=3650)) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return b''.join(( key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()), cert.public_bytes(serialization.Encoding.PEM) ))
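A quick round-trip check of the output, using the same `pem` and `cryptography` libraries the module already relies on; this is a sketch, not part of the original module.

import pem
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import NameOID

# The key is written first, then the certificate.
key_obj, cert_obj = pem.parse(generate_wildcard_pem_bytes())
cert = x509.load_pem_x509_certificate(cert_obj.as_bytes(), default_backend())
common_name = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0]
assert common_name.value == u'*'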
def from_env(cls, reactor=None, env=os.environ): """ Create a Vault client with configuration from the environment. Supports a limited number of the available config options: https://www.vaultproject.io/docs/commands/index.html#environment-variables https://github.com/hashicorp/vault/blob/v0.11.3/api/client.go#L28-L40 Supported: - ``VAULT_ADDR`` - ``VAULT_CACERT`` - ``VAULT_CLIENT_CERT`` - ``VAULT_CLIENT_KEY`` - ``VAULT_TLS_SERVER_NAME`` - ``VAULT_TOKEN`` Not currently supported: - ``VAULT_CAPATH`` - ``VAULT_CLIENT_TIMEOUT`` - ``VAULT_MAX_RETRIES`` - ``VAULT_MFA`` - ``VAULT_RATE_LIMIT`` - ``VAULT_SKIP_VERIFY`` - ``VAULT_WRAP_TTL`` """ address = env.get('VAULT_ADDR', 'https://127.0.0.1:8200') # This seems to be what the Vault CLI defaults to token = env.get('VAULT_TOKEN', 'TEST') ca_cert = env.get('VAULT_CACERT') tls_server_name = env.get('VAULT_TLS_SERVER_NAME') client_cert = env.get('VAULT_CLIENT_CERT') client_key = env.get('VAULT_CLIENT_KEY') cf = ClientPolicyForHTTPS.from_pem_files( caKey=ca_cert, privateKey=client_key, certKey=client_cert, tls_server_name=tls_server_name ) client, reactor = default_client(reactor, contextFactory=cf) return cls(address, token, client=client, reactor=reactor)
def read(self, path, **params): """ Read data from Vault. Returns the JSON-decoded response. """ d = self.request('GET', '/v1/' + path, params=params) return d.addCallback(self._handle_response)
def write(self, path, **data): """ Write data to Vault. Returns the JSON-decoded response. """ d = self.request('PUT', '/v1/' + path, json=data) return d.addCallback(self._handle_response, check_cas=True)
def read_kv2(self, path, version=None, mount_path='secret'): """ Read some data from a key/value version 2 secret engine. """ params = {} if version is not None: params['version'] = version read_path = '{}/data/{}'.format(mount_path, path) return self.read(read_path, **params)
def create_or_update_kv2(self, path, data, cas=None, mount_path='secret'): """ Create or update some data in a key/value version 2 secret engine. :raises CasError: Raises an error if the ``cas`` value, when provided, doesn't match Vault's version for the key. """ params = { 'options': {}, 'data': data } if cas is not None: params['options']['cas'] = cas write_path = '{}/data/{}'.format(mount_path, path) return self.write(write_path, **params)
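Path construction and the check-and-set guard, made concrete; `client` stands in for a constructed Vault client, and 'secret' is Vault's default key/value mount path.

# read_kv2('live/example.com') issues:
#   GET /v1/secret/data/live/example.com
d = client.read_kv2('live/example.com')

# With cas=0 the write only succeeds if the key does not yet exist,
# protecting concurrent writers from clobbering each other's first write.
d = client.create_or_update_kv2(
    'live/example.com',
    {'privkey': '...', 'cert': '...', 'chain': '...'},
    cas=0)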
def get_single_header(headers, key): """ Get a single value for the given key out of the given set of headers. :param twisted.web.http_headers.Headers headers: The set of headers in which to look for the header value :param str key: The header key """ raw_headers = headers.getRawHeaders(key) if raw_headers is None: return None # Take the final header as the authoritative one header, _ = cgi.parse_header(raw_headers[-1]) return header
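A self-contained demonstration of the two behaviours documented above: the last header wins, and header parameters are stripped.

from twisted.web.http_headers import Headers

headers = Headers()
headers.addRawHeader('Content-Type', 'application/json')
headers.addRawHeader('Content-Type', 'text/event-stream; charset=utf-8')

assert get_single_header(headers, 'Content-Type') == 'text/event-stream'
assert get_single_header(headers, 'Accept') is None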
def raise_for_status(response): """ Raises a `requests.exceptions.HTTPError` if the response did not succeed. Adapted from the Requests library: https://github.com/kennethreitz/requests/blob/v2.8.1/requests/models.py#L825-L837 """ http_error_msg = '' if 400 <= response.code < 500: http_error_msg = '%s Client Error for url: %s' % ( response.code, uridecode(response.request.absoluteURI)) elif 500 <= response.code < 600: http_error_msg = '%s Server Error for url: %s' % ( response.code, uridecode(response.request.absoluteURI)) if http_error_msg: raise HTTPError(http_error_msg, response=response) return response
def _compose_url(self, url, kwargs): """ Compose a URL starting with the given URL (or self.url if that URL is None) and using the values in kwargs. :param str url: The base URL to use. If None, ``self.url`` will be used instead. :param dict kwargs: A dictionary of values to override in the base URL. Relevant keys will be popped from the dictionary. """ if url is None: url = self.url if url is None: raise ValueError( 'url not provided and this client has no url attribute') split_result = urisplit(url) userinfo = split_result.userinfo # Build up the kwargs to pass to uricompose compose_kwargs = {} for key in ['scheme', 'host', 'port', 'path', 'fragment']: if key in kwargs: compose_kwargs[key] = kwargs.pop(key) else: compose_kwargs[key] = getattr(split_result, key) if 'params' in kwargs: compose_kwargs['query'] = kwargs.pop('params') else: compose_kwargs['query'] = split_result.query # Take the userinfo out of the URL and pass as 'auth' to treq so it can # be used for HTTP basic auth headers if 'auth' not in kwargs and userinfo is not None: # treq expects a 2-tuple (username, password), so split on the # first ':' only, in case the password itself contains one kwargs['auth'] = tuple(userinfo.split(':', 1)) return uricompose(**compose_kwargs)
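The override behaviour can be seen with `uritools` directly (`urisplit` and `uricompose` are the functions used above); a sketch of the path-override case.

from uritools import uricompose, urisplit

base = urisplit('http://user:pass@localhost:8080/base?x=1')
url = uricompose(scheme=base.scheme, host=base.host, port=base.port,
                 path='/v2/apps', query=base.query, fragment=base.fragment)
# The userinfo is dropped from the composed URL, just as _compose_url
# strips it out before handing it to treq as `auth`.
assert url == 'http://localhost:8080/v2/apps?x=1'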
def request(self, method, url=None, **kwargs): """ Perform a request. :param method: The HTTP method to use (e.g. `GET`). :param url: The URL to use. The default value is the URL this client was created with (`self.url`) (e.g. `http://localhost:8080`) :param kwargs: Any other parameters that will be passed to `treq.request`, for example headers. Or any URL parameters to override, for example path, query or fragment. """ url = self._compose_url(url, kwargs) kwargs.setdefault('timeout', self._timeout) d = self._client.request(method, url, reactor=self._reactor, **kwargs) d.addCallback(self._log_request_response, method, url, kwargs) d.addErrback(self._log_request_error, url) return d
def listen(self, reactor, endpoint_description): """ Run the server, i.e. start listening for requests on the given host and port. :param reactor: The ``IReactorTCP`` to use. :param endpoint_description: The Twisted description for the endpoint to listen on. :return: A deferred that returns an object that provides ``IListeningPort``. """ endpoint = serverFromString(reactor, endpoint_description) return endpoint.listen(Site(self.app.resource()))
def health(self, request): """ Listens to incoming health checks from Marathon on ``/health``. """ if self.health_handler is None: return self._no_health_handler(request) health = self.health_handler() response_code = OK if health.healthy else SERVICE_UNAVAILABLE request.setResponseCode(response_code) write_request_json(request, health.json_message)
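The handler is only required to return an object exposing `healthy` and `json_message`; a minimal stand-in follows (the real class name is not shown in this excerpt, and `check_marathon_connectivity` is a hypothetical check).

from collections import namedtuple

# Stand-in for the health result; only the two attributes read by
# `health` above are required.
Health = namedtuple('Health', ['healthy', 'json_message'])

def health_handler():
    marathon_ok = check_marathon_connectivity()  # hypothetical check
    return Health(healthy=marathon_ok,
                  json_message={'marathon_ok': marathon_ok})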
def main(reactor, argv=sys.argv[1:], env=os.environ, acme_url=LETSENCRYPT_DIRECTORY.asText()): """ A tool to automatically request, renew and distribute Let's Encrypt certificates for apps running on Marathon and served by marathon-lb. """ parser = argparse.ArgumentParser( description='Automatically manage ACME certificates for Marathon apps') parser.add_argument('-a', '--acme', help='The address for the ACME Directory Resource ' '(default: %(default)s)', default=acme_url) parser.add_argument('-e', '--email', help='An email address to register with the ACME ' 'service (optional)') parser.add_argument('-m', '--marathon', metavar='MARATHON[,MARATHON,...]', help='The addresses for the Marathon HTTP API ' '(default: %(default)s)', default='http://marathon.mesos:8080') parser.add_argument('-l', '--lb', metavar='LB[,LB,...]', help='The addresses for the marathon-lb HTTP API ' '(default: %(default)s)', default='http://marathon-lb.marathon.mesos:9090') parser.add_argument('-g', '--group', help='The marathon-lb group to issue certificates for ' '(default: %(default)s)', default='external') parser.add_argument('--allow-multiple-certs', help=('Allow multiple certificates for a single app ' 'port. This allows multiple domains for an app, ' 'but is not recommended.'), action='store_true') parser.add_argument('--listen', help='The address for the port to listen on (default: ' '%(default)s)', default=':8000') parser.add_argument('--marathon-timeout', help=('Amount of time in seconds to wait for HTTP ' 'response headers to be received for all ' 'requests to Marathon. Set to 0 to disable. ' '(default: %(default)s)'), type=float, default=10) parser.add_argument('--sse-timeout', help=('Amount of time in seconds to wait for some ' 'event data to be received from Marathon. Set ' 'to 0 to disable. (default: %(default)s)'), type=float, default=60) parser.add_argument('--log-level', help='The minimum severity level to log messages at ' '(default: %(default)s)', choices=['debug', 'info', 'warn', 'error', 'critical'], default='info') parser.add_argument('--vault', help=('Enable storage of certificates in Vault. This ' 'can be further configured with VAULT_-style ' 'environment variables.'), action='store_true') parser.add_argument('storage_path', metavar='storage-path', help=('Path for storing certificates. If --vault is ' 'used then this is the mount path for the ' 'key/value engine in Vault. If not, this is the ' 'path to a directory.')) parser.add_argument('--version', action='version', version=__version__) args = parser.parse_args(argv) # Set up logging init_logging(args.log_level) # Set up marathon-acme marathon_addrs = args.marathon.split(',') mlb_addrs = args.lb.split(',') sse_timeout = args.sse_timeout if args.sse_timeout > 0 else None acme_url = URL.fromText(_to_unicode(args.acme)) endpoint_description = parse_listen_addr(args.listen) log_args = [ ('storage-path', args.storage_path), ('vault', args.vault), ('acme', acme_url), ('email', args.email), ('allow-multiple-certs', args.allow_multiple_certs), ('marathon', marathon_addrs), ('sse-timeout', sse_timeout), ('lb', mlb_addrs), ('group', args.group), ('endpoint-description', endpoint_description), ] log_args = ['{}={!r}'.format(k, v) for k, v in log_args] log.info('Starting marathon-acme {} with: {}'.format( __version__, ', '.join(log_args))) if args.vault: key_d, cert_store = init_vault_storage( reactor, env, args.storage_path) else: key_d, cert_store = init_file_storage(args.storage_path) # Once we have the client key, create the txacme client creator key_d.addCallback(create_txacme_client_creator, reactor, acme_url) # Once we have the client creator, create the service key_d.addCallback( create_marathon_acme, cert_store, args.email, args.allow_multiple_certs, marathon_addrs, args.marathon_timeout, sse_timeout, mlb_addrs, args.group, reactor) # Finally, run the thing return key_d.addCallback(lambda ma: ma.run(endpoint_description))
def parse_listen_addr(listen_addr): """ Parse an address of the form [ipaddress]:port into a tcp or tcp6 Twisted endpoint description string for use with ``twisted.internet.endpoints.serverFromString``. """ if ':' not in listen_addr: raise ValueError( "'%s' does not have the correct form for a listen address: " '[ipaddress]:port' % (listen_addr,)) host, port = listen_addr.rsplit(':', 1) # Validate the host if host == '': protocol = 'tcp' interface = None else: if host.startswith('[') and host.endswith(']'): # IPv6 wrapped in [] host = host[1:-1] ip_address = ipaddress.ip_address(_to_unicode(host)) protocol = 'tcp6' if ip_address.version == 6 else 'tcp' interface = str(ip_address) # Validate the port if not port.isdigit() or int(port) < 1 or int(port) > 65535: raise ValueError( "'%s' does not appear to be a valid port number" % (port,)) args = [protocol, port] kwargs = {'interface': interface} if interface is not None else {} return _create_tx_endpoints_string(args, kwargs)
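Some accepted and rejected inputs, inferred from the validation above; the exact endpoint string is rendered by `_create_tx_endpoints_string`, which is not shown here.

parse_listen_addr(':8000')           # tcp, port 8000, all interfaces
parse_listen_addr('127.0.0.1:8000')  # tcp, port 8000, interface=127.0.0.1
parse_listen_addr('[::1]:8000')      # tcp6, port 8000, interface=::1

try:
    parse_listen_addr('example.com:8000')  # hostnames are not accepted
except ValueError as e:
    print(e)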
def create_marathon_acme( client_creator, cert_store, acme_email, allow_multiple_certs, marathon_addrs, marathon_timeout, sse_timeout, mlb_addrs, group, reactor): """ Create a marathon-acme instance. :param client_creator: The txacme client creator function. :param cert_store: The txacme certificate store instance. :param acme_email: Email address to use when registering with the ACME service. :param allow_multiple_certs: Whether to allow multiple certificates per app port. :param marathon_addrs: List of addresses for the Marathon instances to find app domains that require certificates. :param marathon_timeout: Amount of time in seconds to wait for response headers to be received from Marathon. :param sse_timeout: Amount of time in seconds to wait for some event data to be received from Marathon. :param mlb_addrs: List of addresses for marathon-lb instances to reload when a new certificate is issued. :param group: The marathon-lb group (``HAPROXY_GROUP``) to consider when finding app domains. :param reactor: The reactor to use. """ marathon_client = MarathonClient(marathon_addrs, timeout=marathon_timeout, sse_kwargs={'timeout': sse_timeout}, reactor=reactor) marathon_lb_client = MarathonLbClient(mlb_addrs, reactor=reactor) return MarathonAcme( marathon_client, group, cert_store, marathon_lb_client, client_creator, reactor, acme_email, allow_multiple_certs )
def init_storage_dir(storage_dir): """ Initialise the storage directory with the certificates directory and a default wildcard self-signed certificate for HAProxy. :return: the storage path and certs path """ storage_path = FilePath(storage_dir) # Create the default wildcard certificate if it doesn't already exist default_cert_path = storage_path.child('default.pem') if not default_cert_path.exists(): default_cert_path.setContent(generate_wildcard_pem_bytes()) # Create a directory for unmanaged certs. We don't touch this again, but it # needs to be there and it makes sense to create it at the same time as # everything else. unmanaged_certs_path = storage_path.child('unmanaged-certs') if not unmanaged_certs_path.exists(): unmanaged_certs_path.createDirectory() # Store certificates in a directory inside the storage directory, so # HAProxy will read just the certificates there. certs_path = storage_path.child('certs') if not certs_path.exists(): certs_path.createDirectory() return storage_path, certs_path