# Source: grundic/yagocd :: yagocd/client.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import copy
from yagocd.resources import BaseManager
from yagocd.resources.agent import AgentManager
from yagocd.resources.artifact import ArtifactManager
from yagocd.resources.configuration import ConfigurationManager
from yagocd.resources.elastic_profile import ElasticAgentProfileManager
from yagocd.resources.encryption import EncryptionManager
from yagocd.resources.environment import EnvironmentManager
from yagocd.resources.feed import FeedManager
from yagocd.resources.info import InfoManager
from yagocd.resources.job import JobManager
from yagocd.resources.material import MaterialManager
from yagocd.resources.notification_filter import NotificationFilterManager
from yagocd.resources.package import PackageManager
from yagocd.resources.package_repository import PackageRepositoryManager
from yagocd.resources.pipeline import PipelineManager
from yagocd.resources.pipeline_config import PipelineConfigManager
from yagocd.resources.plugin_info import PluginInfoManager
from yagocd.resources.property import PropertyManager
from yagocd.resources.scm import SCMManager
from yagocd.resources.stage import StageManager
from yagocd.resources.template import TemplateManager
from yagocd.resources.user import UserManager
from yagocd.resources.version import VersionManager
from yagocd.session import Session
class Yagocd(object):
"""
    Main class of the package; it gives the user access to Go REST API methods.
"""
DEFAULT_OPTIONS = {
'server': 'http://localhost:8153',
'context_path': 'go/',
'api_path': 'api/',
'verify': True,
'headers': {
'Accept': BaseManager.ACCEPT_HEADER,
}
}
def __init__(self, server=None, auth=None, options=None):
"""
Construct a GoCD client instance.
:param server: url of the Go server
        :param auth: authorization that will be passed to requests.
        Could be a tuple of (username, password) for basic authentication.
:param options: dictionary of additional options.
* context_path -- server context path to use (default is ``go/``)
        * api_path -- api endpoint to use. ``api/`` is used by default, but some
        managers override it where the API requires a different endpoint.
* verify -- verify SSL certs. Defaults to ``True``.
* headers -- default headers for requests (default is ``'Accept': 'application/vnd.go.cd.v1+json'``)
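
        Example (an illustrative sketch; the host and credentials are
        placeholders, not defaults of this class)::

            client = Yagocd(
                server='https://gocd.example.com:8154',
                auth=('username', 'password'),
                options={'verify': False}
            )
            agents = client.agents  # lazily instantiated AgentManager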
"""
options = {} if options is None else options
if server is not None:
options['server'] = server
merged = copy.deepcopy(self.DEFAULT_OPTIONS)
merged.update(options)
self._session = Session(auth, merged)
# manager instances
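        # Each manager below is instantiated lazily on first property access
        # and cached in these attributes for the lifetime of the client.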
self._agent_manager = None
self._artifact_manager = None
self._configuration_manager = None
self._elastic_agent_profile_manager = None
self._encryption_manager = None
self._environment_manager = None
self._feed_manager = None
self._job_manager = None
self._info_manager = None
self._material_manager = None
self._notification_filter_manager = None
self._package_manager = None
self._package_repository_manager = None
self._pipeline_manager = None
self._pipeline_config_manager = None
self._plugin_info_manager = None
self._property_manager = None
self._scm_manager = None
self._stage_manager = None
self._template_manager = None
self._user_manager = None
self._version_manager = None
@property
def server_url(self):
"""
Property for getting server url.
:return: server url for this instance.
"""
return self._session.server_url
@property
def agents(self):
"""
Property for accessing :class:`AgentManager` instance, which is used to manage agents.
:rtype: yagocd.resources.agent.AgentManager
"""
if self._agent_manager is None:
self._agent_manager = AgentManager(session=self._session)
return self._agent_manager
@property
def artifacts(self):
"""
Property for accessing :class:`ArtifactManager` instance, which is used to manage artifacts.
:rtype: yagocd.resources.artifact.ArtifactManager
"""
if self._artifact_manager is None:
self._artifact_manager = ArtifactManager(session=self._session)
return self._artifact_manager
@property
def configurations(self):
"""
Property for accessing :class:`ConfigurationManager` instance, which is used to manage configurations.
:rtype: yagocd.resources.configuration.ConfigurationManager
"""
if self._configuration_manager is None:
self._configuration_manager = ConfigurationManager(session=self._session)
return self._configuration_manager
@property
def encryption(self):
"""
Property for accessing :class:`EncryptionManager` instance,
which is used to manage encryption.
:rtype: yagocd.resources.encryption.EncryptionManager
"""
if self._encryption_manager is None:
self._encryption_manager = EncryptionManager(session=self._session)
return self._encryption_manager
@property
def elastic_profiles(self):
"""
Property for accessing :class:`ElasticAgentProfileManager` instance,
which is used to manage elastic agent profiles.
:rtype: yagocd.resources.elastic_profile.ElasticAgentProfileManager
"""
if self._elastic_agent_profile_manager is None:
self._elastic_agent_profile_manager = ElasticAgentProfileManager(session=self._session)
return self._elastic_agent_profile_manager
@property
def environments(self):
"""
Property for accessing :class:`EnvironmentManager` instance, which is used to manage environments.
:rtype: yagocd.resources.environment.EnvironmentManager
"""
if self._environment_manager is None:
self._environment_manager = EnvironmentManager(session=self._session)
return self._environment_manager
@property
def feeds(self):
"""
Property for accessing :class:`FeedManager` instance, which is used to manage feeds.
:rtype: yagocd.resources.feed.FeedManager
"""
if self._feed_manager is None:
self._feed_manager = FeedManager(session=self._session)
return self._feed_manager
@property
def jobs(self):
"""
        Property for accessing :class:`JobManager` instance, which is used to manage jobs.
:rtype: yagocd.resources.job.JobManager
"""
if self._job_manager is None:
self._job_manager = JobManager(session=self._session)
return self._job_manager
@property
def info(self):
"""
        Property for accessing :class:`InfoManager` instance, which is used to get general server info.
:rtype: yagocd.resources.info.InfoManager
"""
if self._info_manager is None:
self._info_manager = InfoManager(session=self._session)
return self._info_manager
@property
def notification_filters(self):
"""
Property for accessing :class:`NotificationFilterManager` instance, which is used to manage notification
filters.
:rtype: yagocd.resources.notification_filter.NotificationFilterManager
"""
if self._notification_filter_manager is None:
self._notification_filter_manager = NotificationFilterManager(session=self._session)
return self._notification_filter_manager
@property
def materials(self):
"""
Property for accessing :class:`MaterialManager` instance, which is used to manage materials.
:rtype: yagocd.resources.material.MaterialManager
"""
if self._material_manager is None:
self._material_manager = MaterialManager(session=self._session)
return self._material_manager
@property
def packages(self):
"""
Property for accessing :class:`PackageManager` instance, which is used to manage packages.
:rtype: yagocd.resources.package.PackageManager
"""
if self._package_manager is None:
self._package_manager = PackageManager(session=self._session)
return self._package_manager
@property
def package_repositories(self):
"""
Property for accessing :class:`PackageRepositoryManager` instance, which is used to manage package repos.
:rtype: yagocd.resources.package_repository.PackageRepositoryManager
"""
if self._package_repository_manager is None:
self._package_repository_manager = PackageRepositoryManager(session=self._session)
return self._package_repository_manager
@property
def pipelines(self):
"""
Property for accessing :class:`PipelineManager` instance, which is used to manage pipelines.
:rtype: yagocd.resources.pipeline.PipelineManager
"""
if self._pipeline_manager is None:
self._pipeline_manager = PipelineManager(session=self._session)
return self._pipeline_manager
@property
def pipeline_configs(self):
"""
Property for accessing :class:`PipelineConfigManager` instance, which is used to manage pipeline configurations.
:rtype: yagocd.resources.pipeline_config.PipelineConfigManager
"""
if self._pipeline_config_manager is None:
self._pipeline_config_manager = PipelineConfigManager(session=self._session)
return self._pipeline_config_manager
@property
def plugin_info(self):
"""
        Property for accessing :class:`PluginInfoManager` instance, which is used to manage plugin info.
:rtype: yagocd.resources.plugin_info.PluginInfoManager
"""
if self._plugin_info_manager is None:
self._plugin_info_manager = PluginInfoManager(session=self._session)
return self._plugin_info_manager
@property
def properties(self):
"""
Property for accessing :class:`PropertyManager` instance, which is used to manage properties of the jobs.
:rtype: yagocd.resources.property.PropertyManager
"""
if self._property_manager is None:
self._property_manager = PropertyManager(session=self._session)
return self._property_manager
@property
def scms(self):
"""
Property for accessing :class:`SCMManager` instance, which is used to manage pluggable SCM materials.
:rtype: yagocd.resources.scm.SCMManager
"""
if self._scm_manager is None:
self._scm_manager = SCMManager(session=self._session)
return self._scm_manager
@property
def stages(self):
"""
Property for accessing :class:`StageManager` instance, which is used to manage stages.
:rtype: yagocd.resources.stage.StageManager
"""
if self._stage_manager is None:
self._stage_manager = StageManager(session=self._session)
return self._stage_manager
@property
def templates(self):
"""
Property for accessing :class:`TemplateManager` instance, which is used to manage templates.
:rtype: yagocd.resources.template.TemplateManager
"""
if self._template_manager is None:
self._template_manager = TemplateManager(session=self._session)
return self._template_manager
@property
def users(self):
"""
Property for accessing :class:`UserManager` instance, which is used to manage users.
:rtype: yagocd.resources.user.UserManager
"""
if self._user_manager is None:
self._user_manager = UserManager(session=self._session)
return self._user_manager
@property
def versions(self):
"""
        Property for accessing :class:`VersionManager` instance, which is used to get the server version.
:rtype: yagocd.resources.version.VersionManager
"""
if self._version_manager is None:
self._version_manager = VersionManager(session=self._session)
return self._version_manager

# Source: softwaresaved/fat :: lowfat/migrations/0120_auto_20180206_1505.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-02-06 15:05
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
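
# Bumps the default application year to 2018 and moves the inauguration grant
# expiration to 2020-03-31 on the claimant and historicalclaimant models.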
class Migration(migrations.Migration):
dependencies = [
('lowfat', '0119_auto_20171214_0722'),
]
operations = [
migrations.AlterField(
model_name='claimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='claimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
migrations.AlterField(
model_name='historicalclaimant',
name='application_year',
field=models.IntegerField(default=2018),
),
migrations.AlterField(
model_name='historicalclaimant',
name='inauguration_grant_expiration',
field=models.DateField(default=datetime.date(2020, 3, 31)),
),
]

# Source: brunosantos/Bsan-kodi-repo :: plugin.video.kodi/gravador.py
import datetime
import os
import re
import stat
import sys

import xbmc
import xbmcgui
from default import downloadPath, art, mensagemprogresso, gravadorpath, savefile
from requests import abrir_url
from resources.lib.daring import selfAddon, tvporpath
from utils import limpar, horaportuguesa, normalize, clean, savefile
def menugravador():
if downloadPath=='':
xbmcgui.Dialog().ok('TV Portuguesa','Necessitas de introduzir a pasta onde vão ficar','as gravações. Escolhe uma pasta com algum espaço','livre disponível.')
dialog = xbmcgui.Dialog()
pastafinal = dialog.browse(int(3), "Escolha pasta para as gravações", 'files')
selfAddon.setSetting('pastagravador',value=pastafinal)
return
xbmc.executebuiltin("ReplaceWindow(VideoFiles," + downloadPath + ")")
def iniciagravador(finalurl,siglacanal,name,directo):
print "A iniciar gravador 1/2"
if downloadPath=='':
xbmcgui.Dialog().ok('TV Portuguesa','Necessitas de introduzir a pasta onde vão ficar','as gravações. Escolhe uma pasta com algum espaço','livre disponível.')
dialog = xbmcgui.Dialog()
pastafinal = dialog.browse(int(3), "Escolha pasta para as gravações", 'files')
selfAddon.setSetting('pastagravador',value=pastafinal)
return
if directo==True:
if re.search('rtmp://',finalurl) or re.search('rtmpe://',finalurl):
#if re.search('rtmp://',finalurl):
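            # Translate librtmp "key=value" stream options into the matching
            # rtmpdump command-line flags (playPath -> -y, conn -> -C,
            # swfUrl -> -W, pageUrl -> -p, token -> -T, app -> -a,
            # live=true -> -v, timeout= -> -m).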
finalurl=finalurl.replace('playPath=','-y ').replace('swfVfy=1','').replace('conn=','-C ').replace('live=true','-v').replace('swfUrl=','-W ').replace('pageUrl=','-p ').replace(' token=','-T ').replace('app=','-a ').replace(' ',' ').replace('timeout=','-m ')
verifica_so('-r ' + finalurl,name,siglacanal,directo)
else: xbmc.executebuiltin("XBMC.Notification(TV Portuguesa, Stream não gravável. Escolha outro.,'100000'," + tvporpath + art + "icon32-ver1.png)")
def verifica_so(args,nomecanal,siglacanal,directo):
print "A iniciar gravador 2/2"
mensagemprogresso.create('TV Portuguesa','A carregar gravador...')
#correrdump(args,nomecanal,'windows',siglacanal,directo)
if selfAddon.getSetting('rtmpdumpalternativo')=='':
if xbmc.getCondVisibility('system.platform.windows'): correrdump(args,nomecanal,'gravador-windows',siglacanal,directo)
elif xbmc.getCondVisibility('system.platform.osx'): correrdump(args,nomecanal,'gravador-mac86atv1',siglacanal,directo)
elif xbmc.getCondVisibility('system.platform.linux'):
if os.uname()[4] == "armv6l":
pasta=os.path.join(gravadorpath,'rpi')
basescript='#!/bin/sh\nexport LD_LIBRARY_PATH="%s"\n' % (pasta)
correrdump(args,nomecanal,'gravador-rpi',siglacanal,directo,script=basescript)
elif os.uname()[4] == "x86_64":
pasta=os.path.join(gravadorpath,'linux64')
basescript='#!/bin/sh\nexport LD_LIBRARY_PATH="%s"\n' % (pasta)
correrdump(args,nomecanal,'gravador-linux64',siglacanal,directo,script=basescript)
else:
pasta=os.path.join(gravadorpath,'linux86')
basescript='#!/bin/sh\nexport LD_LIBRARY_PATH="%s"\n' % (pasta)
correrdump(args,nomecanal,'gravador-linux86',siglacanal,directo,script=basescript)
else: correrdump(args,nomecanal,'alternativo',siglacanal,directo)
def correrdump(args,nomecanal,pathso,siglacanal,directo,script=False):
import subprocess
info=infocanal(siglacanal)
    escolha=0 #### initializer
mensagemprogresso.close()
    if info!=False and directo!='listas': escolha=listadeprogramas(info) #### if there is schedule data, show the programme list
if escolha==0:
        if info!=False and directo!='listas': #### schedule data available
fimprograma=calculafinalprograma(info)
tituloprograma=' - '+ re.compile('<Title>(.+?)</Title>').findall(info)[0]
#nomecanal = nomecanal + tituloprograma
minutosrestantes=fimprograma / 60
opcao= xbmcgui.Dialog().yesno("TV Portuguesa", 'Faltam ' + str(minutosrestantes) + ' minutos para o fim do programa', "Deseja gravar o resto do programa ou", "definir um tempo de gravação?",'Definir tempo', 'Gravar restante')
if opcao==1:
if selfAddon.getSetting("acrescentogravacao") == "0": segundos=fimprograma
elif selfAddon.getSetting("acrescentogravacao") == "1": segundos=fimprograma+120
elif selfAddon.getSetting("acrescentogravacao") == "2": segundos=fimprograma+300
elif selfAddon.getSetting("acrescentogravacao") == "3": segundos=fimprograma+600
else: segundos=fimprograma + 120
minutos=segundos/60
else:
minutos = -1
while minutos < 1: minutos = int(xbmcgui.Dialog().numeric(0,"Num de minutos de gravacao"))
segundos=minutos*60
else:
minutos = -1
while minutos < 1: minutos = int(xbmcgui.Dialog().numeric(0,"Num de minutos de gravacao"))
segundos=minutos*60
nomecanal = limpar(re.sub('[^-a-zA-Z0-9_.()\\\/ ]+', '', nomecanal))
horaactual= horaportuguesa(False)
if pathso=='alternativo': caminhodump=selfAddon.getSetting("rtmpdumpalternativo")
else: caminhodump=os.path.join(gravadorpath,pathso)
if xbmc.getCondVisibility('system.platform.linux'):
st = os.stat(caminhodump)
os.chmod(caminhodump, st.st_mode | stat.S_IEXEC)
args=args.split(' ')
typeargs=[]
for types in args:
if len(types) != 2: typeargs.append('"' + types + '"')
else: typeargs.append(types)
args=' '.join(typeargs)
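    # Final rtmpdump arguments: record to a timestamped .flv in the download
    # folder (-o) and stop automatically after the chosen duration (-B seconds).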
argumentos=args + ' -o "' + downloadPath + horaactual + ' - ' + nomecanal + '.flv" -B ' + str(segundos)
#argumentos=args + ' -o "' + downloadPath + horaactual + '.flv" -B ' + str(segundos)
if script:
conteudoscript=script + xbmc.translatePath(os.path.join(gravadorpath,pathso))+ ' $1 ' + argumentos
savefile('script.sh', conteudoscript ,pastafinal=gravadorpath)
caminhodump=xbmc.translatePath(os.path.join(gravadorpath,'script.sh'))
st = os.stat(caminhodump)
os.chmod(caminhodump, st.st_mode | stat.S_IEXEC)
try:
#proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if script:
proc = subprocess.Popen(caminhodump, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
#proc = subprocess.Popen(argumentos, executable=caminhodump + '.exe', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
cmd = '"%s" %s' % (caminhodump, argumentos)
proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print "RTMPDump comecou a funcionar"
xbmc.executebuiltin("XBMC.Notification(TV Portuguesa, Gravação de "+str(minutos)+" minutos iniciou,'10000'," + tvporpath + "/resources/art/icon32-ver1.png)")
(stdout, stderr) = proc.communicate()
print "RTMPDump parou de funcionar"
stderr = normalize(stderr)
if u'Download complete' in stderr:
print 'stdout: ' + str(stdout)
print 'stderr: ' + str(stderr)
print "Download Completo!"
xbmc.executebuiltin("XBMC.Notification(TV Portuguesa, Gravação efectuada com sucesso,'10000'," + tvporpath + "/resources/art/icon32-ver1.png)")
else:
print 'stdout: ' + str(stdout)
print 'stderr: ' + str(stderr)
print "Download Falhou!"
xbmc.executebuiltin("XBMC.Notification(TV Portuguesa, Gravação falhou,'10000'," + tvporpath + "/resources/art/icon32-ver1.png)")
except Exception:
print ("Nao conseguiu abrir o programa")
xbmc.executebuiltin("XBMC.Notification(TV Portuguesa, Erro ao abrir programa de gravação,'10000'," + tvporpath + "/resources/art/icon32-ver1.png)")
(etype, value, traceback) = sys.exc_info()
print "Erro etype: " + str(etype)
print "Erro valor: " + str(value)
print "Erro traceback: " + str(traceback)
def infocanal(siglacanal):
if siglacanal=='SEM':
print "Canal sem programacao."
return False
try:
dia= horaportuguesa(True)
diaseguinte= horaportuguesa('diaseguinte')
url='http://services.sapo.pt/EPG/GetChannelListByDateInterval?channelSiglas='+siglacanal+'&startDate=' + dia +':01&endDate='+ diaseguinte + ':02'
link= clean(abrir_url(url))
return link
except:
print "Nao conseguiu capturar programacao."
return False
def listadeprogramas(link):
titles=[]
ligacao=[]
ref=int(0)
programas=re.compile('<Title>(.+?)</Title>.+?<StartTime>.+?-.+?-(.+?) (.+?):(.+?):.+?</StartTime>').findall(link)
for nomeprog,dia, horas,minutos in programas:
ref=ref+1
if dia==datetime.datetime.now().strftime('%d'): dia='Hoje'
else: dia='Amanhã'
if ref==2:
titles.append('')
titles.append('[COLOR red]A seguir: (não dá para gravar)[/COLOR]')
if ref!=1: titles.append(dia + ' ' + horas + ':' + minutos + ' - ' +nomeprog)
else: titles.append(dia + ' ' + horas + ':' + minutos + ' - ' +nomeprog)
ligacao.append('')
index = xbmcgui.Dialog().select('Escolha o programa a gravar', titles)
return index
def calculafinalprograma(link):
fim=re.compile('<EndTime>(.+?)-(.+?)-(.+?) (.+?):(.+?):.+?</EndTime>').findall(link)[0]
agora= horaportuguesa(False)
inicio=re.compile('(.+?)-(.+?)-(.+?) (.+?)-(.+?)-').findall(agora)[0]
start = datetime.datetime(year=int(inicio[0]), month=int(inicio[1]), day=int(inicio[2]), hour=int(inicio[3]), minute=int(inicio[4]))
end = datetime.datetime(year=int(fim[0]), month=int(fim[1]), day=int(fim[2]), hour=int(fim[3]), minute=int(fim[4]))
diff = end - start
segundos= (diff.microseconds + (diff.seconds + diff.days * 24 * 3600) * 10**6) / 10**6
    return segundos

# Source: benmezger/dotfiles :: dot_weechat/python/wee_slack.py
# Copyright (c) 2014-2016 Ryan Huber <[email protected]>
# Copyright (c) 2015-2018 Tollef Fog Heen <[email protected]>
# Copyright (c) 2015-2020 Trygve Aaberge <[email protected]>
# Released under the MIT license.
from __future__ import print_function, unicode_literals
from collections import OrderedDict
from datetime import date, datetime, timedelta
from functools import partial, wraps
from io import StringIO
from itertools import chain, count, islice
import copy
import errno
import textwrap
import time
import json
import hashlib
import os
import re
import sys
import traceback
import collections
import ssl
import random
import socket
import string
# Prevent websocket from using numpy (it's an optional dependency). We do this
# because numpy causes python (and thus weechat) to crash when it's reloaded.
# See https://github.com/numpy/numpy/issues/11925
sys.modules["numpy"] = None
from websocket import ABNF, create_connection, WebSocketConnectionClosedException
try:
basestring # Python 2
unicode
str = unicode
except NameError: # Python 3
basestring = unicode = str
try:
from collections.abc import Mapping, Reversible, KeysView, ItemsView, ValuesView
except ImportError:
from collections import Mapping, KeysView, ItemsView, ValuesView
Reversible = object
try:
from urllib.parse import quote, urlencode
except ImportError:
from urllib import quote, urlencode
try:
from json import JSONDecodeError
except ImportError:
JSONDecodeError = ValueError
# hack to make tests possible.. better way?
try:
import weechat
except ImportError:
pass
SCRIPT_NAME = "slack"
SCRIPT_AUTHOR = "Ryan Huber <[email protected]>"
SCRIPT_VERSION = "2.7.0"
SCRIPT_LICENSE = "MIT"
SCRIPT_DESC = "Extends weechat for typing notification/search/etc on slack.com"
REPO_URL = "https://github.com/wee-slack/wee-slack"
TYPING_DURATION = 6
RECORD_DIR = "/tmp/weeslack-debug"
SLACK_API_TRANSLATOR = {
"channel": {
"history": "conversations.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "conversations.mark",
"info": "conversations.info",
},
"im": {
"history": "conversations.history",
"join": "conversations.open",
"leave": "conversations.close",
"mark": "conversations.mark",
},
"mpim": {
"history": "conversations.history",
"join": "conversations.open",
"leave": "conversations.close",
"mark": "conversations.mark",
"info": "conversations.info",
},
"group": {
"history": "conversations.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "conversations.mark",
"info": "conversations.info"
},
"private": {
"history": "conversations.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "conversations.mark",
"info": "conversations.info",
},
"shared": {
"history": "conversations.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "conversations.mark",
"info": "conversations.info",
},
"thread": {
"history": None,
"join": None,
"leave": None,
"mark": "subscriptions.thread.mark",
}
}
CONFIG_PREFIX = "plugins.var.python." + SCRIPT_NAME
###### Decorators have to be up here
def slack_buffer_or_ignore(f):
"""
Only run this function if we're in a slack buffer, else ignore
"""
@wraps(f)
def wrapper(data, current_buffer, *args, **kwargs):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
return w.WEECHAT_RC_OK
return f(data, current_buffer, *args, **kwargs)
return wrapper
def slack_buffer_required(f):
"""
Only run this function if we're in a slack buffer, else print error
"""
@wraps(f)
def wrapper(data, current_buffer, *args, **kwargs):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
command_name = f.__name__.replace('command_', '', 1)
w.prnt('', 'slack: command "{}" must be executed on slack buffer'.format(command_name))
return w.WEECHAT_RC_ERROR
return f(data, current_buffer, *args, **kwargs)
return wrapper
def utf8_decode(f):
"""
Decode all arguments from byte strings to unicode strings. Use this for
functions called from outside of this script, e.g. callbacks from weechat.
"""
@wraps(f)
def wrapper(*args, **kwargs):
return f(*decode_from_utf8(args), **decode_from_utf8(kwargs))
return wrapper
NICK_GROUP_HERE = "0|Here"
NICK_GROUP_AWAY = "1|Away"
NICK_GROUP_EXTERNAL = "2|External"
sslopt_ca_certs = {}
if hasattr(ssl, "get_default_verify_paths") and callable(ssl.get_default_verify_paths):
ssl_defaults = ssl.get_default_verify_paths()
if ssl_defaults.cafile is not None:
sslopt_ca_certs = {'ca_certs': ssl_defaults.cafile}
EMOJI = {}
EMOJI_WITH_SKIN_TONES_REVERSE = {}
###### Unicode handling
def encode_to_utf8(data):
if sys.version_info.major > 2:
return data
elif isinstance(data, unicode):
return data.encode('utf-8')
if isinstance(data, bytes):
return data
elif isinstance(data, collections.Mapping):
return type(data)(map(encode_to_utf8, data.items()))
elif isinstance(data, collections.Iterable):
return type(data)(map(encode_to_utf8, data))
else:
return data
def decode_from_utf8(data):
if sys.version_info.major > 2:
return data
elif isinstance(data, bytes):
return data.decode('utf-8')
if isinstance(data, unicode):
return data
elif isinstance(data, collections.Mapping):
return type(data)(map(decode_from_utf8, data.items()))
elif isinstance(data, collections.Iterable):
return type(data)(map(decode_from_utf8, data))
else:
return data
class WeechatWrapper(object):
def __init__(self, wrapped_class):
self.wrapped_class = wrapped_class
# Helper method used to encode/decode method calls.
def wrap_for_utf8(self, method):
def hooked(*args, **kwargs):
result = method(*encode_to_utf8(args), **encode_to_utf8(kwargs))
# Prevent wrapped_class from becoming unwrapped
if result == self.wrapped_class:
return self
return decode_from_utf8(result)
return hooked
# Encode and decode everything sent to/received from weechat. We use the
# unicode type internally in wee-slack, but has to send utf8 to weechat.
def __getattr__(self, attr):
orig_attr = self.wrapped_class.__getattribute__(attr)
if callable(orig_attr):
return self.wrap_for_utf8(orig_attr)
else:
return decode_from_utf8(orig_attr)
# Ensure all lines sent to weechat specifies a prefix. For lines after the
# first, we want to disable the prefix, which is done by specifying a space.
def prnt_date_tags(self, buffer, date, tags, message):
message = message.replace("\n", "\n \t")
return self.wrap_for_utf8(self.wrapped_class.prnt_date_tags)(buffer, date, tags, message)
class ProxyWrapper(object):
def __init__(self):
self.proxy_name = w.config_string(w.config_get('weechat.network.proxy_curl'))
self.proxy_string = ""
self.proxy_type = ""
self.proxy_address = ""
self.proxy_port = ""
self.proxy_user = ""
self.proxy_password = ""
self.has_proxy = False
if self.proxy_name:
self.proxy_string = "weechat.proxy.{}".format(self.proxy_name)
self.proxy_type = w.config_string(w.config_get("{}.type".format(self.proxy_string)))
if self.proxy_type == "http":
self.proxy_address = w.config_string(w.config_get("{}.address".format(self.proxy_string)))
self.proxy_port = w.config_integer(w.config_get("{}.port".format(self.proxy_string)))
self.proxy_user = w.config_string(w.config_get("{}.username".format(self.proxy_string)))
self.proxy_password = w.config_string(w.config_get("{}.password".format(self.proxy_string)))
self.has_proxy = True
else:
w.prnt("", "\nWarning: weechat.network.proxy_curl is set to {} type (name : {}, conf string : {}). Only HTTP proxy is supported.\n\n".format(self.proxy_type, self.proxy_name, self.proxy_string))
def curl(self):
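        # Build curl's proxy argument in the form -x[user:password@]address[:port];
        # an empty string leaves proxying disabled.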
if not self.has_proxy:
return ""
if self.proxy_user and self.proxy_password:
user = "{}:{}@".format(self.proxy_user, self.proxy_password)
else:
user = ""
if self.proxy_port:
port = ":{}".format(self.proxy_port)
else:
port = ""
return "-x{}{}{}".format(user, self.proxy_address, port)
class MappingReversible(Mapping, Reversible):
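    # Python 2's plain Mapping views are not reversible, so these view classes
    # add __reversed__ by delegating to the underlying mapping.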
def keys(self):
return KeysViewReversible(self)
def items(self):
return ItemsViewReversible(self)
def values(self):
return ValuesViewReversible(self)
class KeysViewReversible(KeysView, Reversible):
def __reversed__(self):
return reversed(self._mapping)
class ItemsViewReversible(ItemsView, Reversible):
def __reversed__(self):
for key in reversed(self._mapping):
yield (key, self._mapping[key])
class ValuesViewReversible(ValuesView, Reversible):
def __reversed__(self):
for key in reversed(self._mapping):
yield self._mapping[key]
##### Helpers
def colorize_string(color, string, reset_color='reset'):
if color:
return w.color(color) + string + w.color(reset_color)
else:
return string
def print_error(message, buffer='', warning=False):
prefix = 'Warning' if warning else 'Error'
w.prnt(buffer, '{}{}: {}'.format(w.prefix('error'), prefix, message))
def print_message_not_found_error(msg_id):
if msg_id:
print_error("Invalid id given, must be an existing id or a number greater " +
"than 0 and less than the number of messages in the channel")
else:
print_error("No messages found in channel")
def token_for_print(token):
return '{}...{}'.format(token[:15], token[-10:])
def format_exc_tb():
return decode_from_utf8(traceback.format_exc())
def format_exc_only():
etype, value, _ = sys.exc_info()
return ''.join(decode_from_utf8(traceback.format_exception_only(etype, value)))
def get_localvar_type(slack_type):
if slack_type in ("im", "mpim"):
return "private"
else:
return "channel"
def get_nick_color(nick):
info_name_prefix = "irc_" if weechat_version < 0x1050000 else ""
return w.info_get(info_name_prefix + "nick_color_name", nick)
def get_thread_color(thread_id):
if config.color_thread_suffix == 'multiple':
return get_nick_color(thread_id)
else:
return config.color_thread_suffix
def sha1_hex(s):
return str(hashlib.sha1(s.encode('utf-8')).hexdigest())
def get_functions_with_prefix(prefix):
return {name[len(prefix):]: ref for name, ref in globals().items()
if name.startswith(prefix)}
def handle_socket_error(exception, team, caller_name):
if not (isinstance(exception, WebSocketConnectionClosedException) or
exception.errno in (errno.EPIPE, errno.ECONNRESET, errno.ETIMEDOUT)):
raise
w.prnt(team.channel_buffer,
'Lost connection to slack team {} (on {}), reconnecting.'.format(
team.domain, caller_name))
dbg('Socket failed on {} with exception:\n{}'.format(
caller_name, format_exc_tb()), level=5)
team.set_disconnected()
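
# A message id in user input is either a sequence number counting back from
# the newest message or a "$"-prefixed hash of at least three hex digits.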
MESSAGE_ID_REGEX_STRING = r'(?P<msg_id>\d+|\$[0-9a-fA-F]{3,})'
REACTION_PREFIX_REGEX_STRING = r'{}?(?P<reaction_change>\+|-)'.format(MESSAGE_ID_REGEX_STRING)
EMOJI_CHAR_REGEX_STRING = '(?P<emoji_char>[\U00000080-\U0010ffff]+)'
EMOJI_NAME_REGEX_STRING = ':(?P<emoji_name>[a-z0-9_+-]+):'
EMOJI_CHAR_OR_NAME_REGEX_STRING = '({}|{})'.format(EMOJI_CHAR_REGEX_STRING, EMOJI_NAME_REGEX_STRING)
EMOJI_NAME_REGEX = re.compile(EMOJI_NAME_REGEX_STRING)
EMOJI_CHAR_OR_NAME_REGEX = re.compile(EMOJI_CHAR_OR_NAME_REGEX_STRING)
def regex_match_to_emoji(match, include_name=False):
emoji = match.group(1)
full_match = match.group()
char = EMOJI.get(emoji, full_match)
if include_name and char != full_match:
return '{} ({})'.format(char, full_match)
return char
def replace_string_with_emoji(text):
if config.render_emoji_as_string == 'both':
return EMOJI_NAME_REGEX.sub(
partial(regex_match_to_emoji, include_name=True),
text,
)
elif config.render_emoji_as_string:
return text
return EMOJI_NAME_REGEX.sub(regex_match_to_emoji, text)
def replace_emoji_with_string(text):
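    # Trim characters off the end until the remainder matches a known emoji;
    # this drops skin-tone modifiers appended to the base character.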
emoji = None
key = text
while emoji is None and len(key):
emoji = EMOJI_WITH_SKIN_TONES_REVERSE.get(key)
key = key[:-1]
return emoji or text
###### New central Event router
class EventRouter(object):
def __init__(self):
"""
complete
Eventrouter is the central hub we use to route:
1) incoming websocket data
2) outgoing http requests and incoming replies
3) local requests
It has a recorder that, when enabled, logs most events
to the location specified in RECORD_DIR.
"""
self.queue = []
self.slow_queue = []
self.slow_queue_timer = 0
self.teams = {}
self.subteams = {}
self.context = {}
self.weechat_controller = WeechatController(self)
self.previous_buffer = ""
self.reply_buffer = {}
self.cmds = get_functions_with_prefix("command_")
self.proc = get_functions_with_prefix("process_")
self.handlers = get_functions_with_prefix("handle_")
self.local_proc = get_functions_with_prefix("local_process_")
self.shutting_down = False
self.recording = False
self.recording_path = "/tmp"
self.handle_next_hook = None
self.handle_next_hook_interval = -1
def record(self):
"""
complete
Toggles the event recorder and creates a directory for data if enabled.
"""
self.recording = not self.recording
if self.recording:
if not os.path.exists(RECORD_DIR):
os.makedirs(RECORD_DIR)
def record_event(self, message_json, team, file_name_field, subdir=None):
"""
complete
Called each time you want to record an event.
message_json is a json in dict form
file_name_field is the json key whose value you want to be part of the file name
"""
now = time.time()
if team:
team_subdomain = team.subdomain
else:
team_json = message_json.get('team')
if team_json:
team_subdomain = team_json.get('domain')
else:
team_subdomain = 'unknown_team'
directory = "{}/{}".format(RECORD_DIR, team_subdomain)
if subdir:
directory = "{}/{}".format(directory, subdir)
if not os.path.exists(directory):
os.makedirs(directory)
mtype = message_json.get(file_name_field, 'unknown')
f = open('{}/{}-{}.json'.format(directory, now, mtype), 'w')
f.write("{}".format(json.dumps(message_json)))
f.close()
def store_context(self, data):
"""
A place to store data and vars needed by callback returns. We need this because
weechat's "callback_data" has a limited size and weechat will crash if you exceed
this size.
"""
identifier = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(40))
self.context[identifier] = data
dbg("stored context {} {} ".format(identifier, data.url))
return identifier
def retrieve_context(self, identifier):
"""
A place to retrieve data and vars needed by callback returns. We need this because
weechat's "callback_data" has a limited size and weechat will crash if you exceed
this size.
"""
return self.context.get(identifier)
def delete_context(self, identifier):
"""
Requests can span multiple requests, so we may need to delete this as a last step
"""
if identifier in self.context:
del self.context[identifier]
def shutdown(self):
"""
complete
This toggles shutdown mode. Shutdown mode tells us not to
talk to Slack anymore. Without this, typing /quit will trigger
a race with the buffer close callback and may result in you
leaving every slack channel.
"""
self.shutting_down = not self.shutting_down
def register_team(self, team):
"""
complete
Adds a team to the list of known teams for this EventRouter.
"""
if isinstance(team, SlackTeam):
self.teams[team.get_team_hash()] = team
else:
raise InvalidType(type(team))
def reconnect_if_disconnected(self):
for team in self.teams.values():
time_since_last_ping = time.time() - team.last_ping_time
time_since_last_pong = time.time() - team.last_pong_time
if team.connected and time_since_last_ping < 5 and time_since_last_pong > 30:
w.prnt(team.channel_buffer,
'Lost connection to slack team {} (no pong), reconnecting.'.format(
team.domain))
team.set_disconnected()
if not team.connected:
team.connect(reconnect=True)
dbg("reconnecting {}".format(team))
@utf8_decode
def receive_ws_callback(self, team_hash, fd):
"""
This is called by the global method of the same name.
It is triggered when we have incoming data on a websocket,
which needs to be read. Once it is read, we will ensure
the data is valid JSON, add metadata, and place it back
on the queue for processing as JSON.
"""
team = self.teams[team_hash]
while True:
try:
# Read the data from the websocket associated with this team.
opcode, data = team.ws.recv_data(control_frame=True)
except ssl.SSLWantReadError:
# No more data to read at this time.
return w.WEECHAT_RC_OK
except (WebSocketConnectionClosedException, socket.error) as e:
handle_socket_error(e, team, 'receive')
return w.WEECHAT_RC_OK
if opcode == ABNF.OPCODE_PONG:
team.last_pong_time = time.time()
return w.WEECHAT_RC_OK
elif opcode != ABNF.OPCODE_TEXT:
return w.WEECHAT_RC_OK
message_json = json.loads(data.decode('utf-8'))
if self.recording:
self.record_event(message_json, team, 'type', 'websocket')
message_json["wee_slack_metadata_team"] = team
self.receive(message_json)
return w.WEECHAT_RC_OK
@utf8_decode
def receive_httprequest_callback(self, data, command, return_code, out, err):
"""
complete
Receives the result of an http request we previously handed
off to weechat (weechat bundles libcurl). Weechat can fragment
replies, so it buffers them until the reply is complete.
It is then populated with metadata here so we can identify
where the request originated and route properly.
"""
request_metadata = self.retrieve_context(data)
dbg("RECEIVED CALLBACK with request of {} id of {} and code {} of length {}".format(request_metadata.request, request_metadata.response_id, return_code, len(out)))
if return_code == 0:
if len(out) > 0:
if request_metadata.response_id not in self.reply_buffer:
self.reply_buffer[request_metadata.response_id] = StringIO()
self.reply_buffer[request_metadata.response_id].write(out)
try:
j = json.loads(self.reply_buffer[request_metadata.response_id].getvalue())
except:
pass
# dbg("Incomplete json, awaiting more", True)
try:
j["wee_slack_process_method"] = request_metadata.request_normalized
if self.recording:
self.record_event(j, request_metadata.team, 'wee_slack_process_method', 'http')
j["wee_slack_request_metadata"] = request_metadata
self.reply_buffer.pop(request_metadata.response_id)
self.receive(j)
self.delete_context(data)
except:
dbg("HTTP REQUEST CALLBACK FAILED", True)
pass
# We got an empty reply and this is weird so just ditch it and retry
else:
dbg("length was zero, probably a bug..")
self.delete_context(data)
self.receive(request_metadata)
elif return_code == -1:
if request_metadata.response_id not in self.reply_buffer:
self.reply_buffer[request_metadata.response_id] = StringIO()
self.reply_buffer[request_metadata.response_id].write(out)
else:
self.reply_buffer.pop(request_metadata.response_id, None)
self.delete_context(data)
if request_metadata.request.startswith('rtm.'):
retry_text = ('retrying' if request_metadata.should_try() else
'will not retry after too many failed attempts')
w.prnt('', ('Failed connecting to slack team with token {}, {}. ' +
'If this persists, try increasing slack_timeout. Error (code {}): {}')
.format(token_for_print(request_metadata.token), retry_text, return_code, err))
dbg('rtm.start failed with return_code {}. stack:\n{}'
.format(return_code, ''.join(traceback.format_stack())), level=5)
self.receive(request_metadata)
return w.WEECHAT_RC_OK
def receive(self, dataobj, slow=False):
"""
Receives a raw object and places it on the queue for
processing. Object must be known to handle_next or
be JSON.
"""
dbg("RECEIVED FROM QUEUE")
if slow:
self.slow_queue.append(dataobj)
else:
self.queue.append(dataobj)
def handle_next(self):
"""
complete
Main handler of the EventRouter. This is called repeatedly
via callback to drain events from the queue. It also attaches
useful metadata and context to events as they are processed.
"""
wanted_interval = 100
if len(self.slow_queue) > 0 or len(self.queue) > 0:
wanted_interval = 10
if self.handle_next_hook is None or wanted_interval != self.handle_next_hook_interval:
if self.handle_next_hook:
w.unhook(self.handle_next_hook)
self.handle_next_hook = w.hook_timer(wanted_interval, 0, 0, "handle_next", "")
self.handle_next_hook_interval = wanted_interval
if len(self.slow_queue) > 0 and ((self.slow_queue_timer + 1) < time.time()):
dbg("from slow queue", 0)
self.queue.append(self.slow_queue.pop())
self.slow_queue_timer = time.time()
if len(self.queue) > 0:
j = self.queue.pop(0)
# Reply is a special case of a json reply from websocket.
kwargs = {}
if isinstance(j, SlackRequest):
if j.should_try():
if j.retry_ready():
local_process_async_slack_api_request(j, self)
else:
self.slow_queue.append(j)
else:
dbg("Max retries for Slackrequest")
else:
if "reply_to" in j:
dbg("SET FROM REPLY")
function_name = "reply"
elif "type" in j:
dbg("SET FROM type")
function_name = j["type"]
elif "wee_slack_process_method" in j:
dbg("SET FROM META")
function_name = j["wee_slack_process_method"]
else:
dbg("SET FROM NADA")
function_name = "unknown"
request = j.get("wee_slack_request_metadata")
if request:
team = request.team
channel = request.channel
metadata = request.metadata
else:
team = j.get("wee_slack_metadata_team")
channel = None
metadata = {}
if team:
if "channel" in j:
channel_id = j["channel"]["id"] if type(j["channel"]) == dict else j["channel"]
channel = team.channels.get(channel_id, channel)
if "user" in j:
user_id = j["user"]["id"] if type(j["user"]) == dict else j["user"]
metadata['user'] = team.users.get(user_id)
dbg("running {}".format(function_name))
if function_name.startswith("local_") and function_name in self.local_proc:
self.local_proc[function_name](j, self, team, channel, metadata)
elif function_name in self.proc:
self.proc[function_name](j, self, team, channel, metadata)
elif function_name in self.handlers:
self.handlers[function_name](j, self, team, channel, metadata)
else:
dbg("Callback not implemented for event: {}".format(function_name))
def handle_next(data, remaining_calls):
try:
EVENTROUTER.handle_next()
except:
if config.debug_mode:
traceback.print_exc()
else:
pass
return w.WEECHAT_RC_OK
class WeechatController(object):
"""
Encapsulates our interaction with weechat
"""
def __init__(self, eventrouter):
self.eventrouter = eventrouter
self.buffers = {}
self.previous_buffer = None
def iter_buffers(self):
for b in self.buffers:
yield (b, self.buffers[b])
def register_buffer(self, buffer_ptr, channel):
"""
complete
Adds a weechat buffer to the list of handled buffers for this EventRouter
"""
if isinstance(buffer_ptr, basestring):
self.buffers[buffer_ptr] = channel
else:
raise InvalidType(type(buffer_ptr))
def unregister_buffer(self, buffer_ptr, update_remote=False, close_buffer=False):
"""
complete
        Removes a weechat buffer from the list of handled buffers for this EventRouter
"""
channel = self.buffers.get(buffer_ptr)
if channel:
channel.destroy_buffer(update_remote)
del self.buffers[buffer_ptr]
if close_buffer:
w.buffer_close(buffer_ptr)
def get_channel_from_buffer_ptr(self, buffer_ptr):
return self.buffers.get(buffer_ptr)
def get_all(self, buffer_ptr):
return self.buffers
def get_previous_buffer_ptr(self):
return self.previous_buffer
def set_previous_buffer(self, data):
self.previous_buffer = data
###### New Local Processors
def local_process_async_slack_api_request(request, event_router):
"""
complete
Sends an API request to Slack. You'll need to give this a well formed SlackRequest object.
DEBUGGING!!! The context here cannot be very large. Weechat will crash.
"""
if not event_router.shutting_down:
weechat_request = 'url:{}'.format(request.request_string())
weechat_request += '&nonce={}'.format(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(4)))
params = {'useragent': 'wee_slack {}'.format(SCRIPT_VERSION)}
request.tried()
context = event_router.store_context(request)
# TODO: let flashcode know about this bug - i have to 'clear' the hashtable or retry requests fail
w.hook_process_hashtable('url:', params, config.slack_timeout, "", context)
w.hook_process_hashtable(weechat_request, params, config.slack_timeout, "receive_httprequest_callback", context)
###### New Callbacks
@utf8_decode
def ws_ping_cb(data, remaining_calls):
for team in EVENTROUTER.teams.values():
if team.ws and team.connected:
try:
team.ws.ping()
team.last_ping_time = time.time()
except (WebSocketConnectionClosedException, socket.error) as e:
handle_socket_error(e, team, 'ping')
return w.WEECHAT_RC_OK
@utf8_decode
def reconnect_callback(*args):
EVENTROUTER.reconnect_if_disconnected()
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_renamed_cb(data, signal, current_buffer):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if isinstance(channel, SlackChannelCommon) and not channel.buffer_rename_in_progress:
if w.buffer_get_string(channel.channel_buffer, "old_full_name"):
channel.label_full_drop_prefix = True
channel.label_full = w.buffer_get_string(channel.channel_buffer, "name")
else:
channel.label_short_drop_prefix = True
channel.label_short = w.buffer_get_string(channel.channel_buffer, "short_name")
channel.rename()
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_closing_callback(data, signal, current_buffer):
"""
Receives a callback from weechat when a buffer is being closed.
"""
EVENTROUTER.weechat_controller.unregister_buffer(current_buffer, True, False)
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_input_callback(signal, buffer_ptr, data):
"""
incomplete
Handles everything a user types in the input bar. In our case
this includes add/remove reactions, modifying messages, and
sending messages.
"""
if weechat_version < 0x2090000:
data = data.replace('\r', '\n')
eventrouter = eval(signal)
channel = eventrouter.weechat_controller.get_channel_from_buffer_ptr(buffer_ptr)
if not channel:
return w.WEECHAT_RC_ERROR
reaction = re.match(r"{}{}\s*$".format(REACTION_PREFIX_REGEX_STRING, EMOJI_CHAR_OR_NAME_REGEX_STRING), data)
substitute = re.match("{}?s/".format(MESSAGE_ID_REGEX_STRING), data)
if reaction:
emoji = reaction.group("emoji_char") or reaction.group("emoji_name")
if reaction.group("reaction_change") == "+":
channel.send_add_reaction(reaction.group("msg_id"), emoji)
elif reaction.group("reaction_change") == "-":
channel.send_remove_reaction(reaction.group("msg_id"), emoji)
elif substitute:
try:
old, new, flags = re.split(r'(?<!\\)/', data)[1:]
except ValueError:
print_error('Incomplete regex for changing a message, '
'it should be in the form s/old text/new text/')
else:
# Replacement string in re.sub() is a string, not a regex, so get
# rid of escapes.
new = new.replace(r'\/', '/')
old = old.replace(r'\/', '/')
channel.edit_nth_previous_message(substitute.group("msg_id"), old, new, flags)
else:
if data.startswith(('//', ' ')):
data = data[1:]
channel.send_message(data)
# this is probably wrong channel.mark_read(update_remote=True, force=True)
return w.WEECHAT_RC_OK
# Workaround for supporting multiline messages. It intercepts before the input
# callback is called, as this is called with the whole message, while it is
# normally split on newline before being sent to buffer_input_callback.
# WeeChat only splits on newline, so we replace it with carriage return, and
# replace it back in buffer_input_callback.
def input_text_for_buffer_cb(data, modifier, current_buffer, string):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
return string
return re.sub('\r?\n', '\r', decode_from_utf8(string))
@utf8_decode
def buffer_switch_callback(data, signal, current_buffer):
"""
Every time we change channels in weechat, we call this to:
1) set read marker 2) determine if we have already populated
channel history data 3) set presence to active
"""
prev_buffer_ptr = EVENTROUTER.weechat_controller.get_previous_buffer_ptr()
# this is to see if we need to gray out things in the buffer list
prev = EVENTROUTER.weechat_controller.get_channel_from_buffer_ptr(prev_buffer_ptr)
if prev:
prev.mark_read()
new_channel = EVENTROUTER.weechat_controller.get_channel_from_buffer_ptr(current_buffer)
if new_channel:
if not new_channel.got_history or new_channel.history_needs_update:
new_channel.get_history()
set_own_presence_active(new_channel.team)
EVENTROUTER.weechat_controller.set_previous_buffer(current_buffer)
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_list_update_callback(data, somecount):
"""
A simple timer-based callback that will update the buffer list
if needed. We only do this max 1x per second, as otherwise it
uses a lot of cpu for minimal changes. We use buffer short names
to indicate typing via "#channel" <-> ">channel" and
user presence via " name" <-> "+name".
"""
for buf in EVENTROUTER.weechat_controller.buffers.values():
buf.refresh()
return w.WEECHAT_RC_OK
def quit_notification_callback(data, signal, args):
stop_talking_to_slack()
return w.WEECHAT_RC_OK
@utf8_decode
def typing_notification_cb(data, signal, current_buffer):
msg = w.buffer_get_string(current_buffer, "input")
if len(msg) > 8 and msg[0] != "/":
global typing_timer
now = time.time()
if typing_timer + 4 < now:
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if channel and channel.type != "thread":
identifier = channel.identifier
request = {"type": "typing", "channel": identifier}
channel.team.send_to_websocket(request, expect_reply=False)
typing_timer = now
return w.WEECHAT_RC_OK
@utf8_decode
def typing_update_cb(data, remaining_calls):
w.bar_item_update("slack_typing_notice")
return w.WEECHAT_RC_OK
@utf8_decode
def slack_never_away_cb(data, remaining_calls):
if config.never_away:
for team in EVENTROUTER.teams.values():
set_own_presence_active(team)
return w.WEECHAT_RC_OK
@utf8_decode
def typing_bar_item_cb(data, item, current_window, current_buffer, extra_info):
"""
    Provides a bar item indicating who is typing in the current channel AND
    who is typing a DM to you globally.
"""
typers = []
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
# first look for people typing in this channel
if current_channel:
        # this try is mostly because server buffers don't implement is_someone_typing
try:
if current_channel.type != 'im' and current_channel.is_someone_typing():
typers += current_channel.get_typing_list()
except:
pass
# here is where we notify you that someone is typing in DM
# regardless of which buffer you are in currently
for team in EVENTROUTER.teams.values():
for channel in team.channels.values():
if channel.type == "im":
if channel.is_someone_typing():
typers.append("D/" + channel.name)
pass
typing = ", ".join(typers)
if typing != "":
typing = colorize_string(config.color_typing_notice, "typing: " + typing)
return typing
@utf8_decode
def away_bar_item_cb(data, item, current_window, current_buffer, extra_info):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if not channel:
return ''
if channel.team.is_user_present(channel.team.myidentifier):
return ''
else:
away_color = w.config_string(w.config_get('weechat.color.item_away'))
if channel.team.my_manual_presence == 'away':
return colorize_string(away_color, 'manual away')
else:
return colorize_string(away_color, 'auto away')
@utf8_decode
def channel_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all channels on all teams to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
should_include_channel = lambda channel: channel.active and channel.type in ['channel', 'group', 'private', 'shared']
other_teams = [team for team in EVENTROUTER.teams.values() if not current_channel or team != current_channel.team]
for team in other_teams:
for channel in team.channels.values():
if should_include_channel(channel):
w.hook_completion_list_add(completion, channel.name, 0, w.WEECHAT_LIST_POS_SORT)
if current_channel:
for channel in sorted(current_channel.team.channels.values(), key=lambda channel: channel.name, reverse=True):
if should_include_channel(channel):
w.hook_completion_list_add(completion, channel.name, 0, w.WEECHAT_LIST_POS_BEGINNING)
if should_include_channel(current_channel):
w.hook_completion_list_add(completion, current_channel.name, 0, w.WEECHAT_LIST_POS_BEGINNING)
return w.WEECHAT_RC_OK
@utf8_decode
def dm_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all dms/mpdms on all teams to completion list
"""
for team in EVENTROUTER.teams.values():
for channel in team.channels.values():
if channel.active and channel.type in ['im', 'mpim']:
w.hook_completion_list_add(completion, channel.name, 0, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def nick_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all @-prefixed nicks to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if current_channel is None or current_channel.members is None:
return w.WEECHAT_RC_OK
base_command = w.hook_completion_get_string(completion, "base_command")
if base_command in ['invite', 'msg', 'query', 'whois']:
members = current_channel.team.members
else:
members = current_channel.members
for member in members:
user = current_channel.team.users.get(member)
if user and not user.deleted:
w.hook_completion_list_add(completion, user.name, 1, w.WEECHAT_LIST_POS_SORT)
w.hook_completion_list_add(completion, "@" + user.name, 1, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def emoji_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all :-prefixed emoji to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if current_channel is None:
return w.WEECHAT_RC_OK
base_word = w.hook_completion_get_string(completion, "base_word")
reaction = re.match(REACTION_PREFIX_REGEX_STRING + ":", base_word)
prefix = reaction.group(0) if reaction else ":"
for emoji in current_channel.team.emoji_completions:
w.hook_completion_list_add(completion, prefix + emoji + ":", 0, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def thread_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all $-prefixed thread ids to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if current_channel is None or not hasattr(current_channel, 'hashed_messages'):
return w.WEECHAT_RC_OK
threads = (x for x in current_channel.hashed_messages.items() if isinstance(x[0], str))
for thread_id, message_ts in sorted(threads, key=lambda item: item[1]):
message = current_channel.messages.get(message_ts)
if message and message.number_of_replies():
w.hook_completion_list_add(completion, "$" + thread_id, 0, w.WEECHAT_LIST_POS_BEGINNING)
return w.WEECHAT_RC_OK
@utf8_decode
def topic_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds topic for current channel to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if current_channel is None:
return w.WEECHAT_RC_OK
topic = current_channel.render_topic()
channel_names = [channel.name for channel in current_channel.team.channels.values()]
if topic.split(' ', 1)[0] in channel_names:
topic = '{} {}'.format(current_channel.name, topic)
w.hook_completion_list_add(completion, topic, 0, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def usergroups_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all @-prefixed usergroups to completion list
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if current_channel is None:
return w.WEECHAT_RC_OK
subteam_handles = [subteam.handle for subteam in current_channel.team.subteams.values()]
for group in subteam_handles + ["@channel", "@everyone", "@here"]:
w.hook_completion_list_add(completion, group, 1, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def complete_next_cb(data, current_buffer, command):
"""Extract current word, if it is equal to a nick, prefix it with @ and
rely on nick_completion_cb adding the @-prefixed versions to the
completion lists, then let Weechat's internal completion do its
thing
"""
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
    if current_channel is None or not hasattr(current_channel, 'members') or current_channel.members is None:
return w.WEECHAT_RC_OK
line_input = w.buffer_get_string(current_buffer, "input")
current_pos = w.buffer_get_integer(current_buffer, "input_pos") - 1
input_length = w.buffer_get_integer(current_buffer, "input_length")
word_start = 0
word_end = input_length
# If we're on a non-word, look left for something to complete
while current_pos >= 0 and line_input[current_pos] != '@' and not line_input[current_pos].isalnum():
current_pos = current_pos - 1
if current_pos < 0:
current_pos = 0
    for i in range(current_pos, 0, -1):
        if line_input[i] != '@' and not line_input[i].isalnum():
            word_start = i + 1
            break
    for i in range(current_pos, input_length):
        if not line_input[i].isalnum():
            word_end = i
            break
word = line_input[word_start:word_end]
for member in current_channel.members:
user = current_channel.team.users.get(member)
if user and user.name == word:
            # Here, we cheat. Insert a @ in front and rely on the @
            # nicks being in the completion list
w.buffer_set(current_buffer, "input", line_input[:word_start] + "@" + line_input[word_start:])
w.buffer_set(current_buffer, "input_pos", str(w.buffer_get_integer(current_buffer, "input_pos") + 1))
return w.WEECHAT_RC_OK_EAT
return w.WEECHAT_RC_OK
def script_unloaded():
stop_talking_to_slack()
return w.WEECHAT_RC_OK
def stop_talking_to_slack():
"""
complete
    Prevents a race condition where quitting closes buffers,
    which would trigger leaving the channel because of how
    closing a buffer is handled
"""
if 'EVENTROUTER' in globals():
EVENTROUTER.shutdown()
for team in EVENTROUTER.teams.values():
team.ws.shutdown()
return w.WEECHAT_RC_OK
##### New Classes
class SlackRequest(object):
"""
    Encapsulates a Slack API request. Valuable as an object that we can add to the queue and/or retry.
    Makes a SHA of the request url and current time so we can re-tag this on the way back through.
"""
def __init__(self, team, request, post_data=None, channel=None, metadata=None, retries=3, token=None):
if team is None and token is None:
            raise ValueError("team and token can't both be None")
self.team = team
self.request = request
self.post_data = post_data if post_data else {}
self.channel = channel
self.metadata = metadata if metadata else {}
self.retries = retries
self.token = token if token else team.token
self.tries = 0
self.start_time = time.time()
self.request_normalized = re.sub(r'\W+', '', request)
self.domain = 'api.slack.com'
self.post_data['token'] = self.token
self.url = 'https://{}/api/{}?{}'.format(self.domain, self.request, urlencode(encode_to_utf8(self.post_data)))
self.params = {'useragent': 'wee_slack {}'.format(SCRIPT_VERSION)}
self.response_id = sha1_hex('{}{}'.format(self.url, self.start_time))
def __repr__(self):
return ("SlackRequest(team={}, request='{}', post_data={}, retries={}, token='{}', "
"tries={}, start_time={})").format(self.team, self.request, self.post_data,
self.retries, token_for_print(self.token), self.tries, self.start_time)
def request_string(self):
return "{}".format(self.url)
def tried(self):
self.tries += 1
self.response_id = sha1_hex("{}{}".format(self.url, time.time()))
def should_try(self):
return self.tries < self.retries
def retry_ready(self):
return (self.start_time + (self.tries**2)) < time.time()
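# Illustrative sketch of how the retry bookkeeping above is meant to be driven
# by a queue; _example_drive_retries is a hypothetical helper and is not called
# anywhere in this script. retry_ready() implements a quadratic backoff of
# tries**2 seconds counted from start_time.
def _example_drive_retries(request):
    if request.should_try() and request.retry_ready():
        # tried() bumps the attempt counter and refreshes response_id so the
        # reply can be re-matched on the way back through
        request.tried()
        return True  # caller would re-send request.url now
    return False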
class SlackSubteam(object):
"""
Represents a slack group or subteam
"""
def __init__(self, originating_team_id, is_member, **kwargs):
self.handle = '@{}'.format(kwargs['handle'])
self.identifier = kwargs['id']
self.name = kwargs['name']
self.description = kwargs.get('description')
self.team_id = originating_team_id
self.is_member = is_member
def __repr__(self):
return "Name:{} Identifier:{}".format(self.name, self.identifier)
def __eq__(self, compare_str):
return compare_str == self.identifier
class SlackTeam(object):
"""
incomplete
    Team object under which users and channels live. Handles the websocket
    connection, the team buffer, and most team-wide state.
"""
def __init__(self, eventrouter, token, team_hash, websocket_url, team_info, subteams, nick, myidentifier, my_manual_presence, users, bots, channels, **kwargs):
self.slack_api_translator = copy.deepcopy(SLACK_API_TRANSLATOR)
self.identifier = team_info["id"]
self.type = "team"
self.active = True
self.team_hash = team_hash
self.ws_url = websocket_url
self.connected = False
self.connecting_rtm = False
self.connecting_ws = False
self.ws = None
self.ws_counter = 0
self.ws_replies = {}
self.last_ping_time = 0
self.last_pong_time = time.time()
self.eventrouter = eventrouter
self.token = token
self.team = self
self.subteams = subteams
self.team_info = team_info
self.subdomain = team_info["domain"]
self.domain = self.subdomain + ".slack.com"
self.set_name()
self.nick = nick
self.myidentifier = myidentifier
self.my_manual_presence = my_manual_presence
        # On the first connect self.channels doesn't exist yet; on a reconnect
        # it does, and we merge in any channels we don't already have.
        if getattr(self, 'channels', None):
            for c in channels.keys():
                if not self.channels.get(c):
                    self.channels[c] = channels[c]
        else:
            self.channels = channels
self.users = users
self.bots = bots
self.channel_buffer = None
self.got_history = True
self.history_needs_update = False
self.create_buffer()
self.set_muted_channels(kwargs.get('muted_channels', ""))
self.set_highlight_words(kwargs.get('highlight_words', ""))
for c in self.channels.keys():
channels[c].set_related_server(self)
channels[c].check_should_open()
# Last step is to make sure my nickname is the set color
self.users[self.myidentifier].force_color(w.config_string(w.config_get('weechat.color.chat_nick_self')))
        # Loading emoji completions must happen after we have set the related server
self.load_emoji_completions()
def __repr__(self):
return "domain={} nick={}".format(self.subdomain, self.nick)
def __eq__(self, compare_str):
return compare_str == self.token or compare_str == self.domain or compare_str == self.subdomain
@property
def members(self):
return self.users.keys()
def load_emoji_completions(self):
self.emoji_completions = list(EMOJI.keys())
if self.emoji_completions:
s = SlackRequest(self, "emoji.list")
self.eventrouter.receive(s)
def add_channel(self, channel):
self.channels[channel["id"]] = channel
channel.set_related_server(self)
def generate_usergroup_map(self):
return {s.handle: s.identifier for s in self.subteams.values()}
def set_name(self):
alias = config.server_aliases.get(self.subdomain)
if alias:
self.name = alias
elif config.short_buffer_names:
self.name = self.subdomain
else:
self.name = "slack.{}".format(self.subdomain)
def create_buffer(self):
if not self.channel_buffer:
self.channel_buffer = w.buffer_new(self.name, "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
w.buffer_set(self.channel_buffer, "input_multiline", "1")
w.buffer_set(self.channel_buffer, "localvar_set_type", 'server')
w.buffer_set(self.channel_buffer, "localvar_set_slack_type", self.type)
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.nick)
w.buffer_set(self.channel_buffer, "localvar_set_server", self.name)
self.buffer_merge()
def buffer_merge(self, config_value=None):
if not config_value:
config_value = w.config_string(w.config_get('irc.look.server_buffer'))
if config_value == 'merge_with_core':
w.buffer_merge(self.channel_buffer, w.buffer_search_main())
else:
w.buffer_unmerge(self.channel_buffer, 0)
def destroy_buffer(self, update_remote):
pass
def set_muted_channels(self, muted_str):
self.muted_channels = {x for x in muted_str.split(',') if x}
for channel in self.channels.values():
channel.set_highlights()
channel.rename()
def set_highlight_words(self, highlight_str):
self.highlight_words = {x for x in highlight_str.split(',') if x}
for channel in self.channels.values():
channel.set_highlights()
def formatted_name(self):
return self.domain
def buffer_prnt(self, data, message=False):
tag_name = "team_message" if message else "team_info"
ts = SlackTS()
w.prnt_date_tags(self.channel_buffer, ts.major, tag(ts, tag_name), data)
def send_message(self, message, subtype=None, request_dict_ext={}):
w.prnt("", "ERROR: Sending a message in the team buffer is not supported")
def find_channel_by_members(self, members, channel_type=None):
for channel in self.channels.values():
if channel.members == members and (
channel_type is None or channel.type == channel_type):
return channel
def get_channel_map(self):
return {v.name: k for k, v in self.channels.items()}
def get_username_map(self):
return {v.name: k for k, v in self.users.items()}
def get_team_hash(self):
return self.team_hash
@staticmethod
def generate_team_hash(team_id, subdomain):
return str(sha1_hex("{}{}".format(team_id, subdomain)))
def refresh(self):
pass
    def is_user_present(self, user_id):
        user = self.users.get(user_id)
        return bool(user and user.presence == 'active')
def mark_read(self, ts=None, update_remote=True, force=False):
pass
def connect(self, reconnect=False):
if not self.connected and not self.connecting_ws:
if self.ws_url:
self.connecting_ws = True
try:
# only http proxy is currently supported
proxy = ProxyWrapper()
timeout = config.slack_timeout / 1000
                    if proxy.has_proxy:
ws = create_connection(self.ws_url, timeout=timeout, sslopt=sslopt_ca_certs, http_proxy_host=proxy.proxy_address, http_proxy_port=proxy.proxy_port, http_proxy_auth=(proxy.proxy_user, proxy.proxy_password))
else:
ws = create_connection(self.ws_url, timeout=timeout, sslopt=sslopt_ca_certs)
self.hook = w.hook_fd(ws.sock.fileno(), 1, 0, 0, "receive_ws_callback", self.get_team_hash())
ws.sock.setblocking(0)
except:
w.prnt(self.channel_buffer,
'Failed connecting to slack team {}, retrying.'.format(self.domain))
dbg('connect failed with exception:\n{}'.format(format_exc_tb()), level=5)
return False
finally:
self.connecting_ws = False
self.ws = ws
self.set_reconnect_url(None)
self.set_connected()
elif not self.connecting_rtm:
# The fast reconnect failed, so start over-ish
for chan in self.channels:
self.channels[chan].history_needs_update = True
s = initiate_connection(self.token, retries=999, team=self, reconnect=reconnect)
self.eventrouter.receive(s)
self.connecting_rtm = True
def set_connected(self):
self.connected = True
self.last_pong_time = time.time()
self.buffer_prnt('Connected to Slack team {} ({}) with username {}'.format(
self.team_info["name"], self.domain, self.nick))
dbg("connected to {}".format(self.domain))
if config.background_load_all_history:
for channel in self.channels.values():
if channel.channel_buffer:
channel.get_history(slow_queue=True)
else:
current_channel = self.eventrouter.weechat_controller.buffers.get(w.current_buffer())
if isinstance(current_channel, SlackChannelCommon) and current_channel.team == self:
current_channel.get_history(slow_queue=True)
def set_disconnected(self):
w.unhook(self.hook)
self.connected = False
def set_reconnect_url(self, url):
self.ws_url = url
def next_ws_transaction_id(self):
self.ws_counter += 1
return self.ws_counter
def send_to_websocket(self, data, expect_reply=True):
data["id"] = self.next_ws_transaction_id()
message = json.dumps(data)
try:
if expect_reply:
self.ws_replies[data["id"]] = data
self.ws.send(encode_to_utf8(message))
dbg("Sent {}...".format(message[:100]))
except (WebSocketConnectionClosedException, socket.error) as e:
handle_socket_error(e, self, 'send')
def update_member_presence(self, user, presence):
user.presence = presence
for c in self.channels:
c = self.channels[c]
if user.id in c.members:
c.buffer_name_needs_update = True
c.update_nicklist(user.id)
def subscribe_users_presence(self):
# FIXME: There is a limitation in the API to the size of the
# json we can send.
# We should try to be smarter to fetch the users whom we want to
# subscribe to.
users = list(self.users.keys())[:750]
if self.myidentifier not in users:
users.append(self.myidentifier)
self.send_to_websocket({
"type": "presence_sub",
"ids": users,
}, expect_reply=False)
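# Quick sketch (hypothetical helper, not called anywhere): a team's identity is
# a stable sha1 of its id and subdomain, which is how handle_rtmstart below
# decides whether to reuse an existing SlackTeam across reconnects.
def _example_team_identity():
    return SlackTeam.generate_team_hash("T024BE7LD", "myteam")  # assumed example values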
class SlackChannelCommon(object):
def __init__(self):
self.label_full_drop_prefix = False
self.label_full = None
self.label_short_drop_prefix = False
self.label_short = None
self.buffer_rename_in_progress = False
def prnt_message(self, message, history_message=False, no_log=False, force_render=False):
text = self.render(message, force_render)
thread_channel = isinstance(self, SlackThreadChannel)
if message.subtype == "join":
tagset = "join"
prefix = w.prefix("join").strip()
elif message.subtype == "leave":
tagset = "leave"
prefix = w.prefix("quit").strip()
elif message.subtype == "topic":
tagset = "topic"
prefix = w.prefix("network").strip()
else:
channel_type = self.parent_channel.type if thread_channel else self.type
if channel_type in ["im", "mpim"]:
tagset = "dm"
else:
tagset = "channel"
if message.subtype == "me_message":
prefix = w.prefix("action").rstrip()
else:
prefix = message.sender
extra_tags = None
if message.subtype == "thread_broadcast":
extra_tags = [message.subtype]
elif type(message) == SlackThreadMessage and not thread_channel:
if config.thread_messages_in_channel:
extra_tags = [message.subtype]
else:
return
self.buffer_prnt(prefix, text, message.ts, tagset=tagset,
tag_nick=message.sender_plain, history_message=history_message,
no_log=no_log, extra_tags=extra_tags)
def print_getting_history(self):
if self.channel_buffer:
ts = SlackTS()
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "0")
w.prnt_date_tags(self.channel_buffer, ts.major,
tag(ts, backlog=True, no_log=True), '\tgetting channel history...')
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "1")
def reprint_messages(self, history_message=False, no_log=True, force_render=False):
if self.channel_buffer:
w.buffer_clear(self.channel_buffer)
for message in self.visible_messages.values():
self.prnt_message(message, history_message, no_log, force_render)
if (self.identifier in self.pending_history_requests or
config.thread_messages_in_channel and self.pending_history_requests):
self.print_getting_history()
def send_message(self, message, subtype=None, request_dict_ext={}):
if subtype == 'me_message':
message = linkify_text(message, self.team, escape_characters=False)
s = SlackRequest(self.team, "chat.meMessage", {"channel": self.identifier, "text": message}, channel=self)
self.eventrouter.receive(s)
else:
message = linkify_text(message, self.team)
request = {"type": "message", "channel": self.identifier,
"text": message, "user": self.team.myidentifier}
request.update(request_dict_ext)
self.team.send_to_websocket(request)
def send_add_reaction(self, msg_id, reaction):
self.send_change_reaction("reactions.add", msg_id, reaction)
def send_remove_reaction(self, msg_id, reaction):
self.send_change_reaction("reactions.remove", msg_id, reaction)
def send_change_reaction(self, method, msg_id, reaction):
message = self.message_from_hash_or_index(msg_id)
if message is None:
print_message_not_found_error(msg_id)
return
reaction_name = replace_emoji_with_string(reaction)
if method == "toggle":
reaction = message.get_reaction(reaction_name)
if reaction and self.team.myidentifier in reaction["users"]:
method = "reactions.remove"
else:
method = "reactions.add"
data = {"channel": self.identifier, "timestamp": message.ts, "name": reaction_name}
s = SlackRequest(self.team, method, data, channel=self, metadata={'reaction': reaction})
self.eventrouter.receive(s)
def edit_nth_previous_message(self, msg_id, old, new, flags):
message_filter = lambda message: message.user_identifier == self.team.myidentifier
message = self.message_from_hash_or_index(msg_id, message_filter)
if message is None:
if msg_id:
print_error("Invalid id given, must be an existing id to one of your " +
"messages or a number greater than 0 and less than the number " +
"of your messages in the channel")
else:
print_error("You don't have any messages in this channel")
return
if new == "" and old == "":
post_data = {"channel": self.identifier, "ts": message.ts}
s = SlackRequest(self.team, "chat.delete", post_data, channel=self)
self.eventrouter.receive(s)
else:
num_replace = 0 if 'g' in flags else 1
f = re.UNICODE
f |= re.IGNORECASE if 'i' in flags else 0
f |= re.MULTILINE if 'm' in flags else 0
f |= re.DOTALL if 's' in flags else 0
old_message_text = message.message_json["text"]
new_message_text = re.sub(old, new, old_message_text, num_replace, f)
if new_message_text != old_message_text:
post_data = {"channel": self.identifier, "ts": message.ts, "text": new_message_text}
s = SlackRequest(self.team, "chat.update", post_data, channel=self)
self.eventrouter.receive(s)
else:
print_error("The regex didn't match any part of the message")
def message_from_hash(self, ts_hash, message_filter=None):
if not ts_hash:
return
ts_hash_without_prefix = ts_hash[1:] if ts_hash[0] == "$" else ts_hash
ts = self.hashed_messages.get(ts_hash_without_prefix)
message = self.messages.get(ts)
if message is None:
return
if message_filter and not message_filter(message):
return
return message
def message_from_index(self, index, message_filter=None, reverse=True):
for ts in (reversed(self.visible_messages) if reverse else self.visible_messages):
message = self.messages[ts]
if not message_filter or message_filter(message):
index -= 1
if index == 0:
return message
def message_from_hash_or_index(self, hash_or_index=None, message_filter=None, reverse=True):
message = self.message_from_hash(hash_or_index, message_filter)
if not message:
if not hash_or_index:
index = 1
elif hash_or_index.isdigit():
index = int(hash_or_index)
else:
return
message = self.message_from_index(index, message_filter, reverse)
return message
def change_message(self, ts, message_json=None, text=None):
ts = SlackTS(ts)
m = self.messages.get(ts)
if not m:
return
if message_json:
m.message_json.update(message_json)
if text:
m.change_text(text)
if (type(m) == SlackMessage or m.subtype == "thread_broadcast"
or config.thread_messages_in_channel):
new_text = self.render(m, force=True)
modify_buffer_line(self.channel_buffer, ts, new_text)
if type(m) == SlackThreadMessage or m.thread_channel is not None:
thread_channel = (m.parent_message.thread_channel
if isinstance(m, SlackThreadMessage) else m.thread_channel)
if thread_channel and thread_channel.active:
new_text = thread_channel.render(m, force=True)
modify_buffer_line(thread_channel.channel_buffer, ts, new_text)
def mark_read(self, ts=None, update_remote=True, force=False, post_data={}):
if self.new_messages or force:
if self.channel_buffer:
w.buffer_set(self.channel_buffer, "unread", "")
w.buffer_set(self.channel_buffer, "hotlist", "-1")
if not ts:
ts = next(reversed(self.messages), SlackTS())
if ts > self.last_read:
self.last_read = SlackTS(ts)
if update_remote:
args = {"channel": self.identifier, "ts": ts}
args.update(post_data)
mark_method = self.team.slack_api_translator[self.type].get("mark")
if mark_method:
s = SlackRequest(self.team, mark_method, args, channel=self)
self.eventrouter.receive(s)
self.new_messages = False
def destroy_buffer(self, update_remote):
self.channel_buffer = None
self.got_history = False
self.active = False
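# Illustrative sketch (hypothetical helper, not called anywhere): commands can
# address a message either by its $-prefixed hash or by a 1-based index counted
# from the newest message, which message_from_hash_or_index() above resolves.
def _example_message_lookup(channel):
    latest = channel.message_from_hash_or_index()         # no argument -> newest
    second = channel.message_from_hash_or_index("2")      # 1-based reverse index
    by_hash = channel.message_from_hash_or_index("$1a2")  # "$1a2" is an assumed hash
    return latest, second, by_hash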
class SlackChannel(SlackChannelCommon):
"""
Represents an individual slack channel.
"""
def __init__(self, eventrouter, channel_type="channel", **kwargs):
super(SlackChannel, self).__init__()
self.active = False
for key, value in kwargs.items():
setattr(self, key, value)
self.eventrouter = eventrouter
self.team = kwargs.get('team')
self.identifier = kwargs["id"]
self.type = channel_type
self.set_name(kwargs["name"])
self.slack_purpose = kwargs.get("purpose", {"value": ""})
self.topic = kwargs.get("topic", {"value": ""})
self.last_read = SlackTS(kwargs.get("last_read", 0))
self.channel_buffer = None
self.got_history = False
self.history_needs_update = False
self.pending_history_requests = set()
self.messages = OrderedDict()
self.visible_messages = SlackChannelVisibleMessages(self)
self.hashed_messages = SlackChannelHashedMessages(self)
self.thread_channels = {}
self.new_messages = False
        # typing tracks who is currently typing; it drives the typing
        # indication shown in the buffer's short name
        self.typing = {}
        self.set_members(kwargs.get('members', []))
self.unread_count_display = 0
self.last_line_from = None
self.buffer_name_needs_update = False
self.last_refresh_typing = False
    def __eq__(self, compare_str):
        return (compare_str == self.slack_name
                or compare_str == self.formatted_name()
                or compare_str == self.formatted_name(style="long_default"))
def __repr__(self):
return "Name:{} Identifier:{}".format(self.name, self.identifier)
@property
def muted(self):
return self.identifier in self.team.muted_channels
def set_name(self, slack_name):
self.slack_name = slack_name
self.name = self.formatted_name()
self.buffer_name_needs_update = True
def refresh(self):
typing = self.is_someone_typing()
if self.buffer_name_needs_update or typing != self.last_refresh_typing:
self.last_refresh_typing = typing
self.buffer_name_needs_update = False
self.rename(typing)
def rename(self, typing=None):
if self.channel_buffer:
self.buffer_rename_in_progress = True
if typing is None:
typing = self.is_someone_typing()
present = self.team.is_user_present(self.user) if self.type == "im" else None
name = self.formatted_name("long_default", typing, present)
short_name = self.formatted_name("sidebar", typing, present)
w.buffer_set(self.channel_buffer, "name", name)
w.buffer_set(self.channel_buffer, "short_name", short_name)
self.buffer_rename_in_progress = False
def set_members(self, members):
self.members = set(members)
self.update_nicklist()
def set_unread_count_display(self, count):
self.unread_count_display = count
self.new_messages = bool(self.unread_count_display)
if self.muted and config.muted_channels_activity != "all":
return
for c in range(self.unread_count_display):
if self.type in ["im", "mpim"]:
w.buffer_set(self.channel_buffer, "hotlist", "2")
else:
w.buffer_set(self.channel_buffer, "hotlist", "1")
def formatted_name(self, style="default", typing=False, present=None):
show_typing = typing and not self.muted and config.channel_name_typing_indicator
if style == "sidebar" and show_typing:
prepend = ">"
elif self.type == "group" or self.type == "private":
prepend = config.group_name_prefix
elif self.type == "shared":
prepend = config.shared_name_prefix
elif self.type == "im":
if style != "sidebar":
prepend = ""
elif present and config.show_buflist_presence:
prepend = "+"
elif config.channel_name_typing_indicator or config.show_buflist_presence:
prepend = " "
else:
prepend = ""
elif self.type == "mpim":
if style == "sidebar":
prepend = "@"
else:
prepend = ""
else:
prepend = "#"
name = self.label_full or self.slack_name
if style == "sidebar":
name = self.label_short or name
if self.label_short_drop_prefix:
if show_typing:
name = prepend + name[1:]
elif self.type == "im" and present and config.show_buflist_presence and name[0] == " ":
name = prepend + name[1:]
else:
name = prepend + name
if self.muted:
sidebar_color = config.color_buflist_muted_channels
elif self.type == "im" and config.colorize_private_chats:
sidebar_color = self.color_name
else:
sidebar_color = ""
return colorize_string(sidebar_color, name)
elif style == "long_default":
if self.label_full_drop_prefix:
return name
else:
return "{}.{}{}".format(self.team.name, prepend, name)
else:
if self.label_full_drop_prefix:
return name
else:
return prepend + name
def render_topic(self, fallback_to_purpose=False):
topic = self.topic['value']
if not topic and fallback_to_purpose:
topic = self.slack_purpose['value']
return unhtmlescape(unfurl_refs(topic))
def set_topic(self, value=None):
if value is not None:
self.topic = {"value": value}
if self.channel_buffer:
topic = self.render_topic(fallback_to_purpose=True)
w.buffer_set(self.channel_buffer, "title", topic)
def update_from_message_json(self, message_json):
for key, value in message_json.items():
setattr(self, key, value)
def open(self, update_remote=True):
if update_remote:
join_method = self.team.slack_api_translator[self.type].get("join")
if join_method:
s = SlackRequest(self.team, join_method, {"channel": self.identifier}, channel=self)
self.eventrouter.receive(s)
self.create_buffer()
self.active = True
self.get_history()
def check_should_open(self, force=False):
if hasattr(self, "is_archived") and self.is_archived:
return
if force:
self.create_buffer()
return
# Only check is_member if is_open is not set, because in some cases
# (e.g. group DMs), is_member should be ignored in favor of is_open.
is_open = self.is_open if hasattr(self, "is_open") else self.is_member
if is_open or self.unread_count_display:
self.create_buffer()
def set_related_server(self, team):
self.team = team
def highlights(self):
nick_highlights = {'@' + self.team.nick, self.team.myidentifier}
subteam_highlights = {subteam.handle for subteam in self.team.subteams.values()
if subteam.is_member}
highlights = nick_highlights | subteam_highlights | self.team.highlight_words
if self.muted and config.muted_channels_activity == "personal_highlights":
return highlights
else:
return highlights | {"@channel", "@everyone", "@group", "@here"}
def set_highlights(self):
# highlight my own name and any set highlights
if self.channel_buffer:
h_str = ",".join(self.highlights())
w.buffer_set(self.channel_buffer, "highlight_words", h_str)
if self.muted and config.muted_channels_activity != "all":
notify_level = "0" if config.muted_channels_activity == "none" else "1"
w.buffer_set(self.channel_buffer, "notify", notify_level)
else:
buffer_full_name = w.buffer_get_string(self.channel_buffer, "full_name")
w.command(self.channel_buffer, "/mute /unset weechat.notify.{}".format(buffer_full_name))
if self.muted and config.muted_channels_activity == "none":
w.buffer_set(self.channel_buffer, "highlight_tags_restrict", "highlight_force")
else:
w.buffer_set(self.channel_buffer, "highlight_tags_restrict", "")
for thread_channel in self.thread_channels.values():
thread_channel.set_highlights(h_str)
def create_buffer(self):
"""
Creates the weechat buffer where the channel magic happens.
"""
if not self.channel_buffer:
self.active = True
self.channel_buffer = w.buffer_new(self.formatted_name(style="long_default"), "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
w.buffer_set(self.channel_buffer, "input_multiline", "1")
w.buffer_set(self.channel_buffer, "localvar_set_type", get_localvar_type(self.type))
w.buffer_set(self.channel_buffer, "localvar_set_slack_type", self.type)
w.buffer_set(self.channel_buffer, "localvar_set_channel", self.formatted_name())
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.team.nick)
self.buffer_rename_in_progress = True
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar"))
self.buffer_rename_in_progress = False
self.set_highlights()
self.set_topic()
if self.channel_buffer:
w.buffer_set(self.channel_buffer, "localvar_set_server", self.team.name)
self.update_nicklist()
info_method = self.team.slack_api_translator[self.type].get("info")
if info_method:
s = SlackRequest(self.team, info_method, {"channel": self.identifier}, channel=self)
self.eventrouter.receive(s)
if self.type == "im":
join_method = self.team.slack_api_translator[self.type].get("join")
if join_method:
s = SlackRequest(self.team, join_method, {"users": self.user, "return_im": True}, channel=self)
self.eventrouter.receive(s)
def destroy_buffer(self, update_remote):
super(SlackChannel, self).destroy_buffer(update_remote)
self.messages = OrderedDict()
if update_remote and not self.eventrouter.shutting_down:
s = SlackRequest(self.team, self.team.slack_api_translator[self.type]["leave"],
{"channel": self.identifier}, channel=self)
self.eventrouter.receive(s)
def buffer_prnt(self, nick, text, timestamp, tagset, tag_nick=None, history_message=False, no_log=False, extra_tags=None):
data = "{}\t{}".format(format_nick(nick, self.last_line_from), text)
self.last_line_from = nick
ts = SlackTS(timestamp)
# without this, DMs won't open automatically
if not self.channel_buffer and ts > self.last_read:
self.open(update_remote=False)
if self.channel_buffer:
# backlog messages - we will update the read marker as we print these
backlog = ts <= self.last_read
if not backlog:
self.new_messages = True
no_log = no_log or history_message and backlog
self_msg = tag_nick == self.team.nick
tags = tag(ts, tagset, user=tag_nick, self_msg=self_msg, backlog=backlog, no_log=no_log, extra_tags=extra_tags)
if (config.unhide_buffers_with_activity
and not self.is_visible() and not self.muted):
w.buffer_set(self.channel_buffer, "hidden", "0")
if no_log:
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "0")
w.prnt_date_tags(self.channel_buffer, ts.major, tags, data)
if no_log:
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "1")
if backlog or self_msg:
self.mark_read(ts, update_remote=False, force=True)
def store_message(self, message_to_store):
if not self.active:
return
old_message = self.messages.get(message_to_store.ts)
if old_message and old_message.submessages and not message_to_store.submessages:
message_to_store.submessages = old_message.submessages
self.messages[message_to_store.ts] = message_to_store
self.messages = OrderedDict(sorted(self.messages.items()))
max_history = w.config_integer(w.config_get("weechat.history.max_buffer_lines_number"))
messages_to_check = islice(self.messages.items(),
max(0, len(self.messages) - max_history))
messages_to_delete = []
for (ts, message) in messages_to_check:
if ts == message_to_store.ts:
pass
elif isinstance(message, SlackThreadMessage):
thread_channel = self.thread_channels.get(message.thread_ts)
if thread_channel is None or not thread_channel.active:
messages_to_delete.append(ts)
elif message.number_of_replies():
if ((message.thread_channel is None or not message.thread_channel.active) and
not any(submessage in self.messages for submessage in message.submessages)):
messages_to_delete.append(ts)
else:
messages_to_delete.append(ts)
for ts in messages_to_delete:
message_hash = self.hashed_messages.get(ts)
if message_hash:
del self.hashed_messages[ts]
del self.hashed_messages[message_hash]
del self.messages[ts]
def is_visible(self):
return w.buffer_get_integer(self.channel_buffer, "hidden") == 0
def get_history(self, slow_queue=False, full=False, no_log=False):
if self.identifier in self.pending_history_requests:
return
self.print_getting_history()
self.pending_history_requests.add(self.identifier)
post_data = {"channel": self.identifier, "count": config.history_fetch_count}
if self.got_history and self.messages and not full:
post_data["oldest"] = next(reversed(self.messages))
s = SlackRequest(self.team, self.team.slack_api_translator[self.type]["history"],
post_data, channel=self, metadata={"slow_queue": slow_queue, "no_log": no_log})
self.eventrouter.receive(s, slow_queue)
self.got_history = True
self.history_needs_update = False
def get_thread_history(self, thread_ts, slow_queue=False, no_log=False):
if thread_ts in self.pending_history_requests:
return
if config.thread_messages_in_channel:
self.print_getting_history()
thread_channel = self.thread_channels.get(thread_ts)
if thread_channel and thread_channel.active:
thread_channel.print_getting_history()
self.pending_history_requests.add(thread_ts)
post_data = {"channel": self.identifier, "ts": thread_ts,
"limit": config.history_fetch_count}
s = SlackRequest(self.team, "conversations.replies",
post_data, channel=self,
metadata={"thread_ts": thread_ts, "no_log": no_log})
self.eventrouter.receive(s, slow_queue)
# Typing related
def set_typing(self, user):
if self.channel_buffer and self.is_visible():
self.typing[user.name] = time.time()
self.buffer_name_needs_update = True
def is_someone_typing(self):
"""
Walks through dict of typing folks in a channel and fast
returns if any of them is actively typing. If none are,
nulls the dict and returns false.
"""
typing_expire_time = time.time() - TYPING_DURATION
for timestamp in self.typing.values():
if timestamp > typing_expire_time:
return True
if self.typing:
self.typing = {}
return False
    def get_typing_list(self):
        """
        Returns the names of everyone in the channel who is currently typing.
        """
        typing_expire_time = time.time() - TYPING_DURATION
        typing = []
        # iterate over a copy since expired entries are deleted as we go
        for user, timestamp in list(self.typing.items()):
            if timestamp > typing_expire_time:
                typing.append(user)
            else:
                del self.typing[user]
        return typing
def user_joined(self, user_id):
# ugly hack - for some reason this gets turned into a list
self.members = set(self.members)
self.members.add(user_id)
self.update_nicklist(user_id)
def user_left(self, user_id):
self.members.discard(user_id)
self.update_nicklist(user_id)
def update_nicklist(self, user=None):
if not self.channel_buffer:
return
if self.type not in ["channel", "group", "mpim", "private", "shared"]:
return
w.buffer_set(self.channel_buffer, "nicklist", "1")
# create nicklists for the current channel if they don't exist
# if they do, use the existing pointer
here = w.nicklist_search_group(self.channel_buffer, '', NICK_GROUP_HERE)
if not here:
here = w.nicklist_add_group(self.channel_buffer, '', NICK_GROUP_HERE, "weechat.color.nicklist_group", 1)
afk = w.nicklist_search_group(self.channel_buffer, '', NICK_GROUP_AWAY)
if not afk:
afk = w.nicklist_add_group(self.channel_buffer, '', NICK_GROUP_AWAY, "weechat.color.nicklist_group", 1)
# Add External nicklist group only for shared channels
if self.type == 'shared':
external = w.nicklist_search_group(self.channel_buffer, '', NICK_GROUP_EXTERNAL)
if not external:
external = w.nicklist_add_group(self.channel_buffer, '', NICK_GROUP_EXTERNAL, 'weechat.color.nicklist_group', 2)
if user and len(self.members) < 1000:
user = self.team.users.get(user)
# External users that have left shared channels won't exist
if not user or user.deleted:
return
nick = w.nicklist_search_nick(self.channel_buffer, "", user.name)
            # since this is a change, just remove the nick regardless of where it is
            w.nicklist_remove_nick(self.channel_buffer, nick)
            # then add it back to whichever group is appropriate
nick_group = afk
if user.is_external:
nick_group = external
elif self.team.is_user_present(user.identifier):
nick_group = here
if user.identifier in self.members:
w.nicklist_add_nick(self.channel_buffer, nick_group, user.name, user.color_name, "", "", 1)
# if we didn't get a user, build a complete list. this is expensive.
else:
if len(self.members) < 1000:
try:
for user in self.members:
user = self.team.users.get(user)
if user.deleted:
continue
nick_group = afk
if user.is_external:
nick_group = external
elif self.team.is_user_present(user.identifier):
nick_group = here
w.nicklist_add_nick(self.channel_buffer, nick_group, user.name, user.color_name, "", "", 1)
except:
dbg("DEBUG: {} {} {}".format(self.identifier, self.name, format_exc_only()))
else:
w.nicklist_remove_all(self.channel_buffer)
for fn in ["1| too", "2| many", "3| users", "4| to", "5| show"]:
w.nicklist_add_group(self.channel_buffer, '', fn, w.color('white'), 1)
def render(self, message, force=False):
text = message.render(force)
if isinstance(message, SlackThreadMessage):
thread_hash = self.hashed_messages[message.thread_ts]
hash_str = colorize_string(
get_thread_color(str(thread_hash)), '[{}]'.format(thread_hash))
return '{} {}'.format(hash_str, text)
return text
class SlackChannelVisibleMessages(MappingReversible):
"""
Class with a reversible mapping interface (like a read-only OrderedDict)
which doesn't include the messages older than first_ts_to_display.
"""
def __init__(self, channel):
self.channel = channel
self.first_ts_to_display = SlackTS(0)
def __getitem__(self, key):
if key < self.first_ts_to_display:
raise KeyError(key)
return self.channel.messages[key]
def _is_visible(self, ts):
if ts < self.first_ts_to_display:
return False
message = self.get(ts)
if (type(message) == SlackThreadMessage and message.subtype != "thread_broadcast" and
not config.thread_messages_in_channel):
return False
return True
def __iter__(self):
for ts in self.channel.messages:
if self._is_visible(ts):
yield ts
def __len__(self):
i = 0
for _ in self:
i += 1
return i
def __reversed__(self):
for ts in reversed(self.channel.messages):
if self._is_visible(ts):
yield ts
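# Sketch (hypothetical helper, not called anywhere): visible_messages iterates
# in timestamp order and supports reversed(), so the newest visible message is
# cheap to find without materializing the filtered view.
def _example_latest_visible(channel):
    latest_ts = next(reversed(channel.visible_messages), None)
    return channel.messages.get(latest_ts) if latest_ts else None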
class SlackChannelHashedMessages(dict):
def __init__(self, channel):
self.channel = channel
def __missing__(self, key):
if not isinstance(key, SlackTS):
raise KeyError(key)
hash_len = 3
full_hash = sha1_hex(str(key))
short_hash = full_hash[:hash_len]
while any(x.startswith(short_hash) for x in self if isinstance(x, str)):
hash_len += 1
short_hash = full_hash[:hash_len]
if short_hash[:-1] in self:
ts_with_same_hash = self.pop(short_hash[:-1])
other_full_hash = sha1_hex(str(ts_with_same_hash))
other_short_hash = other_full_hash[:hash_len]
while short_hash == other_short_hash:
hash_len += 1
short_hash = full_hash[:hash_len]
other_short_hash = other_full_hash[:hash_len]
self[other_short_hash] = ts_with_same_hash
self[ts_with_same_hash] = other_short_hash
other_message = self.channel.messages.get(ts_with_same_hash)
if other_message:
self.channel.change_message(other_message.ts)
if other_message.thread_channel:
other_message.thread_channel.rename()
for thread_message in other_message.submessages:
self.channel.change_message(thread_message)
self[short_hash] = key
self[key] = short_hash
return self[key]
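# Usage sketch (hypothetical helper, not called anywhere): looking up a SlackTS
# that has no hash yet triggers __missing__ above, which assigns a short sha1
# prefix and stores the mapping in both directions; colliding prefixes are
# lengthened for both messages involved.
def _example_hash_roundtrip(channel):
    ts = SlackTS("1585775001.000200")          # assumed example timestamp
    short_hash = channel.hashed_messages[ts]   # created on demand
    assert channel.hashed_messages[short_hash] == ts
    return short_hash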
class SlackDMChannel(SlackChannel):
"""
Subclass of a normal channel for person-to-person communication, which
has some important differences.
"""
def __init__(self, eventrouter, users, **kwargs):
dmuser = kwargs["user"]
kwargs["name"] = users[dmuser].name if dmuser in users else dmuser
super(SlackDMChannel, self).__init__(eventrouter, "im", **kwargs)
self.update_color()
self.members = {self.user}
if dmuser in users:
self.set_topic(create_user_status_string(users[dmuser].profile))
def set_related_server(self, team):
super(SlackDMChannel, self).set_related_server(team)
if self.user not in self.team.users:
s = SlackRequest(self.team, 'users.info', {'user': self.user}, channel=self)
self.eventrouter.receive(s)
def create_buffer(self):
if not self.channel_buffer:
super(SlackDMChannel, self).create_buffer()
w.buffer_set(self.channel_buffer, "localvar_set_type", 'private')
def update_color(self):
if config.colorize_private_chats:
self.color_name = get_nick_color(self.name)
else:
self.color_name = ""
def open(self, update_remote=True):
self.create_buffer()
self.get_history()
info_method = self.team.slack_api_translator[self.type].get("info")
if info_method:
s = SlackRequest(self.team, info_method, {"name": self.identifier}, channel=self)
self.eventrouter.receive(s)
if update_remote:
join_method = self.team.slack_api_translator[self.type].get("join")
if join_method:
s = SlackRequest(self.team, join_method, {"users": self.user, "return_im": True}, channel=self)
self.eventrouter.receive(s)
class SlackGroupChannel(SlackChannel):
"""
A group channel is a private discussion group.
"""
def __init__(self, eventrouter, channel_type="group", **kwargs):
super(SlackGroupChannel, self).__init__(eventrouter, channel_type, **kwargs)
class SlackPrivateChannel(SlackGroupChannel):
"""
A private channel is a private discussion group. At the time of writing, it
differs from group channels in that group channels are channels initially
created as private, while private channels are public channels which are
later converted to private.
"""
def __init__(self, eventrouter, **kwargs):
super(SlackPrivateChannel, self).__init__(eventrouter, "private", **kwargs)
def get_history(self, slow_queue=False, full=False, no_log=False):
# Fetch members since they aren't included in rtm.start
s = SlackRequest(self.team, 'conversations.members', {'channel': self.identifier}, channel=self)
self.eventrouter.receive(s)
super(SlackPrivateChannel, self).get_history(slow_queue, full, no_log)
class SlackMPDMChannel(SlackChannel):
"""
An MPDM channel is a special instance of a 'group' channel.
We change the name to look less terrible in weechat.
"""
def __init__(self, eventrouter, team_users, myidentifier, **kwargs):
kwargs["name"] = ','.join(sorted(
getattr(team_users.get(user_id), 'name', user_id)
for user_id in kwargs["members"]
if user_id != myidentifier
))
super(SlackMPDMChannel, self).__init__(eventrouter, "mpim", **kwargs)
def open(self, update_remote=True):
self.create_buffer()
self.active = True
self.get_history()
info_method = self.team.slack_api_translator[self.type].get("info")
if info_method:
s = SlackRequest(self.team, info_method, {"channel": self.identifier}, channel=self)
self.eventrouter.receive(s)
if update_remote:
join_method = self.team.slack_api_translator[self.type].get("join")
if join_method:
s = SlackRequest(self.team, join_method, {'users': ','.join(self.members)}, channel=self)
self.eventrouter.receive(s)
class SlackSharedChannel(SlackChannel):
def __init__(self, eventrouter, **kwargs):
super(SlackSharedChannel, self).__init__(eventrouter, "shared", **kwargs)
def get_history(self, slow_queue=False, full=False, no_log=False):
# Get info for external users in the channel
for user in self.members - set(self.team.users.keys()):
s = SlackRequest(self.team, 'users.info', {'user': user}, channel=self)
self.eventrouter.receive(s)
# Fetch members since they aren't included in rtm.start
s = SlackRequest(self.team, 'conversations.members', {'channel': self.identifier}, channel=self)
self.eventrouter.receive(s)
super(SlackSharedChannel, self).get_history(slow_queue, full, no_log)
class SlackThreadChannel(SlackChannelCommon):
"""
    A thread channel is a virtual channel. We don't inherit from
    SlackChannel, because most of how it operates is different.
"""
def __init__(self, eventrouter, parent_channel, thread_ts):
super(SlackThreadChannel, self).__init__()
self.active = False
self.eventrouter = eventrouter
self.parent_channel = parent_channel
self.thread_ts = thread_ts
self.messages = SlackThreadChannelMessages(self)
self.channel_buffer = None
self.type = "thread"
self.got_history = False
self.history_needs_update = False
self.team = self.parent_channel.team
self.last_line_from = None
self.new_messages = False
self.buffer_name_needs_update = False
@property
def members(self):
return self.parent_channel.members
@property
def parent_message(self):
return self.parent_channel.messages[self.thread_ts]
@property
def hashed_messages(self):
return self.parent_channel.hashed_messages
@property
def last_read(self):
return self.parent_message.last_read
@last_read.setter
def last_read(self, ts):
self.parent_message.last_read = ts
@property
def identifier(self):
return self.parent_channel.identifier
@property
def visible_messages(self):
return self.messages
@property
def muted(self):
return self.parent_channel.muted
@property
def pending_history_requests(self):
if self.thread_ts in self.parent_channel.pending_history_requests:
return {self.identifier, self.thread_ts}
else:
return set()
def formatted_name(self, style="default"):
name = self.label_full or self.parent_message.hash
if style == "sidebar":
name = self.label_short or name
if self.label_short_drop_prefix:
return name
else:
indent_expr = w.config_string(w.config_get("buflist.format.indent"))
# Only indent with space if slack_type isn't mentioned in the indent option
indent = "" if "slack_type" in indent_expr else " "
return "{}${}".format(indent, name)
elif style == "long_default":
if self.label_full_drop_prefix:
return name
else:
channel_name = self.parent_channel.formatted_name(style="long_default")
return "{}.{}".format(channel_name, name)
else:
if self.label_full_drop_prefix:
return name
else:
channel_name = self.parent_channel.formatted_name()
return "{}.{}".format(channel_name, name)
def mark_read(self, ts=None, update_remote=True, force=False, post_data={}):
if not self.parent_message.subscribed:
return
args = {"thread_ts": self.thread_ts}
args.update(post_data)
super(SlackThreadChannel, self).mark_read(ts=ts, update_remote=update_remote, force=force, post_data=args)
def buffer_prnt(self, nick, text, timestamp, tagset, tag_nick=None, history_message=False, no_log=False, extra_tags=None):
data = "{}\t{}".format(format_nick(nick, self.last_line_from), text)
self.last_line_from = nick
ts = SlackTS(timestamp)
if self.channel_buffer:
# backlog messages - we will update the read marker as we print these
backlog = ts <= self.last_read
if not backlog:
self.new_messages = True
no_log = no_log or history_message and backlog
self_msg = tag_nick == self.team.nick
tags = tag(ts, tagset, user=tag_nick, self_msg=self_msg, backlog=backlog, no_log=no_log, extra_tags=extra_tags)
if no_log:
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "0")
w.prnt_date_tags(self.channel_buffer, ts.major, tags, data)
if no_log:
w.buffer_set(self.channel_buffer, "print_hooks_enabled", "1")
if backlog or self_msg:
self.mark_read(ts, update_remote=False, force=True)
def get_history(self, slow_queue=False, full=False, no_log=False):
self.got_history = True
self.history_needs_update = False
any_msg_is_none = any(message is None for message in self.messages.values())
if not any_msg_is_none:
self.reprint_messages(history_message=True, no_log=no_log)
if (full or any_msg_is_none or
len(self.parent_message.submessages) < self.parent_message.number_of_replies()):
self.parent_channel.get_thread_history(self.thread_ts, slow_queue, no_log)
def send_message(self, message, subtype=None, request_dict_ext={}):
if subtype == 'me_message':
w.prnt("", "ERROR: /me is not supported in threads")
return w.WEECHAT_RC_ERROR
request = {"thread_ts": str(self.thread_ts)}
request.update(request_dict_ext)
super(SlackThreadChannel, self).send_message(message, subtype, request)
def open(self, update_remote=True):
self.create_buffer()
self.active = True
self.get_history()
def refresh(self):
if self.buffer_name_needs_update:
self.buffer_name_needs_update = False
self.rename()
def rename(self):
if self.channel_buffer:
self.buffer_rename_in_progress = True
w.buffer_set(self.channel_buffer, "name", self.formatted_name(style="long_default"))
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar"))
self.buffer_rename_in_progress = False
def set_highlights(self, highlight_string=None):
if self.channel_buffer:
if highlight_string is None:
highlight_string = ",".join(self.parent_channel.highlights())
w.buffer_set(self.channel_buffer, "highlight_words", highlight_string)
def create_buffer(self):
"""
Creates the weechat buffer where the thread magic happens.
"""
if not self.channel_buffer:
self.channel_buffer = w.buffer_new(self.formatted_name(style="long_default"), "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
w.buffer_set(self.channel_buffer, "input_multiline", "1")
w.buffer_set(self.channel_buffer, "localvar_set_type", get_localvar_type(self.parent_channel.type))
w.buffer_set(self.channel_buffer, "localvar_set_slack_type", self.type)
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.team.nick)
w.buffer_set(self.channel_buffer, "localvar_set_channel", self.formatted_name())
w.buffer_set(self.channel_buffer, "localvar_set_server", self.team.name)
self.buffer_rename_in_progress = True
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar"))
self.buffer_rename_in_progress = False
self.set_highlights()
time_format = w.config_string(w.config_get("weechat.look.buffer_time_format"))
parent_time = time.localtime(SlackTS(self.thread_ts).major)
topic = '{} {} | {}'.format(time.strftime(time_format, parent_time),
self.parent_message.sender, self.render(self.parent_message))
w.buffer_set(self.channel_buffer, "title", topic)
def destroy_buffer(self, update_remote):
super(SlackThreadChannel, self).destroy_buffer(update_remote)
if update_remote and not self.eventrouter.shutting_down:
self.mark_read()
def render(self, message, force=False):
return message.render(force)
class SlackThreadChannelMessages(MappingReversible):
"""
Class with a reversible mapping interface (like a read-only OrderedDict)
which looks up messages using the parent channel and parent message.
"""
def __init__(self, thread_channel):
self.thread_channel = thread_channel
@property
def _parent_message(self):
return self.thread_channel.parent_message
def __getitem__(self, key):
if key != self._parent_message.ts and key not in self._parent_message.submessages:
raise KeyError(key)
return self.thread_channel.parent_channel.messages[key]
def __iter__(self):
yield self._parent_message.ts
for ts in self._parent_message.submessages:
yield ts
def __len__(self):
return 1 + len(self._parent_message.submessages)
def __reversed__(self):
for ts in reversed(self._parent_message.submessages):
yield ts
yield self._parent_message.ts
class SlackUser(object):
"""
    Represents an individual slack user. Also where you set their name formatting.
"""
def __init__(self, originating_team_id, **kwargs):
self.identifier = kwargs["id"]
# These attributes may be missing in the response, so we have to make
# sure they're set
self.profile = {}
self.presence = kwargs.get("presence", "unknown")
self.deleted = kwargs.get("deleted", False)
self.is_external = (not kwargs.get("is_bot") and
kwargs.get("team_id") != originating_team_id)
for key, value in kwargs.items():
setattr(self, key, value)
self.name = nick_from_profile(self.profile, kwargs["name"])
self.username = kwargs["name"]
self.update_color()
def __repr__(self):
return "Name:{} Identifier:{}".format(self.name, self.identifier)
def force_color(self, color_name):
self.color_name = color_name
def update_color(self):
# This will automatically be none/"" if the user has disabled nick
# colourization.
self.color_name = get_nick_color(self.name)
def update_status(self, status_emoji, status_text):
self.profile["status_emoji"] = status_emoji
self.profile["status_text"] = status_text
def formatted_name(self, prepend="", enable_color=True):
name = prepend + self.name
if enable_color:
return colorize_string(self.color_name, name)
else:
return name
class SlackBot(SlackUser):
"""
    Basically the same as a user, but kept as a separate class so bots can
    be identified and handled differently where needed
"""
def __init__(self, originating_team_id, **kwargs):
super(SlackBot, self).__init__(originating_team_id, is_bot=True, **kwargs)
class SlackMessage(object):
"""
Represents a single slack message and associated context/metadata.
These are modifiable and can be rerendered to change a message,
delete a message, add a reaction, add a thread.
Note: these can't be tied to a SlackUser object because users
can be deleted, so we have to store sender in each one.
"""
def __init__(self, subtype, message_json, channel):
self.team = channel.team
self.channel = channel
self.subtype = subtype
self.user_identifier = message_json.get('user')
self.message_json = message_json
self.submessages = []
self.ts = SlackTS(message_json['ts'])
self.subscribed = message_json.get("subscribed", False)
self.last_read = SlackTS(message_json.get("last_read", 0))
self.last_notify = SlackTS(0)
def __hash__(self):
return hash(self.ts)
@property
def hash(self):
return self.channel.hashed_messages[self.ts]
@property
def thread_channel(self):
return self.channel.thread_channels.get(self.ts)
def open_thread(self, switch=False):
if not self.thread_channel or not self.thread_channel.active:
self.channel.thread_channels[self.ts] = SlackThreadChannel(EVENTROUTER, self.channel, self.ts)
self.thread_channel.open()
if switch:
w.buffer_set(self.thread_channel.channel_buffer, "display", "1")
def render(self, force=False):
# If we already have a rendered version in the object, just return that.
if not force and self.message_json.get("_rendered_text"):
return self.message_json["_rendered_text"]
blocks = self.message_json.get("blocks", [])
blocks_rendered = "\n".join(unfurl_blocks(blocks))
has_rich_text = any(block["type"] == "rich_text" for block in blocks)
if has_rich_text:
text = self.message_json.get("text", "")
if blocks_rendered:
if text:
text += "\n"
text += blocks_rendered
elif blocks_rendered:
text = blocks_rendered
else:
text = self.message_json.get("text", "")
if self.message_json.get('mrkdwn', True):
text = render_formatting(text)
if (self.message_json.get('subtype') in ('channel_join', 'group_join') and
self.message_json.get('inviter')):
inviter_id = self.message_json.get('inviter')
text += " by invitation from <@{}>".format(inviter_id)
text = unfurl_refs(text)
if (self.subtype == 'me_message' and
not self.message_json['text'].startswith(self.sender)):
text = "{} {}".format(self.sender, text)
if "edited" in self.message_json:
text += " " + colorize_string(config.color_edited_suffix, '(edited)')
text += unfurl_refs(unwrap_attachments(self.message_json, text))
text += unfurl_refs(unwrap_files(self.message_json, text))
text = unhtmlescape(text.lstrip().replace("\t", " "))
text += create_reactions_string(
self.message_json.get("reactions", ""), self.team.myidentifier)
if self.number_of_replies():
text += " " + colorize_string(get_thread_color(self.hash), "[ Thread: {} Replies: {}{} ]".format(
self.hash, self.number_of_replies(), " Subscribed" if self.subscribed else ""))
text = replace_string_with_emoji(text)
self.message_json["_rendered_text"] = text
return text
def change_text(self, new_text):
self.message_json["text"] = new_text
dbg(self.message_json)
def get_sender(self, plain):
user = self.team.users.get(self.user_identifier)
if user:
name = "{}".format(user.formatted_name(enable_color=not plain))
if user.is_external:
name += config.external_user_suffix
return name
elif 'username' in self.message_json:
username = self.message_json["username"]
if plain:
return username
elif self.message_json.get("subtype") == "bot_message":
return "{} :]".format(username)
else:
return "-{}-".format(username)
elif 'service_name' in self.message_json:
service_name = self.message_json["service_name"]
if plain:
return service_name
else:
return "-{}-".format(service_name)
elif self.message_json.get('bot_id') in self.team.bots:
bot = self.team.bots[self.message_json["bot_id"]]
name = bot.formatted_name(enable_color=not plain)
if plain:
return name
else:
return "{} :]".format(name)
return ""
@property
def sender(self):
return self.get_sender(False)
@property
def sender_plain(self):
return self.get_sender(True)
def get_reaction(self, reaction_name):
for reaction in self.message_json.get("reactions", []):
if reaction["name"] == reaction_name:
return reaction
return None
def add_reaction(self, reaction_name, user):
reaction = self.get_reaction(reaction_name)
if reaction:
if user not in reaction["users"]:
reaction["users"].append(user)
else:
if "reactions" not in self.message_json:
self.message_json["reactions"] = []
self.message_json["reactions"].append({"name": reaction_name, "users": [user]})
def remove_reaction(self, reaction_name, user):
reaction = self.get_reaction(reaction_name)
if user in reaction["users"]:
reaction["users"].remove(user)
def has_mention(self):
return w.string_has_highlight(unfurl_refs(self.message_json.get('text')),
",".join(self.channel.highlights()))
def number_of_replies(self):
return max(len(self.submessages), self.message_json.get("reply_count", 0))
def notify_thread(self, message=None):
if message is None:
if not self.submessages:
return
message = self.channel.messages.get(self.submessages[-1])
if (self.thread_channel and self.thread_channel.active or
message.ts <= self.last_read or message.ts <= self.last_notify):
return
if message.has_mention():
template = "You were mentioned in thread {hash}, channel {channel}"
elif self.subscribed:
template = "New message in thread {hash}, channel {channel} to which you are subscribed"
else:
return
self.last_notify = max(message.ts, SlackTS())
if config.auto_open_threads:
self.open_thread()
if message.user_identifier != self.team.myidentifier and (config.notify_subscribed_threads == True or
config.notify_subscribed_threads == "auto" and not config.auto_open_threads and
not config.thread_messages_in_channel):
message = template.format(hash=self.hash, channel=self.channel.formatted_name())
self.team.buffer_prnt(message, message=True)
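# Rendering sketch (hypothetical helper, not called anywhere): render() caches
# its result in message_json["_rendered_text"], so after mutating a message the
# cache has to be bypassed with force=True, which is what change_message() does.
def _example_rerender(channel, ts):
    message = channel.messages.get(SlackTS(ts))
    if message:
        message.change_text("edited body")     # "edited body" is an assumed value
        return channel.render(message, force=True)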
class SlackThreadMessage(SlackMessage):
def __init__(self, parent_channel, thread_ts, message_json, *args):
subtype = message_json.get('subtype',
'thread_broadcast' if message_json.get("reply_broadcast") else 'thread_message')
super(SlackThreadMessage, self).__init__(subtype, message_json, *args)
self.parent_channel = parent_channel
self.thread_ts = thread_ts
@property
def parent_message(self):
return self.parent_channel.messages.get(self.thread_ts)
class Hdata(object):
def __init__(self, w):
self.buffer = w.hdata_get('buffer')
self.line = w.hdata_get('line')
self.line_data = w.hdata_get('line_data')
self.lines = w.hdata_get('lines')
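# Slack timestamps look like "1534663948.000185": seconds since the epoch
# ("major") plus a per-second sequence number ("minor"). SlackTS stores both
# parts as ints so that messages compare and sort chronologically, e.g.
# SlackTS("1534663948.000185") < SlackTS("1534663949.000001").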
class SlackTS(object):
def __init__(self, ts=None):
if isinstance(ts, int):
self.major = ts
self.minor = 0
elif ts is not None:
self.major, self.minor = [int(x) for x in ts.split('.', 1)]
else:
self.major = int(time.time())
self.minor = 0
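    # Python 3 does not call __cmp__ itself; it is kept as a helper for the
    # rich comparison methods below and returns -1/0/1 like the old protocol.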
def __cmp__(self, other):
if isinstance(other, SlackTS):
if self.major < other.major:
return -1
elif self.major > other.major:
return 1
elif self.major == other.major:
if self.minor < other.minor:
return -1
elif self.minor > other.minor:
return 1
else:
return 0
elif isinstance(other, str):
s = self.__str__()
if s < other:
return -1
elif s > other:
return 1
elif s == other:
return 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __le__(self, other):
return self.__cmp__(other) <= 0
def __eq__(self, other):
return self.__cmp__(other) == 0
def __ne__(self, other):
return self.__cmp__(other) != 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __hash__(self):
return hash("{}.{}".format(self.major, self.minor))
def __repr__(self):
return str("{0}.{1:06d}".format(self.major, self.minor))
def split(self, *args, **kwargs):
return [self.major, self.minor]
def majorstr(self):
return str(self.major)
def minorstr(self):
return str(self.minor)
###### New handlers
def handle_rtmstart(login_data, eventrouter, team, channel, metadata):
"""
This handles the main entry call to slack, rtm.start
"""
metadata = login_data["wee_slack_request_metadata"]
if not login_data["ok"]:
w.prnt("", "ERROR: Failed connecting to Slack with token {}: {}"
.format(token_for_print(metadata.token), login_data["error"]))
if not re.match(r"^xo\w\w(-\d+){3}-[0-9a-f]+$", metadata.token):
w.prnt("", "ERROR: Token does not look like a valid Slack token. "
"Ensure it is a valid token and not just a OAuth code.")
return
self_profile = next(
user["profile"]
for user in login_data["users"]
if user["id"] == login_data["self"]["id"]
)
self_nick = nick_from_profile(self_profile, login_data["self"]["name"])
# Let's reuse a team if we have it already.
th = SlackTeam.generate_team_hash(login_data['team']['id'], login_data['team']['domain'])
if not eventrouter.teams.get(th):
users = {}
for item in login_data["users"]:
users[item["id"]] = SlackUser(login_data['team']['id'], **item)
bots = {}
for item in login_data["bots"]:
bots[item["id"]] = SlackBot(login_data['team']['id'], **item)
subteams = {}
for item in login_data["subteams"]["all"]:
is_member = item['id'] in login_data["subteams"]["self"]
subteams[item['id']] = SlackSubteam(
login_data['team']['id'], is_member=is_member, **item)
channels = {}
for item in login_data["channels"]:
if item["is_shared"]:
channels[item["id"]] = SlackSharedChannel(eventrouter, **item)
elif item["is_private"]:
channels[item["id"]] = SlackPrivateChannel(eventrouter, **item)
else:
channels[item["id"]] = SlackChannel(eventrouter, **item)
for item in login_data["ims"]:
channels[item["id"]] = SlackDMChannel(eventrouter, users, **item)
for item in login_data["groups"]:
if item["is_mpim"]:
channels[item["id"]] = SlackMPDMChannel(eventrouter, users, login_data["self"]["id"], **item)
else:
channels[item["id"]] = SlackGroupChannel(eventrouter, **item)
t = SlackTeam(
eventrouter,
metadata.token,
th,
login_data['url'],
login_data["team"],
subteams,
self_nick,
login_data["self"]["id"],
login_data["self"]["manual_presence"],
users,
bots,
channels,
muted_channels=login_data["self"]["prefs"]["muted_channels"],
highlight_words=login_data["self"]["prefs"]["highlight_words"],
)
eventrouter.register_team(t)
else:
t = eventrouter.teams.get(th)
if t.myidentifier != login_data["self"]["id"]:
print_error(
'The Slack team {} has tokens for two different users, this is not supported. The '
'token {} is for user {}, and the token {} is for user {}. Please remove one of '
'them.'.format(t.team_info["name"], token_for_print(t.token), t.nick,
token_for_print(metadata.token), self_nick)
)
return
elif not metadata.metadata.get('reconnect'):
print_error(
'Ignoring duplicate Slack tokens for the same team ({}) and user ({}). The two '
'tokens are {} and {}.'.format(t.team_info["name"], t.nick,
token_for_print(t.token), token_for_print(metadata.token)),
warning=True
)
return
else:
t.set_reconnect_url(login_data['url'])
t.connecting_rtm = False
t.connect(metadata.metadata['reconnect'])
def handle_rtmconnect(login_data, eventrouter, team, channel, metadata):
metadata = login_data["wee_slack_request_metadata"]
team = metadata.team
team.connecting_rtm = False
if not login_data["ok"]:
w.prnt("", "ERROR: Failed reconnecting to Slack with token {}: {}"
.format(token_for_print(metadata.token), login_data["error"]))
return
team.set_reconnect_url(login_data['url'])
team.connect(metadata.metadata['reconnect'])
def handle_emojilist(emoji_json, eventrouter, team, channel, metadata):
if emoji_json["ok"]:
team.emoji_completions.extend(emoji_json["emoji"].keys())
def handle_channelsinfo(channel_json, eventrouter, team, channel, metadata):
channel.set_unread_count_display(channel_json['channel'].get('unread_count_display', 0))
channel.set_members(channel_json['channel']['members'])
def handle_groupsinfo(group_json, eventrouter, team, channel, metadata):
channel.set_unread_count_display(group_json['group'].get('unread_count_display', 0))
channel.set_members(group_json['group']['members'])
def handle_conversationsopen(conversation_json, eventrouter, team, channel, metadata, object_name='channel'):
# Set unread count if the channel isn't new
if channel:
unread_count_display = conversation_json[object_name].get('unread_count_display', 0)
channel.set_unread_count_display(unread_count_display)
def handle_mpimopen(mpim_json, eventrouter, team, channel, metadata, object_name='group'):
handle_conversationsopen(mpim_json, eventrouter, team, channel, metadata, object_name)
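# Shared handler for the *.history API responses. handle_conversationshistory
# passes includes_threads=False since conversations.history does not return
# thread replies; in that case the thread history is fetched separately for
# messages whose threads are shown in the channel or subscribed to.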
def handle_history(message_json, eventrouter, team, channel, metadata, includes_threads=True):
channel.got_history = True
channel.history_needs_update = False
for message in reversed(message_json["messages"]):
message = process_message(message, eventrouter, team, channel, metadata, history_message=True)
if (not includes_threads and message and message.number_of_replies() and
(config.thread_messages_in_channel or message.subscribed and
SlackTS(message.message_json.get("latest_reply", 0)) > message.last_read)):
channel.get_thread_history(message.ts, metadata["slow_queue"], metadata["no_log"])
channel.pending_history_requests.discard(channel.identifier)
if channel.visible_messages.first_ts_to_display.major == 0 and message_json["messages"]:
channel.visible_messages.first_ts_to_display = SlackTS(message_json["messages"][-1]["ts"])
channel.reprint_messages(history_message=True, no_log=metadata["no_log"])
for thread_channel in channel.thread_channels.values():
thread_channel.reprint_messages(history_message=True, no_log=metadata["no_log"])
handle_channelshistory = handle_history
handle_groupshistory = handle_history
handle_imhistory = handle_history
handle_mpimhistory = handle_history
def handle_conversationshistory(message_json, eventrouter, team, channel, metadata, includes_threads=True):
handle_history(message_json, eventrouter, team, channel, metadata, False)
def handle_conversationsreplies(message_json, eventrouter, team, channel, metadata):
for message in message_json['messages']:
process_message(message, eventrouter, team, channel, metadata, history_message=True)
channel.pending_history_requests.discard(metadata.get('thread_ts'))
thread_channel = channel.thread_channels.get(metadata.get('thread_ts'))
if thread_channel and thread_channel.active:
thread_channel.got_history = True
thread_channel.history_needs_update = False
thread_channel.reprint_messages(history_message=True, no_log=metadata["no_log"])
if config.thread_messages_in_channel:
channel.reprint_messages(history_message=True, no_log=metadata["no_log"])
def handle_conversationsmembers(members_json, eventrouter, team, channel, metadata):
if members_json['ok']:
channel.set_members(members_json['members'])
else:
w.prnt(team.channel_buffer, '{}Couldn\'t load members for channel {}. Error: {}'
.format(w.prefix('error'), channel.name, members_json['error']))
def handle_usersinfo(user_json, eventrouter, team, channel, metadata):
user_info = user_json['user']
if not metadata.get('user'):
user = SlackUser(team.identifier, **user_info)
team.users[user_info['id']] = user
if channel.type == 'shared':
channel.update_nicklist(user_info['id'])
elif channel.type == 'im':
channel.set_name(user.name)
channel.set_topic(create_user_status_string(user.profile))
def handle_usergroupsuserslist(users_json, eventrouter, team, channel, metadata):
header = 'Users in {}'.format(metadata['usergroup_handle'])
users = [team.users[key] for key in users_json['users']]
return print_users_info(team, header, users)
def handle_usersprofileset(json, eventrouter, team, channel, metadata):
if not json['ok']:
w.prnt('', 'ERROR: Failed to set profile: {}'.format(json['error']))
def handle_conversationscreate(json, eventrouter, team, channel, metadata):
metadata = json["wee_slack_request_metadata"]
if not json['ok']:
name = metadata.post_data["name"]
print_error("Couldn't create channel {}: {}".format(name, json['error']))
def handle_conversationsinvite(json, eventrouter, team, channel, metadata):
nicks = ', '.join(metadata['nicks'])
if json['ok']:
w.prnt(team.channel_buffer, 'Invited {} to {}'.format(nicks, channel.name))
else:
w.prnt(team.channel_buffer, 'ERROR: Couldn\'t invite {} to {}. Error: {}'
.format(nicks, channel.name, json['error']))
def handle_chatcommand(json, eventrouter, team, channel, metadata):
command = '{} {}'.format(metadata['command'], metadata['command_args']).rstrip()
response = unfurl_refs(json['response']) if 'response' in json else ''
if json['ok']:
response_text = 'Response: {}'.format(response) if response else 'No response'
        w.prnt(team.channel_buffer, 'Ran command "{}". {}'.format(command, response_text))
else:
response_text = '. Response: {}'.format(response) if response else ''
w.prnt(team.channel_buffer, 'ERROR: Couldn\'t run command "{}". Error: {}{}'
.format(command, json['error'], response_text))
def handle_chatdelete(json, eventrouter, team, channel, metadata):
if not json['ok']:
print_error("Couldn't delete message: {}".format(json['error']))
def handle_chatupdate(json, eventrouter, team, channel, metadata):
if not json['ok']:
print_error("Couldn't change message: {}".format(json['error']))
def handle_reactionsadd(json, eventrouter, team, channel, metadata):
if not json['ok']:
print_error("Couldn't add reaction {}: {}".format(metadata['reaction'], json['error']))
def handle_reactionsremove(json, eventrouter, team, channel, metadata):
if not json['ok']:
print_error("Couldn't remove reaction {}: {}".format(metadata['reaction'], json['error']))
def handle_subscriptionsthreadmark(json, eventrouter, team, channel, metadata):
if not json["ok"]:
if json['error'] == 'not_allowed_token_type':
team.slack_api_translator['thread']['mark'] = None
else:
print_error("Couldn't set thread read status: {}".format(json['error']))
def handle_subscriptionsthreadadd(json, eventrouter, team, channel, metadata):
if not json["ok"]:
if json['error'] == 'not_allowed_token_type':
print_error("Can only subscribe to a thread when using a session token, see the readme: https://github.com/wee-slack/wee-slack#4-add-your-slack-api-tokens")
else:
print_error("Couldn't add thread subscription: {}".format(json['error']))
def handle_subscriptionsthreadremove(json, eventrouter, team, channel, metadata):
if not json["ok"]:
if json['error'] == 'not_allowed_token_type':
print_error("Can only unsubscribe from a thread when using a session token, see the readme: https://github.com/wee-slack/wee-slack#4-add-your-slack-api-tokens")
else:
print_error("Couldn't remove thread subscription: {}".format(json['error']))
###### New/converted process_ and subprocess_ methods
def process_hello(message_json, eventrouter, team, channel, metadata):
team.subscribe_users_presence()
def process_reconnect_url(message_json, eventrouter, team, channel, metadata):
team.set_reconnect_url(message_json['url'])
def process_presence_change(message_json, eventrouter, team, channel, metadata):
users = [team.users[user_id] for user_id in message_json.get("users", [])]
if "user" in metadata:
users.append(metadata["user"])
for user in users:
team.update_member_presence(user, message_json["presence"])
if team.myidentifier in users:
w.bar_item_update("away")
w.bar_item_update("slack_away")
def process_manual_presence_change(message_json, eventrouter, team, channel, metadata):
team.my_manual_presence = message_json["presence"]
w.bar_item_update("away")
w.bar_item_update("slack_away")
def process_pref_change(message_json, eventrouter, team, channel, metadata):
if message_json['name'] == 'muted_channels':
team.set_muted_channels(message_json['value'])
elif message_json['name'] == 'highlight_words':
team.set_highlight_words(message_json['value'])
else:
dbg("Preference change not implemented: {}\n".format(message_json['name']))
def process_user_change(message_json, eventrouter, team, channel, metadata):
"""
Currently only used to update status, but lots here we could do.
"""
user = metadata['user']
profile = message_json['user']['profile']
if user:
user.update_status(profile.get('status_emoji'), profile.get('status_text'))
dmchannel = team.find_channel_by_members({user.identifier}, channel_type='im')
if dmchannel:
dmchannel.set_topic(create_user_status_string(profile))
def process_user_typing(message_json, eventrouter, team, channel, metadata):
if channel and metadata["user"]:
channel.set_typing(metadata["user"])
w.bar_item_update("slack_typing_notice")
def process_team_join(message_json, eventrouter, team, channel, metadata):
user = message_json['user']
team.users[user["id"]] = SlackUser(team.identifier, **user)
def process_pong(message_json, eventrouter, team, channel, metadata):
team.last_pong_time = time.time()
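# Entry point for messages from both the RTM stream and history responses.
# Thread replies (a thread_ts but no reply_count) are routed to
# subprocess_thread_message; otherwise the subtype selects a subprocess_*
# handler, falling back to a plain SlackMessage.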
def process_message(message_json, eventrouter, team, channel, metadata, history_message=False):
if not history_message and "ts" in message_json and SlackTS(message_json["ts"]) in channel.messages:
return
subtype = message_json.get("subtype")
subtype_functions = get_functions_with_prefix("subprocess_")
if "thread_ts" in message_json and "reply_count" not in message_json:
message = subprocess_thread_message(message_json, eventrouter, team, channel, history_message)
elif subtype in subtype_functions:
message = subtype_functions[subtype](message_json, eventrouter, team, channel, history_message)
else:
message = SlackMessage(subtype or "normal", message_json, channel)
channel.store_message(message)
channel.unread_count_display += 1
if message and not history_message:
channel.prnt_message(message, history_message)
if not history_message:
download_files(message_json, team)
return message
def download_files(message_json, team):
download_location = config.files_download_location
if not download_location:
return
download_location = w.string_eval_path_home(download_location, {}, {}, {})
if not os.path.exists(download_location):
try:
os.makedirs(download_location)
        except Exception:
w.prnt('', 'ERROR: Failed to create directory at files_download_location: {}'
.format(format_exc_only()))
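    # Yield candidate file names: the plain name first, then name-1.ext,
    # name-2.ext, ... so an existing download is never overwritten.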
def fileout_iter(path):
yield path
main, ext = os.path.splitext(path)
for i in count(start=1):
yield main + "-{}".format(i) + ext
for f in message_json.get('files', []):
if f.get('mode') == 'tombstone':
continue
filetype = '' if f['title'].endswith(f['filetype']) else '.' + f['filetype']
filename = '{}_{}{}'.format(team.name, f['title'], filetype)
for fileout in fileout_iter(os.path.join(download_location, filename)):
if os.path.isfile(fileout):
continue
w.hook_process_hashtable(
"url:" + f['url_private'],
{
'file_out': fileout,
'httpheader': 'Authorization: Bearer ' + team.token
},
config.slack_timeout, "", "")
break
def subprocess_thread_message(message_json, eventrouter, team, channel, history_message):
parent_ts = SlackTS(message_json['thread_ts'])
message = SlackThreadMessage(channel, parent_ts, message_json, channel)
parent_message = message.parent_message
if parent_message and message.ts not in parent_message.submessages:
parent_message.submessages.append(message.ts)
parent_message.submessages.sort()
channel.store_message(message)
if parent_message:
channel.change_message(parent_ts)
if parent_message.thread_channel and parent_message.thread_channel.active:
if not history_message:
parent_message.thread_channel.prnt_message(message, history_message)
else:
parent_message.notify_thread(message)
else:
channel.get_thread_history(parent_ts)
return message
subprocess_thread_broadcast = subprocess_thread_message
def subprocess_channel_join(message_json, eventrouter, team, channel, history_message):
message = SlackMessage("join", message_json, channel)
channel.store_message(message)
channel.user_joined(message_json["user"])
return message
def subprocess_channel_leave(message_json, eventrouter, team, channel, history_message):
message = SlackMessage("leave", message_json, channel)
channel.store_message(message)
channel.user_left(message_json["user"])
return message
def subprocess_channel_topic(message_json, eventrouter, team, channel, history_message):
message = SlackMessage("topic", message_json, channel)
channel.store_message(message)
channel.set_topic(message_json["topic"])
return message
subprocess_group_join = subprocess_channel_join
subprocess_group_leave = subprocess_channel_leave
subprocess_group_topic = subprocess_channel_topic
def subprocess_message_replied(message_json, eventrouter, team, channel, history_message):
pass
def subprocess_message_changed(message_json, eventrouter, team, channel, history_message):
new_message = message_json.get("message")
channel.change_message(new_message["ts"], message_json=new_message)
def subprocess_message_deleted(message_json, eventrouter, team, channel, history_message):
message = colorize_string(config.color_deleted, '(deleted)')
channel.change_message(message_json["deleted_ts"], text=message)
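# Messages sent over the websocket are acknowledged with a numeric reply_to
# instead of a regular event. Look up the original outgoing message in
# team.ws_replies and, on success, feed it through process_message so our own
# message is rendered.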
def process_reply(message_json, eventrouter, team, channel, metadata):
reply_to = int(message_json["reply_to"])
original_message_json = team.ws_replies.pop(reply_to, None)
if original_message_json:
dbg("REPLY {}".format(message_json))
channel = team.channels[original_message_json.get('channel')]
if message_json["ok"]:
original_message_json.update(message_json)
process_message(original_message_json, eventrouter, team=team, channel=channel, metadata={})
else:
print_error("Couldn't send message to channel {}: {}".format(channel.name, message_json["error"]))
else:
dbg("Unexpected reply {}".format(message_json))
def process_channel_marked(message_json, eventrouter, team, channel, metadata):
ts = message_json.get("ts")
if ts:
channel.mark_read(ts=ts, force=True, update_remote=False)
else:
dbg("tried to mark something weird {}".format(message_json))
process_group_marked = process_channel_marked
process_im_marked = process_channel_marked
process_mpim_marked = process_channel_marked
def process_thread_marked(message_json, eventrouter, team, channel, metadata):
subscription = message_json.get("subscription", {})
ts = subscription.get("last_read")
thread_ts = subscription.get("thread_ts")
channel = team.channels.get(subscription.get("channel"))
if ts and thread_ts and channel:
thread_channel = channel.thread_channels.get(SlackTS(thread_ts))
        if thread_channel:
            thread_channel.mark_read(ts=ts, force=True, update_remote=False)
else:
dbg("tried to mark something weird {}".format(message_json))
def process_channel_joined(message_json, eventrouter, team, channel, metadata):
channel.update_from_message_json(message_json["channel"])
channel.open()
def process_channel_created(message_json, eventrouter, team, channel, metadata):
item = message_json["channel"]
item['is_member'] = False
channel = SlackChannel(eventrouter, team=team, **item)
team.channels[item["id"]] = channel
team.buffer_prnt('Channel created: {}'.format(channel.name))
def process_channel_rename(message_json, eventrouter, team, channel, metadata):
channel.set_name(message_json['channel']['name'])
def process_im_created(message_json, eventrouter, team, channel, metadata):
item = message_json["channel"]
channel = SlackDMChannel(eventrouter, team=team, users=team.users, **item)
team.channels[item["id"]] = channel
team.buffer_prnt('IM channel created: {}'.format(channel.name))
def process_im_open(message_json, eventrouter, team, channel, metadata):
channel.check_should_open(True)
w.buffer_set(channel.channel_buffer, "hotlist", "2")
def process_im_close(message_json, eventrouter, team, channel, metadata):
if channel.channel_buffer:
w.prnt(team.channel_buffer,
'IM {} closed by another client or the server'.format(channel.name))
eventrouter.weechat_controller.unregister_buffer(channel.channel_buffer, False, True)
def process_group_joined(message_json, eventrouter, team, channel, metadata):
item = message_json["channel"]
if item["name"].startswith("mpdm-"):
channel = SlackMPDMChannel(eventrouter, team.users, team.myidentifier, team=team, **item)
else:
channel = SlackGroupChannel(eventrouter, team=team, **item)
team.channels[item["id"]] = channel
channel.open()
def process_reaction_added(message_json, eventrouter, team, channel, metadata):
channel = team.channels.get(message_json["item"].get("channel"))
if message_json["item"].get("type") == "message":
ts = SlackTS(message_json['item']["ts"])
message = channel.messages.get(ts)
if message:
message.add_reaction(message_json["reaction"], message_json["user"])
channel.change_message(ts)
else:
dbg("reaction to item type not supported: " + str(message_json))
def process_reaction_removed(message_json, eventrouter, team, channel, metadata):
channel = team.channels.get(message_json["item"].get("channel"))
if message_json["item"].get("type") == "message":
ts = SlackTS(message_json['item']["ts"])
message = channel.messages.get(ts)
if message:
message.remove_reaction(message_json["reaction"], message_json["user"])
channel.change_message(ts)
else:
dbg("Reaction to item type not supported: " + str(message_json))
def process_subteam_created(subteam_json, eventrouter, team, channel, metadata):
subteam_json_info = subteam_json['subteam']
is_member = team.myidentifier in subteam_json_info.get('users', [])
subteam = SlackSubteam(team.identifier, is_member=is_member, **subteam_json_info)
team.subteams[subteam_json_info['id']] = subteam
def process_subteam_updated(subteam_json, eventrouter, team, channel, metadata):
current_subteam_info = team.subteams[subteam_json['subteam']['id']]
is_member = team.myidentifier in subteam_json['subteam'].get('users', [])
new_subteam_info = SlackSubteam(team.identifier, is_member=is_member, **subteam_json['subteam'])
team.subteams[subteam_json['subteam']['id']] = new_subteam_info
if current_subteam_info.is_member != new_subteam_info.is_member:
for channel in team.channels.values():
channel.set_highlights()
if config.notify_usergroup_handle_updated and current_subteam_info.handle != new_subteam_info.handle:
message = 'User group {old_handle} has updated its handle to {new_handle} in team {team}.'.format(
old_handle=current_subteam_info.handle, new_handle=new_subteam_info.handle, team=team.name)
team.buffer_prnt(message, message=True)
def process_emoji_changed(message_json, eventrouter, team, channel, metadata):
team.load_emoji_completions()
def process_thread_subscribed(message_json, eventrouter, team, channel, metadata):
dbg("THREAD SUBSCRIBED {}".format(message_json))
channel = team.channels[message_json["subscription"]["channel"]]
parent_ts = SlackTS(message_json["subscription"]["thread_ts"])
parent_message = channel.messages.get(parent_ts)
if parent_message:
parent_message.last_read = SlackTS(message_json["subscription"]["last_read"])
parent_message.subscribed = True
channel.change_message(parent_ts)
parent_message.notify_thread()
else:
channel.get_thread_history(parent_ts)
def process_thread_unsubscribed(message_json, eventrouter, team, channel, metadata):
dbg("THREAD UNSUBSCRIBED {}".format(message_json))
channel = team.channels[message_json["subscription"]["channel"]]
parent_ts = SlackTS(message_json["subscription"]["thread_ts"])
parent_message = channel.messages.get(parent_ts)
if parent_message:
parent_message.subscribed = False
channel.change_message(parent_ts)
###### New module/global methods
def render_formatting(text):
text = re.sub(r'(^| )\*([^*\n`]+)\*(?=[^\w]|$)',
r'\1{}*\2*{}'.format(w.color(config.render_bold_as),
w.color('-' + config.render_bold_as)),
text,
flags=re.UNICODE)
text = re.sub(r'(^| )_([^_\n`]+)_(?=[^\w]|$)',
r'\1{}_\2_{}'.format(w.color(config.render_italic_as),
w.color('-' + config.render_italic_as)),
text,
flags=re.UNICODE)
return text
def linkify_text(message, team, only_users=False, escape_characters=True):
# The get_username_map function is a bit heavy, but this whole
# function is only called on message send..
usernames = team.get_username_map()
channels = team.get_channel_map()
usergroups = team.generate_usergroup_map()
if escape_characters:
message = (message
# Replace IRC formatting chars with Slack formatting chars.
.replace('\x02', '*')
.replace('\x1D', '_')
.replace('\x1F', config.map_underline_to)
# Escape chars that have special meaning to Slack. Note that we do not
# (and should not) perform full HTML entity-encoding here.
# See https://api.slack.com/docs/message-formatting for details.
.replace('&', '&')
.replace('<', '<')
.replace('>', '>'))
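    # Convert a "@nick" or "#channel" word to Slack syntax, e.g. "@alice" ->
    # "<@U123ABC>" (the user's id), "#general" -> "<#C123ABC|general>", and
    # special mentions like "@here" -> "<!here>". The ids here are
    # illustrative; real ones come from the team maps above.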
def linkify_word(match):
word = match.group(0)
prefix, name = match.groups()
if prefix == "@":
if name in ["channel", "everyone", "group", "here"]:
return "<!{}>".format(name)
elif name in usernames:
return "<@{}>".format(usernames[name])
elif word in usergroups.keys():
return "<!subteam^{}|{}>".format(usergroups[word], word)
elif prefix == "#" and not only_users:
if word in channels:
return "<#{}|{}>".format(channels[word], name)
return word
linkify_regex = r'(?:^|(?<=\s))([@#])([\w\(\)\'.-]+)'
return re.sub(linkify_regex, linkify_word, message, flags=re.UNICODE)
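# Render Slack Block Kit blocks as plain text lines. Only a subset of block
# types is handled; rich_text blocks are skipped because their content is
# already present in the message text, and unknown types produce a visible
# "<<Unsupported block type>>" marker.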
def unfurl_blocks(blocks):
block_text = []
for block in blocks:
try:
if block["type"] == "section":
fields = block.get("fields", [])
if "text" in block:
fields.insert(0, block["text"])
block_text.extend(unfurl_block_element(field) for field in fields)
elif block["type"] == "actions":
elements = []
for element in block["elements"]:
if element["type"] == "button":
elements.append(unfurl_block_element(element["text"]))
else:
elements.append(colorize_string(config.color_deleted,
'<<Unsupported block action type "{}">>'.format(element["type"])))
block_text.append(" | ".join(elements))
elif block["type"] == "call":
block_text.append("Join via " + block["call"]["v1"]["join_url"])
elif block["type"] == "divider":
block_text.append("---")
elif block["type"] == "context":
block_text.append(" | ".join(unfurl_block_element(el) for el in block["elements"]))
elif block["type"] == "image":
if "title" in block:
block_text.append(unfurl_block_element(block["title"]))
block_text.append(unfurl_block_element(block))
elif block["type"] == "rich_text":
continue
else:
block_text.append(colorize_string(config.color_deleted,
'<<Unsupported block type "{}">>'.format(block["type"])))
dbg('Unsupported block: "{}"'.format(json.dumps(block)), level=4)
except Exception as e:
dbg("Failed to unfurl block ({}): {}".format(repr(e), json.dumps(block)), level=4)
return block_text
def unfurl_block_element(text):
if text["type"] == "mrkdwn":
return render_formatting(text["text"])
elif text["type"] == "plain_text":
return text["text"]
elif text["type"] == "image":
return "{} ({})".format(text["image_url"], text["alt_text"])
def unfurl_refs(text):
"""
input : <@U096Q7CQM|someuser> has joined the channel
    output : someuser has joined the channel
"""
# Find all strings enclosed by <>
# - <https://example.com|example with spaces>
# - <#C2147483705|#otherchannel>
# - <@U2147483697|@othernick>
# - <!subteam^U2147483697|@group>
# Test patterns lives in ./_pytest/test_unfurl.py
def unfurl_ref(match):
ref, fallback = match.groups()
resolved_ref = resolve_ref(ref)
if resolved_ref != ref:
return resolved_ref
if fallback and fallback != ref and not config.unfurl_ignore_alt_text:
if ref.startswith("#"):
return "#{}".format(fallback)
elif ref.startswith("@"):
return fallback
elif ref.startswith("!subteam"):
prefix = "@" if not fallback.startswith("@") else ""
return prefix + fallback
elif ref.startswith("!date"):
return fallback
else:
match_url = r"^\w+:(//)?{}$".format(re.escape(fallback))
url_matches_desc = re.match(match_url, ref)
if url_matches_desc and config.unfurl_auto_link_display == "text":
return fallback
elif url_matches_desc and config.unfurl_auto_link_display == "url":
return ref
else:
return "{} ({})".format(ref, fallback)
return ref
return re.sub(r"<([^|>]*)(?:\|([^>]*))?>", unfurl_ref, text)
def unhtmlescape(text):
return text.replace("<", "<") \
.replace(">", ">") \
.replace("&", "&")
def unwrap_attachments(message_json, text_before):
text_before_unescaped = unhtmlescape(text_before)
attachment_texts = []
a = message_json.get("attachments")
if a:
if text_before:
attachment_texts.append('')
for attachment in a:
# Attachments should be rendered roughly like:
#
# $pretext
# $author: (if rest of line is non-empty) $title ($title_link) OR $from_url
# $author: (if no $author on previous line) $text
# $fields
if 'original_url' in attachment and not config.link_previews:
continue
t = []
prepend_title_text = ''
if 'author_name' in attachment:
prepend_title_text = attachment['author_name'] + ": "
if 'pretext' in attachment:
t.append(attachment['pretext'])
link_shown = False
title = attachment.get('title')
title_link = attachment.get('title_link', '')
if title_link and (title_link in text_before or title_link in text_before_unescaped):
title_link = ''
link_shown = True
if title and title_link:
t.append('%s%s (%s)' % (prepend_title_text, title, title_link,))
prepend_title_text = ''
elif title and not title_link:
t.append('%s%s' % (prepend_title_text, title,))
prepend_title_text = ''
from_url = attachment.get('from_url', '')
if (from_url not in text_before and from_url not in text_before_unescaped
and from_url != title_link):
t.append(from_url)
elif from_url:
link_shown = True
atext = attachment.get("text")
if atext:
tx = re.sub(r' *\n[\n ]+', '\n', atext)
t.append(prepend_title_text + tx)
prepend_title_text = ''
blocks = attachment.get("blocks", [])
t.extend(unfurl_blocks(blocks))
image_url = attachment.get('image_url', '')
if (image_url not in text_before and image_url not in text_before_unescaped
and image_url != from_url and image_url != title_link):
t.append(image_url)
elif image_url:
link_shown = True
for field in attachment.get("fields", []):
if field.get('title'):
t.append('{}: {}'.format(field['title'], field['value']))
else:
t.append(field['value'])
files = unwrap_files(attachment, None)
if files:
t.append(files)
footer = attachment.get("footer")
if footer:
ts = attachment.get("ts")
if ts:
                    ts_int = ts if isinstance(ts, int) else SlackTS(ts).major
time_string = ''
if date.today() - date.fromtimestamp(ts_int) <= timedelta(days=1):
time_string = ' at {time}'
timestamp_formatted = resolve_ref('!date^{}^{{date_short_pretty}}{}'
.format(ts_int, time_string)).capitalize()
footer += ' | {}'.format(timestamp_formatted)
t.append(footer)
fallback = attachment.get("fallback")
if t == [] and fallback and not link_shown:
t.append(fallback)
if t:
lines = [line for part in t for line in part.strip().split("\n") if part]
prefix = '|'
line_color = None
color = attachment.get('color')
if color and config.colorize_attachments != "none":
weechat_color = w.info_get("color_rgb2term", str(int(color.lstrip("#"), 16)))
if config.colorize_attachments == "prefix":
prefix = colorize_string(weechat_color, prefix)
elif config.colorize_attachments == "all":
line_color = weechat_color
attachment_texts.extend(
colorize_string(line_color, "{} {}".format(prefix, line))
for line in lines)
return "\n".join(attachment_texts)
def unwrap_files(message_json, text_before):
files_texts = []
for f in message_json.get('files', []):
if f.get('mode', '') == 'tombstone':
text = colorize_string(config.color_deleted, '(This file was deleted.)')
elif f.get('mode', '') == 'hidden_by_limit':
text = colorize_string(config.color_deleted, '(This file is hidden because the workspace has passed its storage limit.)')
elif f.get('url_private', None) is not None and f.get('title', None) is not None:
text = '{} ({})'.format(f['url_private'], f['title'])
else:
dbg('File {} has unrecognized mode {}'.format(f['id'], f['mode']), 5)
text = colorize_string(config.color_deleted, '(This file cannot be handled.)')
files_texts.append(text)
if text_before:
files_texts.insert(0, '')
return "\n".join(files_texts)
def resolve_ref(ref):
if ref in ['!channel', '!everyone', '!group', '!here']:
return ref.replace('!', '@')
for team in EVENTROUTER.teams.values():
if ref.startswith('@'):
user = team.users.get(ref[1:])
if user:
suffix = config.external_user_suffix if user.is_external else ''
return '@{}{}'.format(user.name, suffix)
elif ref.startswith('#'):
channel = team.channels.get(ref[1:])
if channel:
return channel.name
elif ref.startswith('!subteam'):
_, subteam_id = ref.split('^')
subteam = team.subteams.get(subteam_id)
if subteam:
return subteam.handle
elif ref.startswith("!date"):
parts = ref.split('^')
ref_datetime = datetime.fromtimestamp(int(parts[1]))
link_suffix = ' ({})'.format(parts[3]) if len(parts) > 3 else ''
token_to_format = {
'date_num': '%Y-%m-%d',
'date': '%B %d, %Y',
'date_short': '%b %d, %Y',
'date_long': '%A, %B %d, %Y',
'time': '%H:%M',
'time_secs': '%H:%M:%S'
}
def replace_token(match):
token = match.group(1)
if token.startswith('date_') and token.endswith('_pretty'):
if ref_datetime.date() == date.today():
return 'today'
elif ref_datetime.date() == date.today() - timedelta(days=1):
return 'yesterday'
elif ref_datetime.date() == date.today() + timedelta(days=1):
return 'tomorrow'
else:
token = token.replace('_pretty', '')
if token in token_to_format:
return decode_from_utf8(ref_datetime.strftime(token_to_format[token]))
else:
return match.group(0)
return re.sub(r"{([^}]+)}", replace_token, parts[2]) + link_suffix
# Something else, just return as-is
return ref
def create_user_status_string(profile):
real_name = profile.get("real_name")
status_emoji = replace_string_with_emoji(profile.get("status_emoji", ""))
status_text = profile.get("status_text")
if status_emoji or status_text:
return "{} | {} {}".format(real_name, status_emoji, status_text)
else:
return real_name
def create_reaction_string(reaction, myidentifier):
if config.show_reaction_nicks:
nicks = [resolve_ref('@{}'.format(user)) for user in reaction['users']]
users = '({})'.format(','.join(nicks))
else:
users = len(reaction['users'])
reaction_string = ':{}:{}'.format(reaction['name'], users)
if myidentifier in reaction['users']:
return colorize_string(config.color_reaction_suffix_added_by_you, reaction_string,
reset_color=config.color_reaction_suffix)
else:
return reaction_string
def create_reactions_string(reactions, myidentifier):
reactions_with_users = [r for r in reactions if len(r['users']) > 0]
reactions_string = ' '.join(create_reaction_string(r, myidentifier) for r in reactions_with_users)
if reactions_string:
return ' ' + colorize_string(config.color_reaction_suffix, '[{}]'.format(reactions_string))
else:
return ''
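# Extract the SlackTS of a buffer line by scanning its tags for the
# slack_ts_* tag that tag() attaches to every printed message line.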
def hdata_line_ts(line_pointer):
data = w.hdata_pointer(hdata.line, line_pointer, 'data')
for i in range(w.hdata_integer(hdata.line_data, data, 'tags_count')):
tag = w.hdata_string(hdata.line_data, data, '{}|tags_array'.format(i))
if tag.startswith('slack_ts_'):
return SlackTS(tag[9:])
return None
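# Rewrite the buffer lines that belong to one message, identified by its
# slack_ts_* tag. Lines are found by walking backwards from the end of the
# buffer. If the message is the newest one, extra lines can be appended when
# the new text is longer; otherwise the text is squeezed into the existing
# line pointers, since WeeChat offers no way to insert lines mid-buffer.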
def modify_buffer_line(buffer_pointer, ts, new_text):
own_lines = w.hdata_pointer(hdata.buffer, buffer_pointer, 'own_lines')
line_pointer = w.hdata_pointer(hdata.lines, own_lines, 'last_line')
# Find the last line with this ts
is_last_line = True
while line_pointer and hdata_line_ts(line_pointer) != ts:
is_last_line = False
line_pointer = w.hdata_move(hdata.line, line_pointer, -1)
# Find all lines for the message
pointers = []
while line_pointer and hdata_line_ts(line_pointer) == ts:
pointers.append(line_pointer)
line_pointer = w.hdata_move(hdata.line, line_pointer, -1)
pointers.reverse()
if not pointers:
return w.WEECHAT_RC_OK
if is_last_line:
lines = new_text.split('\n')
extra_lines_count = len(lines) - len(pointers)
if extra_lines_count > 0:
line_data = w.hdata_pointer(hdata.line, pointers[0], 'data')
tags_count = w.hdata_integer(hdata.line_data, line_data, 'tags_count')
tags = [w.hdata_string(hdata.line_data, line_data, '{}|tags_array'.format(i))
for i in range(tags_count)]
tags = tags_set_notify_none(tags)
tags_str = ','.join(tags)
last_read_line = w.hdata_pointer(hdata.lines, own_lines, 'last_read_line')
should_set_unread = last_read_line == pointers[-1]
# Insert new lines to match the number of lines in the message
w.buffer_set(buffer_pointer, "print_hooks_enabled", "0")
for _ in range(extra_lines_count):
w.prnt_date_tags(buffer_pointer, ts.major, tags_str, " \t ")
pointers.append(w.hdata_pointer(hdata.lines, own_lines, 'last_line'))
if should_set_unread:
w.buffer_set(buffer_pointer, "unread", "")
w.buffer_set(buffer_pointer, "print_hooks_enabled", "1")
else:
# Split the message into at most the number of existing lines as we can't insert new lines
lines = new_text.split('\n', len(pointers) - 1)
# Replace newlines to prevent garbled lines in bare display mode
lines = [line.replace('\n', ' | ') for line in lines]
# Extend lines in case the new message is shorter than the old as we can't delete lines
lines += [''] * (len(pointers) - len(lines))
for pointer, line in zip(pointers, lines):
data = w.hdata_pointer(hdata.line, pointer, 'data')
w.hdata_update(hdata.line_data, data, {"message": line})
return w.WEECHAT_RC_OK
def nick_from_profile(profile, username):
full_name = profile.get('real_name') or username
if config.use_full_names:
nick = full_name
else:
nick = profile.get('display_name') or full_name
return nick.replace(' ', '')
def format_nick(nick, previous_nick=None):
if nick == previous_nick:
nick = w.config_string(w.config_get('weechat.look.prefix_same_nick')) or nick
nick_prefix = w.config_string(w.config_get('weechat.look.nick_prefix'))
nick_prefix_color_name = w.config_string(w.config_get('weechat.color.chat_nick_prefix'))
nick_suffix = w.config_string(w.config_get('weechat.look.nick_suffix'))
    nick_suffix_color_name = w.config_string(w.config_get('weechat.color.chat_nick_suffix'))
return colorize_string(nick_prefix_color_name, nick_prefix) + nick + colorize_string(nick_suffix_color_name, nick_suffix)
def tags_set_notify_none(tags):
notify_tags = {"notify_highlight", "notify_message", "notify_private"}
tags = [tag for tag in tags if tag not in notify_tags]
tags += ["no_highlight", "notify_none"]
return tags
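# Build the comma-separated WeeChat tag string for a printed line: a
# slack_ts_* tag (used by modify_buffer_line to find the line again), a
# slack_<tagset> tag, an optional nick_* tag, and the notify/log tags that the
# tagset implies, adjusted for self/backlog/no_log messages.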
def tag(ts, tagset=None, user=None, self_msg=False, backlog=False, no_log=False, extra_tags=None):
tagsets = {
"team_info": ["no_highlight", "log3"],
"team_message": ["irc_privmsg", "notify_message", "log1"],
"dm": ["irc_privmsg", "notify_private", "log1"],
"join": ["irc_join", "no_highlight", "log4"],
"leave": ["irc_part", "no_highlight", "log4"],
"topic": ["irc_topic", "no_highlight", "log3"],
"channel": ["irc_privmsg", "notify_message", "log1"],
}
ts_tag = "slack_ts_{}".format(ts)
slack_tag = "slack_{}".format(tagset or "default")
nick_tag = ["nick_{}".format(user).replace(" ", "_")] if user else []
tags = [ts_tag, slack_tag] + nick_tag + tagsets.get(tagset, [])
if self_msg or backlog:
tags = tags_set_notify_none(tags)
if self_msg:
tags += ["self_msg"]
if backlog:
tags += ["logger_backlog"]
if no_log:
tags += ["no_log"]
tags = [tag for tag in tags if not tag.startswith("log") or tag == "logger_backlog"]
if extra_tags:
tags += extra_tags
return ",".join(OrderedDict.fromkeys(tags))
def set_own_presence_active(team):
slackbot = team.get_channel_map()['Slackbot']
channel = team.channels[slackbot]
request = {"type": "typing", "channel": channel.identifier}
channel.team.send_to_websocket(request, expect_reply=False)
###### New/converted command_ commands
@slack_buffer_or_ignore
@utf8_decode
def invite_command_cb(data, current_buffer, args):
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
split_args = args.split()[1:]
if not split_args:
w.prnt('', 'Too few arguments for command "/invite" (help on command: /help invite)')
return w.WEECHAT_RC_OK_EAT
if split_args[-1].startswith("#") or split_args[-1].startswith(config.group_name_prefix):
nicks = split_args[:-1]
channel = team.channels.get(team.get_channel_map().get(split_args[-1]))
if not nicks or not channel:
w.prnt('', '{}: No such nick/channel'.format(split_args[-1]))
return w.WEECHAT_RC_OK_EAT
else:
nicks = split_args
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
all_users = team.get_username_map()
users = set()
for nick in nicks:
user = all_users.get(nick.lstrip('@'))
if not user:
w.prnt('', 'ERROR: Unknown user: {}'.format(nick))
return w.WEECHAT_RC_OK_EAT
users.add(user)
s = SlackRequest(team, "conversations.invite", {"channel": channel.identifier, "users": ",".join(users)},
channel=channel, metadata={"nicks": nicks})
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def part_command_cb(data, current_buffer, args):
e = EVENTROUTER
args = args.split()
if len(args) > 1:
team = e.weechat_controller.buffers[current_buffer].team
cmap = team.get_channel_map()
channel = "".join(args[1:])
if channel in cmap:
buffer_ptr = team.channels[cmap[channel]].channel_buffer
e.weechat_controller.unregister_buffer(buffer_ptr, update_remote=True, close_buffer=True)
else:
w.prnt(team.channel_buffer, "{}: No such channel".format(channel))
else:
e.weechat_controller.unregister_buffer(current_buffer, update_remote=True, close_buffer=True)
return w.WEECHAT_RC_OK_EAT
def parse_topic_command(command):
_, _, args = command.partition(' ')
if args.startswith('#'):
channel_name, _, topic_arg = args.partition(' ')
else:
channel_name = None
topic_arg = args
if topic_arg == '-delete':
topic = ''
elif topic_arg:
topic = topic_arg
else:
topic = None
return channel_name, topic
@slack_buffer_or_ignore
@utf8_decode
def topic_command_cb(data, current_buffer, command):
"""
Change the topic of a channel
/topic [<channel>] [<topic>|-delete]
"""
channel_name, topic = parse_topic_command(command)
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
if channel_name:
channel = team.channels.get(team.get_channel_map().get(channel_name))
else:
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
if not channel:
w.prnt(team.channel_buffer, "{}: No such channel".format(channel_name))
return w.WEECHAT_RC_OK_EAT
if topic is None:
w.prnt(channel.channel_buffer,
'Topic for {} is "{}"'.format(channel.name, channel.render_topic()))
else:
s = SlackRequest(team, "conversations.setTopic",
{"channel": channel.identifier, "topic": linkify_text(topic, team)}, channel=channel)
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def whois_command_cb(data, current_buffer, command):
"""
Get real name of user
/whois <nick>
"""
args = command.split()
if len(args) < 2:
w.prnt(current_buffer, "Not enough arguments")
return w.WEECHAT_RC_OK_EAT
user = args[1]
    if user.startswith('@'):
user = user[1:]
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
u = team.users.get(team.get_username_map().get(user))
if u:
def print_profile(field):
value = u.profile.get(field)
if value:
team.buffer_prnt("[{}]: {}: {}".format(user, field, value))
team.buffer_prnt("[{}]: {}".format(user, u.real_name))
status_emoji = replace_string_with_emoji(u.profile.get("status_emoji", ""))
status_text = u.profile.get("status_text", "")
if status_emoji or status_text:
team.buffer_prnt("[{}]: {} {}".format(user, status_emoji, status_text))
team.buffer_prnt("[{}]: username: {}".format(user, u.username))
team.buffer_prnt("[{}]: id: {}".format(user, u.identifier))
print_profile('title')
print_profile('email')
print_profile('phone')
print_profile('skype')
else:
team.buffer_prnt("[{}]: No such user".format(user))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def me_command_cb(data, current_buffer, args):
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
message = args.split(' ', 1)[1]
channel.send_message(message, subtype='me_message')
return w.WEECHAT_RC_OK_EAT
@utf8_decode
def command_register(data, current_buffer, args):
"""
/slack register [-nothirdparty] [code/token]
Register a Slack team in wee-slack. Call this without any arguments and
follow the instructions to register a new team. If you already have a token
for a team, you can call this with that token to add it.
By default GitHub Pages will see a temporary code used to create your token
(but not the token itself). If you're worried about this, you can use the
-nothirdparty option, though the process will be a bit less user friendly.
"""
CLIENT_ID = "2468770254.51917335286"
CLIENT_SECRET = "dcb7fe380a000cba0cca3169a5fe8d70" # Not really a secret.
REDIRECT_URI_GITHUB = "https://wee-slack.github.io/wee-slack/oauth"
REDIRECT_URI_NOTHIRDPARTY = "http://not.a.realhost/"
args = args.strip()
if " " in args:
nothirdparty_arg, _, code = args.partition(" ")
nothirdparty = nothirdparty_arg == "-nothirdparty"
else:
nothirdparty = args == "-nothirdparty"
code = "" if nothirdparty else args
redirect_uri = quote(REDIRECT_URI_NOTHIRDPARTY if nothirdparty else REDIRECT_URI_GITHUB, safe='')
if not code:
if nothirdparty:
nothirdparty_note = ""
last_step = "You will see a message that the site can't be reached, this is expected. The URL for the page will have a code in it of the form `?code=<code>`. Copy the code after the equals sign, return to weechat and run `/slack register -nothirdparty <code>`."
else:
nothirdparty_note = "\nNote that by default GitHub Pages will see a temporary code used to create your token (but not the token itself). If you're worried about this, you can use the -nothirdparty option, though the process will be a bit less user friendly."
last_step = "The web page will show a command in the form `/slack register <code>`. Run this command in weechat."
message = textwrap.dedent("""
### Connecting to a Slack team with OAuth ###{}
1) Paste this link into a browser: https://slack.com/oauth/authorize?client_id={}&scope=client&redirect_uri={}
2) Select the team you wish to access from wee-slack in your browser. If you want to add multiple teams, you will have to repeat this whole process for each team.
3) Click "Authorize" in the browser.
If you get a message saying you are not authorized to install wee-slack, the team has restricted Slack app installation and you will have to request it from an admin. To do that, go to https://my.slack.com/apps/A1HSZ9V8E-wee-slack and click "Request to Install".
4) {}
""").strip().format(nothirdparty_note, CLIENT_ID, redirect_uri, last_step)
w.prnt("", "\n" + message)
return w.WEECHAT_RC_OK_EAT
elif code.startswith('xox'):
add_token(code)
return w.WEECHAT_RC_OK_EAT
uri = (
"https://slack.com/api/oauth.access?"
"client_id={}&client_secret={}&redirect_uri={}&code={}"
).format(CLIENT_ID, CLIENT_SECRET, redirect_uri, code)
params = {'useragent': 'wee_slack {}'.format(SCRIPT_VERSION)}
    w.hook_process_hashtable("url:{}".format(uri), params, config.slack_timeout, "register_callback", "")
return w.WEECHAT_RC_OK_EAT
command_register.completion = '-nothirdparty %-'
@utf8_decode
def register_callback(data, command, return_code, out, err):
if return_code != 0:
w.prnt("", "ERROR: problem when trying to get Slack OAuth token. Got return code {}. Err: {}".format(return_code, err))
w.prnt("", "Check the network or proxy settings")
return w.WEECHAT_RC_OK_EAT
if len(out) <= 0:
w.prnt("", "ERROR: problem when trying to get Slack OAuth token. Got 0 length answer. Err: {}".format(err))
w.prnt("", "Check the network or proxy settings")
return w.WEECHAT_RC_OK_EAT
d = json.loads(out)
if not d["ok"]:
w.prnt("",
"ERROR: Couldn't get Slack OAuth token: {}".format(d['error']))
return w.WEECHAT_RC_OK_EAT
add_token(d['access_token'], d['team_name'])
return w.WEECHAT_RC_OK_EAT
def add_token(token, team_name=None):
if config.is_default('slack_api_token'):
w.config_set_plugin('slack_api_token', token)
else:
# Add new token to existing set, joined by comma.
existing_tokens = config.get_string('slack_api_token')
if token in existing_tokens:
print_error('This token is already registered')
return
w.config_set_plugin('slack_api_token', ','.join([existing_tokens, token]))
if team_name:
w.prnt("", "Success! Added team \"{}\"".format(team_name))
else:
w.prnt("", "Success! Added token")
w.prnt("", "Please reload wee-slack with: /python reload slack")
w.prnt("", "If you want to add another team you can repeat this process from step 1 before reloading wee-slack.")
@slack_buffer_or_ignore
@utf8_decode
def msg_command_cb(data, current_buffer, args):
aargs = args.split(None, 2)
who = aargs[1].lstrip('@')
if who == "*":
who = EVENTROUTER.weechat_controller.buffers[current_buffer].name
else:
join_query_command_cb(data, current_buffer, '/query ' + who)
if len(aargs) > 2:
message = aargs[2]
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
cmap = team.get_channel_map()
if who in cmap:
channel = team.channels[cmap[who]]
channel.send_message(message)
return w.WEECHAT_RC_OK_EAT
def print_team_items_info(team, header, items, extra_info_function):
team.buffer_prnt("{}:".format(header))
if items:
max_name_length = max(len(item.name) for item in items)
for item in sorted(items, key=lambda item: item.name.lower()):
extra_info = extra_info_function(item)
team.buffer_prnt(" {:<{}}({})".format(item.name, max_name_length + 2, extra_info))
return w.WEECHAT_RC_OK_EAT
def print_users_info(team, header, users):
def extra_info_function(user):
external_text = ", external" if user.is_external else ""
return user.presence + external_text
return print_team_items_info(team, header, users, extra_info_function)
@slack_buffer_required
@utf8_decode
def command_teams(data, current_buffer, args):
"""
/slack teams
List the connected Slack teams.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
teams = EVENTROUTER.teams.values()
extra_info_function = lambda team: "token: {}".format(token_for_print(team.token))
return print_team_items_info(team, "Slack teams", teams, extra_info_function)
@slack_buffer_required
@utf8_decode
def command_channels(data, current_buffer, args):
"""
/slack channels
List the channels in the current team.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
channels = [channel for channel in team.channels.values() if channel.type not in ['im', 'mpim']]
def extra_info_function(channel):
if channel.active:
return "member"
elif getattr(channel, "is_archived", None):
return "archived"
else:
return "not a member"
return print_team_items_info(team, "Channels", channels, extra_info_function)
@slack_buffer_required
@utf8_decode
def command_users(data, current_buffer, args):
"""
/slack users
List the users in the current team.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
return print_users_info(team, "Users", team.users.values())
@slack_buffer_required
@utf8_decode
def command_usergroups(data, current_buffer, args):
"""
/slack usergroups [handle]
    List the usergroups in the current team.
    If a handle is given, show the members of that usergroup.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
usergroups = team.generate_usergroup_map()
usergroup_key = usergroups.get(args)
if usergroup_key:
s = SlackRequest(team, "usergroups.users.list", {"usergroup": usergroup_key},
metadata={'usergroup_handle': args})
EVENTROUTER.receive(s)
elif args:
w.prnt('', 'ERROR: Unknown usergroup handle: {}'.format(args))
return w.WEECHAT_RC_ERROR
else:
def extra_info_function(subteam):
is_member = 'member' if subteam.is_member else 'not a member'
return '{}, {}'.format(subteam.handle, is_member)
return print_team_items_info(team, "Usergroups", team.subteams.values(), extra_info_function)
return w.WEECHAT_RC_OK_EAT
command_usergroups.completion = '%(usergroups) %-'
@slack_buffer_required
@utf8_decode
def command_talk(data, current_buffer, args):
"""
/slack talk <user>[,<user2>[,<user3>...]]
Open a chat with the specified user(s).
"""
if not args:
w.prnt('', 'Usage: /slack talk <user>[,<user2>[,<user3>...]]')
return w.WEECHAT_RC_ERROR
return join_query_command_cb(data, current_buffer, '/query ' + args)
command_talk.completion = '%(nicks)'
@slack_buffer_or_ignore
@utf8_decode
def join_query_command_cb(data, current_buffer, args):
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
split_args = args.split(' ', 1)
if len(split_args) < 2 or not split_args[1]:
w.prnt('', 'Too few arguments for command "{}" (help on command: /help {})'
.format(split_args[0], split_args[0].lstrip('/')))
return w.WEECHAT_RC_OK_EAT
query = split_args[1]
# Try finding the channel by name
channel = team.channels.get(team.get_channel_map().get(query))
# If the channel doesn't exist, try finding a DM or MPDM instead
if not channel:
if query.startswith('#'):
w.prnt('', 'ERROR: Unknown channel: {}'.format(query))
return w.WEECHAT_RC_OK_EAT
# Get the IDs of the users
all_users = team.get_username_map()
users = set()
for username in query.split(','):
user = all_users.get(username.lstrip('@'))
if not user:
w.prnt('', 'ERROR: Unknown user: {}'.format(username))
return w.WEECHAT_RC_OK_EAT
users.add(user)
if users:
if len(users) > 1:
channel_type = 'mpim'
# Add the current user since MPDMs include them as a member
users.add(team.myidentifier)
else:
channel_type = 'im'
channel = team.find_channel_by_members(users, channel_type=channel_type)
# If the DM or MPDM doesn't exist, create it
if not channel:
s = SlackRequest(team, team.slack_api_translator[channel_type]['join'], {'users': ','.join(users)})
EVENTROUTER.receive(s)
if channel:
channel.open()
if config.switch_buffer_on_join:
w.buffer_set(channel.channel_buffer, "display", "1")
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_create(data, current_buffer, args):
"""
/slack create [-private] <channel_name>
Create a public or private channel.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
parts = args.split(None, 1)
if parts[0] == "-private":
args = parts[1]
private = True
else:
private = False
post_data = {"name": args, "is_private": private}
s = SlackRequest(team, "conversations.create", post_data)
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
command_create.completion = '-private'
@slack_buffer_required
@utf8_decode
def command_showmuted(data, current_buffer, args):
"""
/slack showmuted
List the muted channels in the current team.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
muted_channels = [team.channels[key].name
for key in team.muted_channels if key in team.channels]
team.buffer_prnt("Muted channels: {}".format(', '.join(muted_channels)))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_thread(data, current_buffer, args):
"""
/thread [count/message_id]
Open the thread for the message.
    If no message id is specified, the last thread in the channel will be opened.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
if not isinstance(channel, SlackChannelCommon):
print_error('/thread can not be used in the team buffer, only in a channel')
return w.WEECHAT_RC_ERROR
message = channel.message_from_hash(args)
if not message:
message_filter = lambda message: message.number_of_replies()
message = channel.message_from_hash_or_index(args, message_filter)
if message:
message.open_thread(switch=config.switch_buffer_on_join)
elif args:
print_error("Invalid id given, must be an existing id or a number greater " +
"than 0 and less than the number of thread messages in the channel")
else:
print_error("No threads found in channel")
return w.WEECHAT_RC_OK_EAT
command_thread.completion = '%(threads) %-'
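# Shared implementation of /slack subscribe and /slack unsubscribe: resolve
# the target thread (the current thread buffer, or a hash/index argument) and
# send the corresponding subscriptions.thread.* request.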
def subscribe_helper(current_buffer, args, usage, api):
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
team = channel.team
if isinstance(channel, SlackThreadChannel) and not args:
message = channel.parent_message
else:
message_filter = lambda message: message.number_of_replies()
message = channel.message_from_hash_or_index(args, message_filter)
if not message:
print_message_not_found_error(args)
return w.WEECHAT_RC_OK_EAT
last_read = next(reversed(message.submessages), message.ts)
post_data = {"channel": channel.identifier, "thread_ts": message.ts, "last_read": last_read}
s = SlackRequest(team, api, post_data, channel=channel)
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_subscribe(data, current_buffer, args):
"""
/slack subscribe <thread>
Subscribe to a thread, so that you are alerted to new messages. When in a
thread buffer, you can omit the thread id.
This command only works when using a session token; see the readme: https://github.com/wee-slack/wee-slack#4-add-your-slack-api-tokens
"""
return subscribe_helper(current_buffer, args, 'Usage: /slack subscribe <thread>', "subscriptions.thread.add")
command_subscribe.completion = '%(threads) %-'
@slack_buffer_required
@utf8_decode
def command_unsubscribe(data, current_buffer, args):
"""
/slack unsubscribe <thread>
Unsubscribe from a thread that has been previously subscribed to, so that
you are not alerted to new messages. When in a thread buffer, you can omit
the thread id.
This command only works when using a session token; see the readme: https://github.com/wee-slack/wee-slack#4-add-your-slack-api-tokens
"""
return subscribe_helper(current_buffer, args, 'Usage: /slack unsubscribe <thread>', "subscriptions.thread.remove")
command_unsubscribe.completion = '%(threads) %-'
@slack_buffer_required
@utf8_decode
def command_reply(data, current_buffer, args):
"""
/reply [-alsochannel] [<count/message_id>] <message>
When in a channel buffer:
/reply [-alsochannel] <count/message_id> <message>
Reply in a thread on the message. Specify either the message id or a
count upwards from the most recent message.
When in a thread buffer:
/reply [-alsochannel] <message>
Reply to the current thread.
In either case, -alsochannel also sends the reply to the parent channel.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
parts = args.split(None, 1)
if parts[0] == "-alsochannel":
args = parts[1]
broadcast = True
else:
broadcast = False
if isinstance(channel, SlackThreadChannel):
text = args
message = channel.parent_message
else:
try:
msg_id, text = args.split(None, 1)
except ValueError:
w.prnt('', 'Usage (when in a channel buffer): /reply [-alsochannel] <count/message_id> <message>')
return w.WEECHAT_RC_OK_EAT
message = channel.message_from_hash_or_index(msg_id)
if not message:
print_message_not_found_error(args)
return w.WEECHAT_RC_OK_EAT
if isinstance(message, SlackThreadMessage):
parent_id = str(message.parent_message.ts)
else:
parent_id = str(message.ts)
channel.send_message(text, request_dict_ext={'thread_ts': parent_id, 'reply_broadcast': broadcast})
return w.WEECHAT_RC_OK_EAT
command_reply.completion = '%(threads)|-alsochannel %(threads)'
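# Examples: in a channel buffer, "/reply $abc123 thanks!" (hypothetical
# hash) or "/reply 1 thanks!" replies in that message's thread; in a
# thread buffer, "/reply -alsochannel done" also broadcasts the reply
# to the parent channel.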
@slack_buffer_required
@utf8_decode
def command_rehistory(data, current_buffer, args):
"""
/rehistory [-remote]
Reload the history in the current channel.
With -remote the history will be downloaded again from Slack.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
if args == "-remote":
channel.get_history(full=True, no_log=True)
else:
channel.reprint_messages(force_render=True)
return w.WEECHAT_RC_OK_EAT
command_rehistory.completion = '-remote'
@slack_buffer_required
@utf8_decode
def command_hide(data, current_buffer, args):
"""
/hide
Hide the current channel if it is marked as distracting.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
name = channel.formatted_name(style='long_default')
if name in config.distracting_channels:
w.buffer_set(channel.channel_buffer, "hidden", "1")
return w.WEECHAT_RC_OK_EAT
@utf8_decode
def slack_command_cb(data, current_buffer, args):
split_args = args.split(' ', 1)
cmd_name = split_args[0]
cmd_args = split_args[1] if len(split_args) > 1 else ''
cmd = EVENTROUTER.cmds.get(cmd_name or 'help')
if not cmd:
w.prnt('', 'Command not found: ' + cmd_name)
return w.WEECHAT_RC_OK
return cmd(data, current_buffer, cmd_args)
@utf8_decode
def command_help(data, current_buffer, args):
"""
/slack help [command]
Print help for /slack commands.
"""
if args:
cmd = EVENTROUTER.cmds.get(args)
if cmd:
cmds = {args: cmd}
else:
w.prnt('', 'Command not found: ' + args)
return w.WEECHAT_RC_OK
else:
cmds = EVENTROUTER.cmds
w.prnt('', '\n{}'.format(colorize_string('bold', 'Slack commands:')))
script_prefix = '{0}[{1}python{0}/{1}slack{0}]{1}'.format(w.color('green'), w.color('reset'))
for _, cmd in sorted(cmds.items()):
name, cmd_args, description = parse_help_docstring(cmd)
w.prnt('', '\n{} {} {}\n\n{}'.format(
script_prefix, colorize_string('white', name), cmd_args, description))
return w.WEECHAT_RC_OK
@slack_buffer_required
@utf8_decode
def command_distracting(data, current_buffer, args):
"""
/slack distracting
Add or remove the current channel from distracting channels. You can hide
or unhide these channels with /slack nodistractions.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
fullname = channel.formatted_name(style="long_default")
if fullname in config.distracting_channels:
config.distracting_channels.remove(fullname)
else:
config.distracting_channels.append(fullname)
w.config_set_plugin('distracting_channels', ','.join(config.distracting_channels))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_slash(data, current_buffer, args):
"""
/slack slash /customcommand arg1 arg2 arg3
Run a custom slack command.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
team = channel.team
split_args = args.split(' ', 1)
command = split_args[0]
text = split_args[1] if len(split_args) > 1 else ""
text_linkified = linkify_text(text, team, only_users=True)
s = SlackRequest(team, "chat.command",
{"command": command, "text": text_linkified, 'channel': channel.identifier},
channel=channel, metadata={'command': command, 'command_args': text})
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_mute(data, current_buffer, args):
"""
/slack mute
Toggle mute on the current channel.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
team = channel.team
team.muted_channels ^= {channel.identifier}
muted_str = "Muted" if channel.identifier in team.muted_channels else "Unmuted"
team.buffer_prnt("{} channel {}".format(muted_str, channel.name))
s = SlackRequest(team, "users.prefs.set",
{"name": "muted_channels", "value": ",".join(team.muted_channels)}, channel=channel)
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_linkarchive(data, current_buffer, args):
"""
/slack linkarchive [message_id]
Place a link to the channel or message in the input bar.
Use cursor or mouse mode to get the id.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
url = 'https://{}/'.format(channel.team.domain)
if isinstance(channel, SlackChannelCommon):
url += 'archives/{}/'.format(channel.identifier)
if args:
message = channel.message_from_hash_or_index(args)
if message:
url += 'p{}{:0>6}'.format(message.ts.majorstr(), message.ts.minorstr())
if isinstance(message, SlackThreadMessage):
url += "?thread_ts={}&cid={}".format(message.parent_message.ts, channel.identifier)
else:
print_message_not_found_error(args)
return w.WEECHAT_RC_OK_EAT
w.command(current_buffer, "/input insert {}".format(url))
return w.WEECHAT_RC_OK_EAT
command_linkarchive.completion = '%(threads) %-'
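# Illustrative output (hypothetical ids): for channel C0123456 on
# team.slack.com and a message with ts 1534297475.000675, the inserted
# link is https://team.slack.com/archives/C0123456/p1534297475000675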
@utf8_decode
def command_nodistractions(data, current_buffer, args):
"""
/slack nodistractions
Hide or unhide all channels marked as distracting.
"""
global hide_distractions
hide_distractions = not hide_distractions
channels = [channel for channel in EVENTROUTER.weechat_controller.buffers.values()
if channel in config.distracting_channels]
for channel in channels:
w.buffer_set(channel.channel_buffer, "hidden", str(int(hide_distractions)))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_upload(data, current_buffer, args):
"""
/slack upload <filename>
Upload a file to the current buffer.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
weechat_dir = w.info_get("weechat_dir", "")
file_path = os.path.join(weechat_dir, os.path.expanduser(args))
if channel.type == 'team':
w.prnt('', "ERROR: Can't upload a file to the team buffer")
return w.WEECHAT_RC_ERROR
if not os.path.isfile(file_path):
unescaped_file_path = file_path.replace(r'\ ', ' ')
if os.path.isfile(unescaped_file_path):
file_path = unescaped_file_path
else:
w.prnt('', 'ERROR: Could not find file: {}'.format(file_path))
return w.WEECHAT_RC_ERROR
post_data = {
'channels': channel.identifier,
}
if isinstance(channel, SlackThreadChannel):
post_data['thread_ts'] = channel.thread_ts
url = SlackRequest(channel.team, 'files.upload', post_data, channel=channel).request_string()
options = [
'-s',
'-Ffile=@{}'.format(file_path),
url
]
proxy_string = ProxyWrapper().curl()
if proxy_string:
options.append(proxy_string)
options_hashtable = {'arg{}'.format(i + 1): arg for i, arg in enumerate(options)}
w.hook_process_hashtable('curl', options_hashtable, config.slack_timeout, 'upload_callback', '')
return w.WEECHAT_RC_OK_EAT
command_upload.completion = '%(filename) %-'
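# The upload is delegated to curl, roughly equivalent to:
#     curl -s -Ffile=@/path/to/file '<files.upload request url>'
# plus a proxy argument when one is configured.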
@utf8_decode
def upload_callback(data, command, return_code, out, err):
if return_code != 0:
w.prnt("", "ERROR: Couldn't upload file. Got return code {}. Error: {}".format(return_code, err))
return w.WEECHAT_RC_OK_EAT
try:
response = json.loads(out)
except JSONDecodeError:
w.prnt("", "ERROR: Couldn't process response from file upload. Got: {}".format(out))
return w.WEECHAT_RC_OK_EAT
if not response["ok"]:
w.prnt("", "ERROR: Couldn't upload file. Error: {}".format(response["error"]))
return w.WEECHAT_RC_OK_EAT
@utf8_decode
def away_command_cb(data, current_buffer, args):
all_servers, message = re.match('^/away( -all)? ?(.*)', args).groups()
if all_servers:
team_buffers = [team.channel_buffer for team in EVENTROUTER.teams.values()]
elif current_buffer in EVENTROUTER.weechat_controller.buffers:
team_buffers = [current_buffer]
else:
return w.WEECHAT_RC_OK
for team_buffer in team_buffers:
if message:
command_away(data, team_buffer, args)
else:
command_back(data, team_buffer, args)
return w.WEECHAT_RC_OK
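# Examples: "/away brb" marks you away on the current team, while
# "/away -all" with no message marks you back on every team.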
@slack_buffer_required
@utf8_decode
def command_away(data, current_buffer, args):
"""
/slack away
Set your presence to 'away'.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
s = SlackRequest(team, "users.setPresence", {"presence": "away"})
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK
@slack_buffer_required
@utf8_decode
def command_status(data, current_buffer, args):
"""
/slack status [<emoji> [<status_message>]|-delete]
Set your Slack status (not to be confused with away/here presence).
Prints the current status if no arguments are given; unsets it if -delete is given.
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
split_args = args.split(" ", 1)
if not split_args[0]:
profile = team.users[team.myidentifier].profile
team.buffer_prnt("Status: {} {}".format(
replace_string_with_emoji(profile.get("status_emoji", "")),
profile.get("status_text", "")))
return w.WEECHAT_RC_OK
emoji = "" if split_args[0] == "-delete" else split_args[0]
text = split_args[1] if len(split_args) > 1 else ""
new_profile = {"status_text": text, "status_emoji": emoji}
s = SlackRequest(team, "users.profile.set", {"profile": new_profile})
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK
command_status.completion = "-delete|%(emoji) %-"
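# Examples: "/slack status :palm_tree: On vacation" sets emoji and
# text, "/slack status" prints the current status, and
# "/slack status -delete" clears it.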
@utf8_decode
def line_event_cb(data, signal, hashtable):
tags = hashtable["_chat_line_tags"].split(',')
for tag in tags:
if tag.startswith('slack_ts_'):
ts = SlackTS(tag[9:])
break
else:
return w.WEECHAT_RC_OK
buffer_pointer = hashtable["_buffer"]
channel = EVENTROUTER.weechat_controller.buffers.get(buffer_pointer)
if isinstance(channel, SlackChannelCommon):
message_hash = channel.hashed_messages.get(ts)
if message_hash is None:
return w.WEECHAT_RC_OK
message_hash = "$" + message_hash
if data == "auto":
reaction = EMOJI_CHAR_OR_NAME_REGEX.match(hashtable["_chat_eol"])
if reaction:
emoji = reaction.group("emoji_char") or reaction.group("emoji_name")
channel.send_change_reaction("toggle", message_hash, emoji)
else:
data = "message"
if data == "message":
w.command(buffer_pointer, "/cursor stop")
w.command(buffer_pointer, "/input insert {}".format(message_hash))
elif data == "delete":
w.command(buffer_pointer, "/input send {}s///".format(message_hash))
elif data == "linkarchive":
w.command(buffer_pointer, "/cursor stop")
w.command(buffer_pointer, "/slack linkarchive {}".format(message_hash))
elif data == "reply":
w.command(buffer_pointer, "/cursor stop")
w.command(buffer_pointer, "/input insert /reply {}\\x20".format(message_hash))
elif data == "thread":
w.command(buffer_pointer, "/cursor stop")
w.command(buffer_pointer, "/thread {}".format(message_hash))
return w.WEECHAT_RC_OK
@slack_buffer_required
@utf8_decode
def command_back(data, current_buffer, args):
"""
/slack back
Set your presence back to 'auto' (active).
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
s = SlackRequest(team, "users.setPresence", {"presence": "auto"})
EVENTROUTER.receive(s)
set_own_presence_active(team)
return w.WEECHAT_RC_OK
@slack_buffer_required
@utf8_decode
def command_label(data, current_buffer, args):
"""
/label [-full] <name>|-unset
Rename a channel or thread buffer. Note that this is not permanent; it will
only last as long as you keep the buffer and wee-slack open. Changes the
short_name by default, and the name and full_name if you use the -full
option. If you haven't set the short_name explicitly, that will also be
changed when using the -full option. Use the -unset option to set it back
to the default.
"""
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
split_args = args.split(None, 1)
if split_args[0] == "-full":
channel.label_full_drop_prefix = False
channel.label_full = split_args[1] if split_args[1] != "-unset" else None
else:
channel.label_short_drop_prefix = False
channel.label_short = args if args != "-unset" else None
channel.rename()
return w.WEECHAT_RC_OK
command_label.completion = "-unset|-full -unset %-"
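# Examples: "/label backend" changes only the short name of the current
# buffer, "/label -full backend" also changes name/full_name, and
# "/label -unset" (or "/label -full -unset") restores the default.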
@utf8_decode
def set_unread_cb(data, current_buffer, command):
for channel in EVENTROUTER.weechat_controller.buffers.values():
channel.mark_read()
return w.WEECHAT_RC_OK
@slack_buffer_or_ignore
@utf8_decode
def set_unread_current_buffer_cb(data, current_buffer, command):
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
channel.mark_read()
return w.WEECHAT_RC_OK
###### NEW EXCEPTIONS
class InvalidType(Exception):
"""
Raised during type checking when an object of the wrong type is about
to be used.
"""
def __init__(self, type_str):
super(InvalidType, self).__init__(type_str)
###### New but probably old and needs to migrate
def closed_slack_debug_buffer_cb(data, buffer):
global slack_debug
slack_debug = None
return w.WEECHAT_RC_OK
def create_slack_debug_buffer():
global slack_debug, debug_string
if slack_debug is None:
debug_string = None
slack_debug = w.buffer_new("slack-debug", "", "", "closed_slack_debug_buffer_cb", "")
w.buffer_set(slack_debug, "print_hooks_enabled", "0")
w.buffer_set(slack_debug, "notify", "0")
w.buffer_set(slack_debug, "highlight_tags_restrict", "highlight_force")
def load_emoji():
try:
weechat_dir = w.info_get('weechat_dir', '')
weechat_sharedir = w.info_get('weechat_sharedir', '')
local_weemoji, global_weemoji = ('{}/weemoji.json'.format(path)
for path in (weechat_dir, weechat_sharedir))
path = (global_weemoji if os.path.exists(global_weemoji) and
not os.path.exists(local_weemoji) else local_weemoji)
with open(path, 'r') as ef:
emojis = json.loads(ef.read())
if 'emoji' in emojis:
print_error('The weemoji.json file is in an old format. Please update it.')
else:
emoji_unicode = {key: value['unicode'] for key, value in emojis.items()}
emoji_skin_tones = {skin_tone['name']: skin_tone['unicode']
for emoji in emojis.values()
for skin_tone in emoji.get('skinVariations', {}).values()}
emoji_with_skin_tones = chain(emoji_unicode.items(), emoji_skin_tones.items())
emoji_with_skin_tones_reverse = {v: k for k, v in emoji_with_skin_tones}
return emoji_unicode, emoji_with_skin_tones_reverse
except Exception:
dbg("Couldn't load emoji list: {}".format(format_exc_only()), 5)
return {}, {}
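# Illustrative weemoji.json shape assumed above (hypothetical values):
# {"wave": {"unicode": "\U0001f44b",
#           "skinVariations": {"2": {"name": "wave::skin-tone-2",
#                                    "unicode": "\U0001f44b\U0001f3fb"}}}}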
def parse_help_docstring(cmd):
doc = textwrap.dedent(cmd.__doc__).strip().split('\n', 1)
cmd_line = doc[0].split(None, 1)
args = ''.join(cmd_line[1:])
return cmd_line[0], args, doc[1].strip()
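# Example: for command_mute, whose docstring is
#     /slack mute
#     Toggle mute on the current channel.
# this returns ('/slack', 'mute', 'Toggle mute on the current channel.').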
def setup_hooks():
w.bar_item_new('slack_typing_notice', '(extra)typing_bar_item_cb', '')
w.bar_item_new('away', '(extra)away_bar_item_cb', '')
w.bar_item_new('slack_away', '(extra)away_bar_item_cb', '')
w.hook_timer(5000, 0, 0, "ws_ping_cb", "")
w.hook_timer(1000, 0, 0, "typing_update_cb", "")
w.hook_timer(1000, 0, 0, "buffer_list_update_callback", "")
w.hook_timer(3000, 0, 0, "reconnect_callback", "EVENTROUTER")
w.hook_timer(1000 * 60 * 5, 0, 0, "slack_never_away_cb", "")
w.hook_signal('buffer_closing', "buffer_closing_callback", "")
w.hook_signal('buffer_renamed', "buffer_renamed_cb", "")
w.hook_signal('buffer_switch', "buffer_switch_callback", "")
w.hook_signal('window_switch', "buffer_switch_callback", "")
w.hook_signal('quit', "quit_notification_callback", "")
if config.send_typing_notice:
w.hook_signal('input_text_changed', "typing_notification_cb", "")
command_help.completion = '|'.join(EVENTROUTER.cmds.keys())
completions = '||'.join(
'{} {}'.format(name, getattr(cmd, 'completion', ''))
for name, cmd in EVENTROUTER.cmds.items())
w.hook_command(
# Command name and description
'slack', 'Plugin to allow typing notification and sync of read markers for slack.com',
# Usage
'<command> [<command options>]',
# Description of arguments
'Commands:\n' +
'\n'.join(sorted(EVENTROUTER.cmds.keys())) +
'\nUse /slack help <command> to find out more\n',
# Completions
completions,
# Function name
'slack_command_cb', '')
w.hook_command_run('/me', 'me_command_cb', '')
w.hook_command_run('/query', 'join_query_command_cb', '')
w.hook_command_run('/join', 'join_query_command_cb', '')
w.hook_command_run('/part', 'part_command_cb', '')
w.hook_command_run('/topic', 'topic_command_cb', '')
w.hook_command_run('/msg', 'msg_command_cb', '')
w.hook_command_run('/invite', 'invite_command_cb', '')
w.hook_command_run("/input complete_next", "complete_next_cb", "")
w.hook_command_run("/input set_unread", "set_unread_cb", "")
w.hook_command_run("/input set_unread_current_buffer", "set_unread_current_buffer_cb", "")
w.hook_command_run('/away', 'away_command_cb', '')
w.hook_command_run('/whois', 'whois_command_cb', '')
for cmd_name in ['hide', 'label', 'rehistory', 'reply', 'thread']:
cmd = EVENTROUTER.cmds[cmd_name]
_, args, description = parse_help_docstring(cmd)
completion = getattr(cmd, 'completion', '')
w.hook_command(cmd_name, description, args, '', completion, 'command_' + cmd_name, '')
w.hook_completion("irc_channel_topic", "complete topic for slack", "topic_completion_cb", "")
w.hook_completion("irc_channels", "complete channels for slack", "channel_completion_cb", "")
w.hook_completion("irc_privates", "complete dms/mpdms for slack", "dm_completion_cb", "")
w.hook_completion("nicks", "complete @-nicks for slack", "nick_completion_cb", "")
w.hook_completion("threads", "complete thread ids for slack", "thread_completion_cb", "")
w.hook_completion("usergroups", "complete @-usergroups for slack", "usergroups_completion_cb", "")
w.hook_completion("emoji", "complete :emoji: for slack", "emoji_completion_cb", "")
w.key_bind("mouse", {
"@chat(python.*):button2": "hsignal:slack_mouse",
})
w.key_bind("cursor", {
"@chat(python.*):D": "hsignal:slack_cursor_delete",
"@chat(python.*):L": "hsignal:slack_cursor_linkarchive",
"@chat(python.*):M": "hsignal:slack_cursor_message",
"@chat(python.*):R": "hsignal:slack_cursor_reply",
"@chat(python.*):T": "hsignal:slack_cursor_thread",
})
w.hook_hsignal("slack_mouse", "line_event_cb", "auto")
w.hook_hsignal("slack_cursor_delete", "line_event_cb", "delete")
w.hook_hsignal("slack_cursor_linkarchive", "line_event_cb", "linkarchive")
w.hook_hsignal("slack_cursor_message", "line_event_cb", "message")
w.hook_hsignal("slack_cursor_reply", "line_event_cb", "reply")
w.hook_hsignal("slack_cursor_thread", "line_event_cb", "thread")
# Hooks to fix/implement
# w.hook_signal('buffer_opened', "buffer_opened_cb", "")
# w.hook_signal('window_scrolled', "scrolled_cb", "")
# w.hook_timer(3000, 0, 0, "slack_connection_persistence_cb", "")
##### END NEW
def dbg(message, level=0, main_buffer=False, fout=False):
"""
Send debug output to the slack-debug buffer and optionally write it to a file.
"""
# TODO: do this smarter
if level >= config.debug_level:
global debug_string
message = "DEBUG: {}".format(message)
if fout:
with open('/tmp/debug.log', 'a+') as log_file:
log_file.writelines(message + '\n')
if main_buffer:
w.prnt("", "slack: " + message)
else:
if slack_debug and (not debug_string or debug_string in message):
w.prnt(slack_debug, message)
###### Config code
class PluginConfig(object):
Setting = collections.namedtuple('Setting', ['default', 'desc'])
# Default settings.
# These are, initially, each a (default, desc) tuple; the former is the
# default value of the setting, in the (string) format that weechat
# expects, and the latter is the user-friendly description of the setting.
# At __init__ time these values are extracted, the description is used to
# set or update the setting description for use with /help, and the default
# value is used to set the default for any settings not already defined.
# Following this procedure, the keys remain the same, but the values are
# the real (python) values of the settings.
default_settings = {
'auto_open_threads': Setting(
default='false',
desc='Automatically open threads when mentioned or in'
' response to own messages.'),
'background_load_all_history': Setting(
default='true',
desc='Load the history for all channels in the background when the script is loaded,'
' rather than waiting until the buffer is switched to. You can set this to false if'
' you experience performance issues, however that causes some loss of functionality,'
' see known issues in the readme.'),
'channel_name_typing_indicator': Setting(
default='true',
desc='Change the prefix of a channel from # to > when someone is'
' typing in it. Note that this will (temporarily) affect the sort'
' order if you sort buffers by name rather than by number.'),
'color_buflist_muted_channels': Setting(
default='darkgray',
desc='Color to use for muted channels in the buflist'),
'color_deleted': Setting(
default='red',
desc='Color to use for deleted messages and files.'),
'color_edited_suffix': Setting(
default='095',
desc='Color to use for (edited) suffix on messages that have been edited.'),
'color_reaction_suffix': Setting(
default='darkgray',
desc='Color to use for the [:wave:(@user)] suffix on messages that'
' have reactions attached to them.'),
'color_reaction_suffix_added_by_you': Setting(
default='blue',
desc='Color to use for reactions that you have added.'),
'color_thread_suffix': Setting(
default='lightcyan',
desc='Color to use for the [thread: XXX] suffix on messages that'
' have threads attached to them. The special value "multiple" can'
' be used to use a different color for each thread.'),
'color_typing_notice': Setting(
default='yellow',
desc='Color to use for the typing notice.'),
'colorize_attachments': Setting(
default='prefix',
desc='Whether to colorize attachment lines. Values: "prefix": Only colorize'
' the prefix, "all": Colorize the whole line, "none": Don\'t colorize.'),
'colorize_private_chats': Setting(
default='false',
desc='Whether to use nick-colors in DM windows.'),
'debug_mode': Setting(
default='false',
desc='Open a dedicated buffer for debug messages and start logging'
' to it. How verbose the logging is depends on log_level.'),
'debug_level': Setting(
default='3',
desc='Show only this level of debug info (or higher) when'
' debug_mode is on. Lower levels -> more messages.'),
'distracting_channels': Setting(
default='',
desc='List of channels to hide.'),
'external_user_suffix': Setting(
default='*',
desc='The suffix appended to nicks to indicate external users.'),
'files_download_location': Setting(
default='',
desc='If set, file attachments will be automatically downloaded'
' to this location. "%h" will be replaced by WeeChat home,'
' "~/.weechat" by default. Requires WeeChat 2.2 or newer.'),
'group_name_prefix': Setting(
default='&',
desc='The prefix of buffer names for groups (private channels).'),
'history_fetch_count': Setting(
default='200',
desc='The number of messages to fetch for each channel when fetching'
' history, between 1 and 1000.'),
'link_previews': Setting(
default='true',
desc='Show previews of website content linked by teammates.'),
'map_underline_to': Setting(
default='_',
desc='When sending underlined text to slack, use this formatting'
' character for it. The default ("_") sends it as italics. Use'
' "*" to send bold instead.'),
'muted_channels_activity': Setting(
default='personal_highlights',
desc="Control which activity you see from muted channels, either"
" none, personal_highlights, all_highlights or all. none: Don't"
" show any activity. personal_highlights: Only show personal"
" highlights, i.e. not @channel and @here. all_highlights: Show"
" all highlights, but not other messages. all: Show all activity,"
" like other channels."),
'notify_subscribed_threads': Setting(
default='auto',
desc="Control if you want to see a notification in the team buffer when a"
" thread you're subscribed to receives a new message, either auto, true or"
" false. auto means that you only get a notification if auto_open_threads"
" and thread_messages_in_channel both are false. Defaults to auto."),
'notify_usergroup_handle_updated': Setting(
default='false',
desc="Control if you want to see a notification in the team buffer when a"
"usergroup's handle has changed, either true or false."),
'never_away': Setting(
default='false',
desc='Poke Slack every five minutes so that it never marks you "away".'),
'record_events': Setting(
default='false',
desc='Log all traffic from Slack to disk as JSON.'),
'render_bold_as': Setting(
default='bold',
desc='When receiving bold text from Slack, render it as this in weechat.'),
'render_emoji_as_string': Setting(
default='false',
desc="Render emojis as :emoji_name: instead of emoji characters. Enable this"
" if your terminal doesn't support emojis, or set to 'both' if you want to"
" see both renderings. Note that even though this is"
" disabled by default, you need to place {}/blob/master/weemoji.json in your"
" weechat directory to enable rendering emojis as emoji characters."
.format(REPO_URL)),
'render_italic_as': Setting(
default='italic',
desc='When receiving italic text from Slack, render it as this in weechat.'
' If your terminal lacks italic support, consider using "underline" instead.'),
'send_typing_notice': Setting(
default='true',
desc='Alert Slack users when you are typing a message in the input bar '
'(Requires reload)'),
'server_aliases': Setting(
default='',
desc='A comma separated list of `subdomain:alias` pairs. The alias'
' will be used instead of the actual name of the slack (in buffer'
' names, logging, etc). E.g `work:no_fun_allowed` would make your'
' work slack show up as `no_fun_allowed` rather than `work.slack.com`.'),
'shared_name_prefix': Setting(
default='%',
desc='The prefix of buffer names for shared channels.'),
'short_buffer_names': Setting(
default='false',
desc='Use `foo.#channel` rather than `foo.slack.com.#channel` as the'
' internal name for Slack buffers.'),
'show_buflist_presence': Setting(
default='true',
desc='Display a `+` character in the buffer list for present users.'),
'show_reaction_nicks': Setting(
default='false',
desc='Display the name of the reacting user(s) alongside each reactji.'),
'slack_api_token': Setting(
default='INSERT VALID KEY HERE!',
desc='List of Slack API tokens, one per Slack instance you want to'
' connect to. See the README for details on how to get these.'),
'slack_timeout': Setting(
default='20000',
desc='How long (ms) to wait when communicating with Slack.'),
'switch_buffer_on_join': Setting(
default='true',
desc='When /joining a channel, automatically switch to it as well.'),
'thread_messages_in_channel': Setting(
default='false',
desc='When enabled shows thread messages in the parent channel.'),
'unfurl_ignore_alt_text': Setting(
default='false',
desc='When displaying ("unfurling") links to channels/users/etc,'
' ignore the "alt text" present in the message and instead use the'
' canonical name of the thing being linked to.'),
'unfurl_auto_link_display': Setting(
default='both',
desc='When displaying ("unfurling") links to channels/users/etc,'
' determine what is displayed when the text matches the url'
' without the protocol. This happens when Slack automatically'
' creates links, e.g. from words separated by dots or email'
' addresses. Set it to "text" to only display the text written by'
' the user, "url" to only display the url or "both" (the default)'
' to display both.'),
'unhide_buffers_with_activity': Setting(
default='false',
desc='When activity occurs on a buffer, unhide it even if it was'
' previously hidden (whether by the user or by the'
' distracting_channels setting).'),
'use_full_names': Setting(
default='false',
desc='Use full names as the nicks for all users. When this is'
' false (the default), display names will be used if set, with a'
' fallback to the full name if display name is not set.'),
}
# Set missing settings to their defaults. Load non-missing settings from
# weechat configs.
def __init__(self):
self.settings = {}
# Set all descriptions, replace the values in the dict with the
# default setting value rather than the (setting,desc) tuple.
for key, (default, desc) in self.default_settings.items():
w.config_set_desc_plugin(key, desc)
self.settings[key] = default
# Migrate settings from old versions of Weeslack...
self.migrate()
# ...and then set anything left over from the defaults.
for key, default in self.settings.items():
if not w.config_get_plugin(key):
w.config_set_plugin(key, default)
self.config_changed(None, None, None)
def __str__(self):
return "".join([x + "\t" + str(self.settings[x]) + "\n" for x in self.settings.keys()])
def config_changed(self, data, full_key, value):
if full_key is None:
for key in self.settings:
self.settings[key] = self.fetch_setting(key)
else:
key = full_key.replace(CONFIG_PREFIX + ".", "")
self.settings[key] = self.fetch_setting(key)
if (full_key is None or full_key == CONFIG_PREFIX + ".debug_mode") and self.debug_mode:
create_slack_debug_buffer()
return w.WEECHAT_RC_OK
def fetch_setting(self, key):
try:
return getattr(self, 'get_' + key)(key)
except AttributeError:
# Most settings are on/off, so make get_boolean the default
return self.get_boolean(key)
except:
# There was a setting-specific getter, but it failed.
print(format_exc_tb())
return self.settings[key]
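# Dispatch example: fetch_setting('history_fetch_count') finds
# get_history_fetch_count (aliased to get_int below), while
# fetch_setting('auto_open_threads') has no specific getter and falls
# back to get_boolean.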
def __getattr__(self, key):
try:
return self.settings[key]
except KeyError:
raise AttributeError(key)
def get_boolean(self, key):
return w.config_string_to_boolean(w.config_get_plugin(key))
def get_string(self, key):
return w.config_get_plugin(key)
def get_int(self, key):
return int(w.config_get_plugin(key))
def is_default(self, key):
default = self.default_settings.get(key).default
return w.config_get_plugin(key) == default
get_color_buflist_muted_channels = get_string
get_color_deleted = get_string
get_color_edited_suffix = get_string
get_color_reaction_suffix = get_string
get_color_reaction_suffix_added_by_you = get_string
get_color_thread_suffix = get_string
get_color_typing_notice = get_string
get_colorize_attachments = get_string
get_debug_level = get_int
get_external_user_suffix = get_string
get_files_download_location = get_string
get_group_name_prefix = get_string
get_history_fetch_count = get_int
get_map_underline_to = get_string
get_muted_channels_activity = get_string
get_render_bold_as = get_string
get_render_italic_as = get_string
get_shared_name_prefix = get_string
get_slack_timeout = get_int
get_unfurl_auto_link_display = get_string
def get_distracting_channels(self, key):
return [x.strip() for x in w.config_get_plugin(key).split(',') if x]
def get_server_aliases(self, key):
alias_list = w.config_get_plugin(key)
return dict(item.split(":") for item in alias_list.split(",") if ':' in item)
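# Example: server_aliases = "work:no_fun_allowed,home:fun" parses to
# {'work': 'no_fun_allowed', 'home': 'fun'}.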
def get_slack_api_token(self, key):
token = w.config_get_plugin("slack_api_token")
if token.startswith('${sec.data'):
return w.string_eval_expression(token, {}, {}, {})
else:
return token
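# Example: set slack_api_token to "${sec.data.slack_token}" after
# running "/secure set slack_token <token>" to read it from WeeChat's
# secure storage via string_eval_expression.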
def get_string_or_boolean(self, key, *valid_strings):
value = w.config_get_plugin(key)
if value in valid_strings:
return value
return w.config_string_to_boolean(value)
def get_notify_subscribed_threads(self, key):
return self.get_string_or_boolean(key, 'auto')
def get_render_emoji_as_string(self, key):
return self.get_string_or_boolean(key, 'both')
def migrate(self):
"""
Migrate settings saved under the old extension name (slack_extension) to slack.
"""
if not w.config_get_plugin("migrated"):
for k in self.settings.keys():
if not w.config_is_set_plugin(k):
p = w.config_get("{}_extension.{}".format(CONFIG_PREFIX, k))
data = w.config_string(p)
if data != "":
w.config_set_plugin(k, data)
w.config_set_plugin("migrated", "true")
old_thread_color_config = w.config_get_plugin("thread_suffix_color")
new_thread_color_config = w.config_get_plugin("color_thread_suffix")
if old_thread_color_config and not new_thread_color_config:
w.config_set_plugin("color_thread_suffix", old_thread_color_config)
def config_server_buffer_cb(data, key, value):
for team in EVENTROUTER.teams.values():
team.buffer_merge(value)
return w.WEECHAT_RC_OK
# To trace execution, call setup_trace() at startup and set
# sys.settrace(trace_calls) inside the function you want to trace
def setup_trace():
global f
now = time.time()
f = open('{}/{}-trace.json'.format(RECORD_DIR, now), 'w')
def trace_calls(frame, event, arg):
global f
if event != 'call':
return
co = frame.f_code
func_name = co.co_name
if func_name == 'write':
# Ignore write() calls from print statements
return
func_line_no = frame.f_lineno
func_filename = co.co_filename
caller = frame.f_back
caller_line_no = caller.f_lineno
caller_filename = caller.f_code.co_filename
print('Call to %s on line %s of %s from line %s of %s' % \
(func_name, func_line_no, func_filename,
caller_line_no, caller_filename), file=f)
f.flush()
return
def initiate_connection(token, retries=3, team=None, reconnect=False):
return SlackRequest(team,
'rtm.{}'.format('connect' if team else 'start'),
{"batch_presence_aware": 1},
retries=retries,
token=token,
metadata={'reconnect': reconnect})
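# The initial login passes no team and therefore uses rtm.start;
# reconnects pass the existing team and use rtm.connect.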
if __name__ == "__main__":
w = WeechatWrapper(weechat)
if w.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE,
SCRIPT_DESC, "script_unloaded", ""):
weechat_version = int(w.info_get("version_number", "") or 0)
weechat_upgrading = w.info_get("weechat_upgrading", "")
if weechat_version < 0x1030000:
w.prnt("", "\nERROR: Weechat version 1.3+ is required to use {}.\n\n".format(SCRIPT_NAME))
elif weechat_upgrading == "1":
w.prnt("", "NOTE: wee-slack will not work after running /upgrade until it's"
" reloaded. Please run `/python reload slack` to continue using it. You"
" will not receive any new messages in wee-slack buffers until doing this.")
else:
global EVENTROUTER
EVENTROUTER = EventRouter()
receive_httprequest_callback = EVENTROUTER.receive_httprequest_callback
receive_ws_callback = EVENTROUTER.receive_ws_callback
# Global var section
slack_debug = None
config = PluginConfig()
config_changed_cb = config.config_changed
typing_timer = time.time()
hide_distractions = False
w.hook_config(CONFIG_PREFIX + ".*", "config_changed_cb", "")
w.hook_config("irc.look.server_buffer", "config_server_buffer_cb", "")
if weechat_version < 0x2090000:
w.hook_modifier("input_text_for_buffer", "input_text_for_buffer_cb", "")
EMOJI, EMOJI_WITH_SKIN_TONES_REVERSE = load_emoji()
setup_hooks()
if config.record_events:
EVENTROUTER.record()
hdata = Hdata(w)
auto_connect = weechat.info_get("auto_connect", "") != "0"
if auto_connect:
tokens = [token.strip() for token in config.slack_api_token.split(',')]
w.prnt('', 'Connecting to {} slack team{}.'
.format(len(tokens), '' if len(tokens) == 1 else 's'))
for t in tokens:
s = initiate_connection(t)
EVENTROUTER.receive(s)
EVENTROUTER.handle_next()
| mit | -9,054,104,212,139,927,000 | 38.275929 | 277 | 0.608653 | false |
brian-yang/mozillians | vendor-local/lib/python/kombu/tests/test_compression.py | 13 | 1550 | from __future__ import absolute_import
import sys
from nose import SkipTest
from kombu import compression
from .utils import TestCase
from .utils import mask_modules
class test_compression(TestCase):
def setUp(self):
try:
import bz2 # noqa
except ImportError:
self.has_bzip2 = False
else:
self.has_bzip2 = True
@mask_modules("bz2")
def test_no_bz2(self):
c = sys.modules.pop("kombu.compression")
try:
import kombu.compression
self.assertFalse(hasattr(kombu.compression, "bz2"))
finally:
if c is not None:
sys.modules["kombu.compression"] = c
def test_encoders(self):
encoders = compression.encoders()
self.assertIn("application/x-gzip", encoders)
if self.has_bzip2:
self.assertIn("application/x-bz2", encoders)
def test_compress__decompress__zlib(self):
text = "The Quick Brown Fox Jumps Over The Lazy Dog"
c, ctype = compression.compress(text, "zlib")
self.assertNotEqual(text, c)
d = compression.decompress(c, ctype)
self.assertEqual(d, text)
def test_compress__decompress__bzip2(self):
if not self.has_bzip2:
raise SkipTest("bzip2 not available")
text = "The Brown Quick Fox Over The Lazy Dog Jumps"
c, ctype = compression.compress(text, "bzip2")
self.assertNotEqual(text, c)
d = compression.decompress(c, ctype)
self.assertEqual(d, text)
| bsd-3-clause | -5,377,916,114,516,399,000 | 28.245283 | 63 | 0.612903 | false |
odoobgorg/odoo | addons/payment_authorize/tests/test_authorize.py | 27 | 7898 | # -*- coding: utf-8 -*-
import hashlib
import hmac
import time
import urlparse
from lxml import objectify
import openerp
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_authorize.controllers.main import AuthorizeController
from openerp.tools import mute_logger
@openerp.tests.common.at_install(True)
@openerp.tests.common.post_install(True)
class AuthorizeCommon(PaymentAcquirerCommon):
def setUp(self):
super(AuthorizeCommon, self).setUp()
self.base_url = self.env['ir.config_parameter'].get_param('web.base.url')
# authorize only support USD in test environment
self.currency_usd = self.env['res.currency'].search([('name', '=', 'USD')], limit=1)[0]
# get the authorize account
model, self.authorize_id = self.env['ir.model.data'].get_object_reference('payment_authorize', 'payment_acquirer_authorize')
@openerp.tests.common.at_install(True)
@openerp.tests.common.post_install(True)
class AuthorizeForm(AuthorizeCommon):
def _authorize_generate_hashing(self, values):
data = '^'.join([
values['x_login'],
values['x_fp_sequence'],
values['x_fp_timestamp'],
values['x_amount'],
]) + '^'
return hmac.new(str(values['x_trans_key']), data, hashlib.md5).hexdigest()
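# Illustrative: the fingerprint is an HMAC-MD5 over
# "x_login^x_fp_sequence^x_fp_timestamp^x_amount^" keyed with the
# transaction key, mirroring how Authorize.net computes x_fp_hash.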
def test_10_Authorize_form_render(self):
authorize = self.env['payment.acquirer'].browse(self.authorize_id)
self.assertEqual(authorize.environment, 'test', 'test without test environment')
# ----------------------------------------
# Test: button direct rendering
# ----------------------------------------
form_values = {
'x_login': authorize.authorize_login,
'x_trans_key': authorize.authorize_transaction_key,
'x_amount': '320.0',
'x_show_form': 'PAYMENT_FORM',
'x_type': 'AUTH_CAPTURE',
'x_method': 'CC',
'x_fp_sequence': '%s%s' % (authorize.id, int(time.time())),
'x_version': '3.1',
'x_relay_response': 'TRUE',
'x_fp_timestamp': str(int(time.time())),
'x_relay_url': '%s' % urlparse.urljoin(self.base_url, AuthorizeController._return_url),
'x_cancel_url': '%s' % urlparse.urljoin(self.base_url, AuthorizeController._cancel_url),
'return_url': None,
'x_currency_code': 'USD',
'x_invoice_num': 'SO004',
'x_first_name': 'Norbert',
'x_last_name': 'Buyer',
'x_address': 'Huge Street 2/543',
'x_city': 'Sin City',
'x_zip': '1000',
'x_country': 'Belgium',
'x_phone': '0032 12 34 56 78',
'x_email': '[email protected]',
'x_state': None,
'x_ship_to_first_name': 'Norbert',
'x_ship_to_last_name': 'Buyer',
'x_ship_to_address': 'Huge Street 2/543',
'x_ship_to_city': 'Sin City',
'x_ship_to_zip': '1000',
'x_ship_to_country': 'Belgium',
'x_ship_to_phone': '0032 12 34 56 78',
'x_ship_to_email': '[email protected]',
'x_ship_to_state': None,
}
form_values['x_fp_hash'] = self._authorize_generate_hashing(form_values)
# render the button
cr, uid, context = self.env.cr, self.env.uid, {}
res = self.payment_acquirer.render(
cr, uid, self.authorize_id, 'SO004', 320.0, self.currency_usd.id,
values=self.buyer_values, context=context)
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://test.authorize.net/gateway/transact.dll', 'Authorize: wrong form POST url')
for el in tree.iterfind('input'):
values = el.values()
if values[1] in ['submit', 'x_fp_hash', 'return_url', 'x_state', 'x_ship_to_state']:
continue
self.assertEqual(
unicode(values[2], "utf-8"),
form_values[values[1]],
'Authorize: wrong value for input %s: received %s instead of %s' % (values[1], values[2], form_values[values[1]])
)
@mute_logger('openerp.addons.payment_authorize.models.authorize', 'ValidationError')
def test_20_authorize_form_management(self):
cr, uid, context = self.env.cr, self.env.uid, {}
# make sure we don't run against a production environment
authorize = self.env['payment.acquirer'].browse(self.authorize_id)
self.assertEqual(authorize.environment, 'test', 'test without test environment')
# typical data posted by Authorize.net after the client has successfully paid
authorize_post_data = {
'return_url': u'/shop/payment/validate',
'x_MD5_Hash': u'7934485E1C105940BE854208D10FAB4F',
'x_account_number': u'XXXX0027',
'x_address': u'Huge Street 2/543',
'x_amount': u'320.00',
'x_auth_code': u'E4W7IU',
'x_avs_code': u'Y',
'x_card_type': u'Visa',
'x_cavv_response': u'2',
'x_city': u'Sun City',
'x_company': u'',
'x_country': u'Belgium',
'x_cust_id': u'',
'x_cvv2_resp_code': u'',
'x_description': u'',
'x_duty': u'0.00',
'x_email': u'norbert.buyer@exampl',
'x_fax': u'',
'x_first_name': u'Norbert',
'x_freight': u'0.00',
'x_invoice_num': u'SO004',
'x_last_name': u'Buyer',
'x_method': u'CC',
'x_phone': u'0032 12 34 56 78',
'x_po_num': u'',
'x_response_code': u'1',
'x_response_reason_code': u'1',
'x_response_reason_text': u'This transaction has been approved.',
'x_ship_to_address': u'Huge Street 2/543',
'x_ship_to_city': u'Sun City',
'x_ship_to_company': u'',
'x_ship_to_country': u'Belgium',
'x_ship_to_first_name': u'Norbert',
'x_ship_to_last_name': u'Buyer',
'x_ship_to_state': u'',
'x_ship_to_zip': u'1000',
'x_state': u'',
'x_tax': u'0.00',
'x_tax_exempt': u'FALSE',
'x_test_request': u'false',
'x_trans_id': u'2217460311',
'x_type': u'auth_capture',
'x_zip': u'1000'
}
# should raise error about unknown tx
with self.assertRaises(ValidationError):
self.payment_transaction.form_feedback(cr, uid, authorize_post_data, 'authorize', context=context)
tx = self.env['payment.transaction'].create({
'amount': 320.0,
'acquirer_id': self.authorize_id,
'currency_id': self.currency_usd.id,
'reference': 'SO004',
'partner_name': 'Norbert Buyer',
'partner_country_id': self.country_france_id})
# validate it
self.payment_transaction.form_feedback(cr, uid, authorize_post_data, 'authorize', context=context)
# check state
self.assertEqual(tx.state, 'done', 'Authorize: validation did not put tx into done state')
self.assertEqual(tx.acquirer_reference, authorize_post_data.get('x_trans_id'), 'Authorize: validation did not update tx payid')
# reset tx
tx.write({'state': 'draft', 'date_validate': False, 'acquirer_reference': False})
# simulate an error
authorize_post_data['x_response_code'] = u'3'
self.payment_transaction.form_feedback(cr, uid, authorize_post_data, 'authorize', context=context)
# check state
self.assertEqual(tx.state, 'error', 'Authorize: erroneous validation did not put tx into error state')
| gpl-3.0 | -351,980,788,649,556,600 | 42.395604 | 135 | 0.559256 | false |
try-something-new-everyday/blog | kendo romania/scripts/.ipynb_checkpoints/members_loader-checkpoint.py | 4 | 5619 | import pandas as pd, numpy as np, json
import clubs_loader
nyears=8
def get_members(path):
members=pd.read_excel(path,header=[1])
members=members[[231, 'Nr. EKF',
'Club', 'Unnamed: 3',
'Numele', 'Prenumele',
'Gen', 'Data naşterii',
'1 kyu','practică',
'1 dan', '2 dan',
'3 dan', '4 dan',
'5 dan', '6 dan',
'7 dan', '8 dan',
151,
152, '152.1',
175, 179,
197,214,'231.1']]
members.columns=list(members.columns[:-nyears])+list(range(2019-nyears,2019))
return members
def get_transfer(name,tf,verbose=False):
if tf==[]:
return tf
else:
to_blank=[' ','(',')','Transfer:','?','/']
to_replace={'Hungary':'HUN'}
to_year={'Gușu Rebeca':'2010'}
def get_tf_clubs(z):
for t in range(len(to_blank)):
z=z.replace(to_blank[t],'')
for t in to_replace:
z=z.replace(t,to_replace[t])
if ('=>') in z:
from_to=z.find('=>')
to_return={'from':z[from_to-3:from_to],'to':z[from_to+2:from_to+5],'time':z[-4:]}
if verbose:
to_return['orig']=z
else:
print('error with transfer',z,)
to_return=z
##check years
#infer year from wrong note order
if '20' not in to_return['time']:
if '20' in z:
to_return['time']=z[z.find('20'):z.find('20')+4]
#if still not inferred, then manual fix
if '20' not in to_return['time']:
to_return['time']=to_year[name]
to_return['time']=int(to_return['time'])
return to_return
transfers=str(tf).split('\n')
tfr=[]
for i in transfers:
if not i in ('','nan'):
tfr.append(get_tf_clubs(i))
return sorted(tfr, key=lambda k: k['time'])
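# Illustrative note format parsed above (hypothetical clubs): a cell
# like "RBK => BUC (2015)" yields
# {'from': 'RBK', 'to': 'BUC', 'time': 2015}.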
def cleaner(members):
data={}
replace_active={'Activ':'Active','Inactiv':'Inactive','Free':'Inactive','AS':'Abroad',
'Transferat':'Abroad','Decedat':'Inactive'}
active_redflags=['Deleted']
for i in members.T.iteritems():
active=i[1][231]
if active not in active_redflags:
grades=i[1][['1 kyu','1 dan','2 dan','3 dan','4 dan','5 dan',
'6 dan','7 dan','8 dan']].replace('x',pd.NaT).dropna()
grades0=i[1][['1 dan','2 dan','3 dan','4 dan','5 dan',
'6 dan','7 dan','8 dan']].replace('x',pd.NaT).dropna()
df=pd.DataFrame(grades0)
df.columns=['dan']
df=df.reset_index().set_index('dan').sort_index()
dummy={}
grades=pd.to_datetime(grades.astype(str))
active=replace_active[active]
if len(grades)>0:
mingrade=grades.min().year
maxgrade=grades.max().year
else:
mingrade=2016 #default starting year
maxgrade=2016
if active=='Active':
maxyear=2019 #default active max year
else:
maxyear=min(maxgrade+4,2019) #default active years grace period, if unknown
dummy['name']=i[1]['Numele']+' '+i[1]['Prenumele']
dummy['birth']=str(i[1]['Data naşterii'])[:10]
dummy['gen']=i[1]['Gen']
dummy['ekf']=i[1]['Nr. EKF']
club=i[1]['Club']
dummy['transfer']=get_transfer(dummy['name'],i[1]['Unnamed: 3'])
for year in range(mingrade,maxyear):
if year==maxyear-1:
dummy['active']=active
else:
dummy['active']='Active'
#get year from exams
dummy['dan']=len(df[:str(year)])
#get active from member list
for j in i[1][-nyears:].index:
if year==j:
if i[1][-nyears:][j]=='Da':
dummy['active']=active
else:
dummy['active']='Inactive'
#get club from transfers
clubs=clubs_loader.get_club_by_year(dummy['transfer'],club,year,mingrade,maxyear)
clubs=clubs[:1] #keep only the first club in transfer years; remove this step to double count
for j in range(len(clubs)):
iclub=clubs_loader.replacer(clubs[j])
dummy['club']=iclub
dummy['age']=year-1-pd.to_datetime(dummy['birth']).year
data=clubs_loader.add_to_club(data,iclub,year,dummy.copy())
all_data=[]
for club in data:
for year in data[club]:
df=pd.DataFrame(data[club][year])
df['year']=year
df['club']=club
df=df.drop('transfer',axis=1)
all_data.append(df)
return pd.concat(all_data).reset_index(drop=True) | mit | -4,046,400,550,572,162,000 | 40.6 | 110 | 0.427961 | false |
demonchild2112/travis-test | grr/server/grr_response_server/checks/format_test.py | 2 | 1044 | #!/usr/bin/env python
"""Unit test for check definitions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import glob
import os
from absl import app
from grr_response_core import config
from grr_response_server.check_lib import checks_test_lib
from grr.test_lib import test_lib
class ValidFormatTest(checks_test_lib.HostCheckTest):
def testParseChecks(self):
"""Tests if checks verify, collates errors to diagnose invalid checks."""
# Find the configs.
check_configs = []
for path in config.CONFIG["Checks.config_dir"]:
check_configs.extend(glob.glob(os.path.join(path, "*.yaml")))
# Check each config file and collate errors.
errors = ""
for f in check_configs:
try:
self.assertValidCheckFile(f)
except AssertionError as e:
errors += "%s\n" % e
self.assertFalse(errors, "Errors in check configurations:\n%s" % errors)
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
app.run(main)
| apache-2.0 | 8,552,300,045,135,161,000 | 25.1 | 77 | 0.688697 | false |
Intel-tensorflow/tensorflow | tensorflow/compiler/tests/reshape_op_test.py | 25 | 1822 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slicing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import googletest
class ReshapeTest(xla_test.XLATestCase, parameterized.TestCase):
@parameterized.named_parameters(('32_bit_index', dtypes.int32),
('64_bit_index', dtypes.int64))
def testBasic(self, index_dtype):
for dtype in self.numeric_types:
with self.session():
i = array_ops.placeholder(dtype, shape=[2, 3])
with self.test_scope():
shape = constant_op.constant([3, 2], dtype=index_dtype)
o = array_ops.reshape(i, shape)
params = {
i: [[1, 2, 3], [4, 5, 6]],
}
result = o.eval(feed_dict=params)
self.assertAllEqual([[1, 2], [3, 4], [5, 6]], result)
if __name__ == '__main__':
googletest.main()
| apache-2.0 | -1,006,206,323,664,752,100 | 35.44 | 80 | 0.658617 | false |
matthiascy/panda3d | direct/src/showbase/EventGroup.py | 11 | 4259 | """Undocumented Module"""
__all__ = ['EventGroup']
from direct.showbase import DirectObject
from direct.showbase.PythonUtil import SerialNumGen, Functor
class EventGroup(DirectObject.DirectObject):
"""This class allows you to group together multiple events and treat
them as a single event. The EventGroup will not send out its event until
all of its sub-events have occurred."""
_SerialNumGen = SerialNumGen()
def __init__(self, name, subEvents=None, doneEvent=None):
"""
Provide a meaningful name to aid debugging.
doneEvent is optional. If not provided, a unique done event will be
generated and is available as EventGroup.getDoneEvent().
Examples:
# waits for gotRed and gotBlue, then sends out 'gotColors'
EventGroup('getRedAndBlue', ('gotRed', 'gotBlue'), doneEvent='gotColors')
# waits for two interests to close, then calls self._handleBothInterestsClosed()
# uses EventGroup.getDoneEvent() and EventGroup.newEvent() to generate unique,
# disposable event names
eGroup = EventGroup('closeInterests')
self.acceptOnce(eGroup.getDoneEvent(), self._handleBothInterestsClosed)
base.cr.closeInterest(interest1, event=eGroup.newEvent('closeInterest1'))
base.cr.closeInterest(interest2, event=eGroup.newEvent('closeInterest2'))
"""
self._name = name
self._subEvents = set()
self._completedEvents = set()
if doneEvent is None:
# no doneEvent provided, allocate a unique event name
doneEvent = 'EventGroup-%s-%s-Done' % (
EventGroup._SerialNumGen.next(), self._name)
self._doneEvent = doneEvent
self._completed = False
if subEvents is not None:
# add the events that were passed in to start with, more may be added
# later via newEvent()
for event in subEvents:
self.addEvent(event)
def destroy(self):
if hasattr(self, '_name'):
# keep this around
#del self._doneEvent
del self._name
del self._subEvents
del self._completedEvents
self.ignoreAll()
def getName(self):
return self._name
def getDoneEvent(self):
return self._doneEvent
def isCompleted(self):
return self._completed
def addEvent(self, eventName):
""" Adds a new event to the list of sub-events that we're waiting on.
Returns the name of the event. """
if self._completed:
self.notify.error('addEvent(\'%s\') called on completed EventGroup \'%s\'' % (
eventName, self.getName()))
if eventName in self._subEvents:
self.notify.error('addEvent(\'%s\'): event already in EventGroup \'%s\'' % (
eventName, self.getName()))
self._subEvents.add(eventName)
self.acceptOnce(eventName, Functor(self._subEventComplete, eventName))
return eventName
def newEvent(self, name):
""" Pass in an event name and it will be unique-ified for you and added
to this EventGroup. TIP: there's no need to repeat information in this event
name that is already in the name of the EventGroup object.
Returns the new event name. """
return self.addEvent('%s-SubEvent-%s-%s' % (
self._name, EventGroup._SerialNumGen.next(), name))
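# Example: EventGroup('closeInterests').newEvent('closeInterest1')
# might return 'closeInterests-SubEvent-7-closeInterest1' (the serial
# number varies per process).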
def _subEventComplete(self, subEventName, *args, **kwArgs):
if subEventName in self._completedEvents:
self.notify.warning('_subEventComplete: \'%s\' already received' %
subEventName)
else:
self._completedEvents.add(subEventName)
if self._completedEvents == self._subEvents:
self._signalComplete()
def _signalComplete(self):
self._completed = True
messenger.send(self._doneEvent)
self.destroy()
def __repr__(self):
return '%s(\'%s\', %s, doneEvent=\'%s\') # completed=%s' % (
self.__class__.__name__,
self._name,
tuple(self._subEvents),
self._doneEvent,
tuple(self._completedEvents))
| bsd-3-clause | 218,800,054,126,819,740 | 37.718182 | 90 | 0.610472 | false |
wrycu/DiscordCharting | scripts/boot_listener.py | 1 | 1227 | import discord
import asyncio
from configparser import ConfigParser
import os
class BootListener:
def __init__(self, now_playing):
config = ConfigParser()
config.read(os.path.join('..', 'config.ini'))
conf = {
'discord': {
'email': config.get('discord', 'email'),
'pass': config.get('discord', 'pass'),
},
}
print('Starting new event loop for discord api')
loop = asyncio.new_event_loop()
try:
self.client = discord.Client(loop=loop)
print('Discord client created')
@self.client.event
async def on_ready():
print('Client ready')
await self.client.change_status(game=discord.Game(name=now_playing))
print('Status set')
loop.run_until_complete(self.client.close())
print('Shutting down client')
self.client.run(conf['discord']['email'], conf['discord']['pass'])
except Exception as e:
print("Exception! Quitting: {}".format(e))
loop.run_until_complete(self.client.close())
finally:
print('Closing event loop')
loop.close()
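# Example usage (assumes ../config.ini contains a [discord] section
# with 'email' and 'pass' keys):
# BootListener('Now playing: example track')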
| apache-2.0 | -7,679,223,266,241,262,000 | 31.289474 | 84 | 0.538712 | false |
jmlopez-rod/pyjoint | docs/source/conf.py | 1 | 8413 | # -*- coding: utf-8 -*-
#
# pyjoint documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 6 23:49:22 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pyjoint'
copyright = u'2015, Manuel Lopez'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.0.dev'
# The full version, including alpha/beta/rc tags.
release = '0.0.0.dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyjointdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyjoint.tex', u'pyjoint Documentation',
u'Manuel Lopez', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyjoint', u'pyjoint Documentation',
[u'Manuel Lopez'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyjoint', u'pyjoint Documentation',
u'Manuel Lopez', 'pyjoint', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-2-clause | -1,821,507,674,786,739,700 | 30.391791 | 79 | 0.706526 | false |
Matrixeigs/Optimization | unit_commitment/unit_commitment_distributed.py | 1 | 11794 | """
Augmented Lagrangian decomposition for basic unit commitment problems
@author: Zhao Tianyang
@e-mail: [email protected]
@date:21 Mar 2018
Note: The mathematical model is taken from the following references.
[1]
"""
from numpy import zeros, shape, ones, diag, concatenate, r_, arange, divide, linalg
import matplotlib.pyplot as plt
from solvers.mixed_integer_quadratic_programming import mixed_integer_quadratic_programming as miqp
from scipy.sparse import csr_matrix as sparse
from pypower import loadcase, ext2int
def problem_formulation(case, Distribution_factor, Cg, Cd):
"""
:param case: The test case for unit commitment problem
:return:
"""
from unit_commitment.data_format.data_format import IG, PG
from unit_commitment.test_cases.case118 import F_BUS, T_BUS, BR_X, RATE_A
from unit_commitment.test_cases.case118 import GEN_BUS, COST_C, COST_B, COST_A, PG_MAX, PG_MIN, I0, MIN_DOWN, \
MIN_UP, RU, RD, COLD_START
from unit_commitment.test_cases.case118 import BUS_ID, PD
baseMVA, bus, gen, branch, profile = case["baseMVA"], case["bus"], case["gen"], case["branch"], case["Load_profile"]
# Modify the bus, gen and branch matrix
bus[:, BUS_ID] = bus[:, BUS_ID] - 1
gen[:, GEN_BUS] = gen[:, GEN_BUS] - 1
branch[:, F_BUS] = branch[:, F_BUS] - 1
branch[:, T_BUS] = branch[:, T_BUS] - 1
ng = shape(case['gen'])[0] # number of schedule injections
nl = shape(case['branch'])[0] ## number of branches
    nb = shape(case['bus'])[0]  ## number of buses
u0 = [0] * ng # The initial generation status
for i in range(ng):
u0[i] = int(gen[i, I0] > 0)
# Formulate a mixed integer quadratic programming problem
# 1) Announce the variables
# [vt,wt,ut,Pt]:start-up,shut-down,status,generation level
# 1.1) boundary information
T = case["Load_profile"].shape[0]
lb = []
for i in range(ng):
lb += [0] * T
lb += [0] * T
lb += [0] * T
lb += [0] * T
ub = []
for i in range(ng):
ub += [1] * T
ub += [1] * T
ub += [1] * T
ub += [gen[i, PG_MAX]] * T
nx = len(lb)
NX = 4 * T # The number of decision variables for each unit
# 1.2) variable information
vtypes = []
for i in range(ng):
vtypes += ["C"] * T
vtypes += ["C"] * T
vtypes += ["B"] * T
vtypes += ["C"] * T
# 1.3) objective information
c = []
q = []
for i in range(ng):
c += [gen[i, COLD_START]] * T
c += [0] * T
c += [gen[i, COST_C]] * T
c += [gen[i, COST_B]] * T
q += [0] * T
q += [0] * T
q += [0] * T
q += [gen[i, COST_A]] * T
Q = diag(q)
# 2) Constraint set
# 2.1) Power balance equation
Aeq = zeros((T, nx))
for i in range(T):
for j in range(ng):
Aeq[i, j * NX + 3 * T + i] = 1
beq = [0] * T
for i in range(T):
beq[i] = case["Load_profile"][i]
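    # i.e. for every hour t: sum over all units g of Pg[g, t] == Load_profile[t].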
# 2.2) Status transformation of each unit
Aeq_temp = zeros((T * ng, nx))
beq_temp = [0] * T * ng
for i in range(ng):
for j in range(T):
Aeq_temp[i * T + j, i * NX + j] = 1
Aeq_temp[i * T + j, i * NX + j + T] = -1
Aeq_temp[i * T + j, i * NX + j + 2 * T] = -1
if j != 0:
Aeq_temp[i * T + j, i * NX + j - 1 + 2 * T] = 1
else:
beq_temp[i * T + j] = -u0[i]
Aeq = concatenate((Aeq, Aeq_temp), axis=0)
beq += beq_temp
# 2.3) Power range limitation
Aineq = zeros((T * ng, nx))
bineq = [0] * T * ng
for i in range(ng):
for j in range(T):
Aineq[i * T + j, i * NX + 2 * T + j] = gen[i, PG_MIN]
Aineq[i * T + j, i * NX + 3 * T + j] = -1
Aineq_temp = zeros((T * ng, nx))
bineq_temp = [0] * T * ng
for i in range(ng):
for j in range(T):
Aineq_temp[i * T + j, i * NX + 2 * T + j] = -gen[i, PG_MAX]
Aineq_temp[i * T + j, i * NX + 3 * T + j] = 1
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
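    # Together, the two blocks above enforce Ig*PG_MIN <= Pg <= Ig*PG_MAX
    # for each unit and each hour.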
# 2.4) Start up and shut down time limitation
UP_LIMIT = [0] * ng
DOWN_LIMIT = [0] * ng
for i in range(ng):
UP_LIMIT[i] = T - int(gen[i, MIN_UP])
DOWN_LIMIT[i] = T - int(gen[i, MIN_DOWN])
# 2.4.1) Up limit
Aineq_temp = zeros((sum(UP_LIMIT), nx))
bineq_temp = [0] * sum(UP_LIMIT)
for i in range(ng):
for j in range(int(gen[i, MIN_UP]), T):
Aineq_temp[sum(UP_LIMIT[0:i]) + j - int(gen[i, MIN_UP]), i * NX + j - int(gen[i, MIN_UP]):i * NX + j] = 1
Aineq_temp[sum(UP_LIMIT[0:i]) + j - int(gen[i, MIN_UP]), i * NX + 2 * T + j] = -1
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
# 2.4.2) Down limit
Aineq_temp = zeros((sum(DOWN_LIMIT), nx))
bineq_temp = [1] * sum(DOWN_LIMIT)
for i in range(ng):
for j in range(int(gen[i, MIN_DOWN]), T):
Aineq_temp[sum(DOWN_LIMIT[0:i]) + j - int(gen[i, MIN_DOWN]),
i * NX + T + j - int(gen[i, MIN_DOWN]):i * NX + T + j] = 1
Aineq_temp[sum(DOWN_LIMIT[0:i]) + j - int(gen[i, MIN_DOWN]), i * NX + 2 * T + j] = 1
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
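    # The rolling-window sums above encode minimum up/down times: a unit
    # started within the last MIN_UP hours must still be committed, and one
    # shut down within the last MIN_DOWN hours must stay off.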
# 2.5) Ramp constraints:
# 2.5.1) Ramp up limitation
Aineq_temp = zeros((ng * (T - 1), nx))
bineq_temp = [0] * ng * (T - 1)
for i in range(ng):
for j in range(T - 1):
Aineq_temp[i * (T - 1) + j, i * NX + 3 * T + j + 1] = 1
Aineq_temp[i * (T - 1) + j, i * NX + 3 * T + j] = -1
Aineq_temp[i * (T - 1) + j, i * NX + 2 * T + j] = -gen[i, RU]
Aineq_temp[i * (T - 1) + j, i * NX + j] = -gen[i, PG_MAX]
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
    # 2.5.2) Ramp down limitation
Aineq_temp = zeros((ng * (T - 1), nx))
bineq_temp = [0] * ng * (T - 1)
for i in range(ng):
for j in range(T - 1):
Aineq_temp[i * (T - 1) + j, i * NX + 3 * T + j + 1] = -1
Aineq_temp[i * (T - 1) + j, i * NX + 3 * T + j] = 1
Aineq_temp[i * (T - 1) + j, i * NX + 2 * T + j + 1] = -gen[i, RD]
Aineq_temp[i * (T - 1) + j, i * NX + T + j + 1] = -gen[i, PG_MIN]
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
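    # In inequality form the two ramp blocks read (per unit, per hour t):
    #   Pg[t+1] - Pg[t]   <= RU * Ig[t]   + PG_MAX * v[t]      (ramp up)
    #   Pg[t]   - Pg[t+1] <= RD * Ig[t+1] + PG_MIN * w[t+1]    (ramp down)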
# 2.6) Line flow limitation
# Add the line flow limitation time by time
Aineq_temp = zeros((nl * T, nx))
bineq_temp = [0] * nl * T
for i in range(T):
index = [0] * ng
for j in range(ng):
index[j] = j * 4 * T + 3 * T + i
Cx2g = sparse((ones(ng), (arange(ng), index)), (ng, nx))
Aineq_temp[i * nl:(i + 1) * nl, :] = (Distribution_factor * Cg * Cx2g).todense()
PD_bus = bus[:, PD] * case["Load_profile"][i]
bineq_temp[i * nl:(i + 1) * nl] = branch[:, RATE_A] + Distribution_factor * Cd * PD_bus
del index, Cx2g
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
Aineq_temp = zeros((nl * T, nx))
bineq_temp = [0] * nl * T
for i in range(T):
index = [0] * ng
for j in range(ng):
index[j] = j * 4 * T + 3 * T + i
Cx2g = sparse((-ones(ng), (arange(ng), index)), (ng, nx))
Aineq_temp[i * nl:(i + 1) * nl, :] = (Distribution_factor * Cg * Cx2g).todense()
PD_bus = bus[:, PD] * case["Load_profile"][i]
bineq_temp[i * nl:(i + 1) * nl] = branch[:, RATE_A] - Distribution_factor * Cd * PD_bus
del index, Cx2g
Aineq = concatenate((Aineq, Aineq_temp), axis=0)
bineq += bineq_temp
model = {}
model["c"] = c
model["Q"] = Q
model["Aeq"] = Aeq
model["beq"] = beq
model["lb"] = lb
model["ub"] = ub
model["Aineq"] = Aineq
model["bineq"] = bineq
model["vtypes"] = vtypes
return model
def solution_decomposition(xx, obj, success):
"""
Decomposition of objective functions
:param xx: Solution
:param obj: Objective value
:param success: Success or not
:return:
"""
T = 24
ng = 54
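    # NOTE: T and ng are hard-coded here to match the 118-bus test case
    # (24 hours, 54 generators); adjust them for other systems.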
result = {}
result["success"] = success
result["obj"] = obj
if success:
v = zeros((ng, T))
w = zeros((ng, T))
Ig = zeros((ng, T))
Pg = zeros((ng, T))
for i in range(ng):
v[i, :] = xx[4 * i * T:4 * i * T + T]
w[i, :] = xx[4 * i * T + T:4 * i * T + 2 * T]
Ig[i, :] = xx[4 * i * T + 2 * T:4 * i * T + 3 * T]
Pg[i, :] = xx[4 * i * T + 3 * T:4 * i * T + 4 * T]
result["vt"] = v
result["wt"] = w
result["Ig"] = Ig
result["Pg"] = Pg
else:
result["vt"] = 0
result["wt"] = 0
result["Ig"] = 0
result["Pg"] = 0
return result
if __name__ == "__main__":
from unit_commitment.test_cases import case118
test_case = case118.case118()
from pypower.case118 import case118
from pypower.idx_brch import F_BUS, T_BUS, BR_X, TAP, SHIFT, BR_STATUS, RATE_A
from pypower.idx_cost import MODEL, NCOST, PW_LINEAR, COST, POLYNOMIAL
from pypower.idx_bus import BUS_TYPE, REF, VA, VM, PD, GS, VMAX, VMIN, BUS_I
from pypower.idx_gen import GEN_BUS, VG, PG, QG, PMAX, PMIN, QMAX, QMIN
from numpy import flatnonzero as find
casedata = case118()
mpc = loadcase.loadcase(casedata)
mpc = ext2int.ext2int(mpc)
baseMVA, bus, gen, branch, gencost = mpc["baseMVA"], mpc["bus"], mpc["gen"], mpc["branch"], mpc["gencost"] #
nb = shape(mpc['bus'])[0] ## number of buses
nl = shape(mpc['branch'])[0] ## number of branches
ng = shape(mpc['gen'])[0] ## number of dispatchable injections
    ## Formulate the branch susceptance and connection matrices
stat = branch[:, BR_STATUS] ## ones at in-service branches
b = stat / branch[:, BR_X] ## series susceptance
tap = ones(nl) ## default tap ratio = 1
i = find(branch[:, TAP]) ## indices of non-zero tap ratios
tap[i] = branch[i, TAP] ## assign non-zero tap ratios
## build connection matrix Cft = Cf - Ct for line and from - to buses
f = branch[:, F_BUS] ## list of "from" buses
t = branch[:, T_BUS] ## list of "to" buses
i = r_[range(nl), range(nl)] ## double set of row indices
## connection matrix
Cft = sparse((r_[ones(nl), -ones(nl)], (i, r_[f, t])), (nl, nb))
## build Bf such that Bf * Va is the vector of real branch powers injected
## at each branch's "from" bus
Bf = sparse((r_[b, -b], (i, r_[f, t])), shape=(nl, nb)) ## = spdiags(b, 0, nl, nl) * Cft
## build Bbus
Bbus = Cft.T * Bf
# The distribution factor
Distribution_factor = sparse(linalg.solve(Bbus.toarray().transpose(), Bf.toarray().transpose()).transpose())
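    # Distribution_factor is the PTDF matrix: branch flows = PTDF * (net bus injections).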
Cg = sparse((ones(ng), (gen[:, GEN_BUS], arange(ng))),
(nb, ng)) # Sparse index generation method is different from the way of matlab
Cd = sparse((ones(nb), (bus[:, BUS_I], arange(nb))), (nb, nb)) # Sparse index load
model = problem_formulation(test_case, Distribution_factor, Cg, Cd)
(xx, obj, success) = miqp(c=model["c"], Q=model["Q"], Aeq=model["Aeq"], A=model["Aineq"], b=model["bineq"],
beq=model["beq"], xmin=model["lb"],
xmax=model["ub"], vtypes=model["vtypes"])
sol = solution_decomposition(xx, obj, success)
T = 24
nx = 4 * T * ng
# check the branch power flow
branch_f2t = zeros((nl, T))
branch_t2f = zeros((nl, T))
for i in range(T):
PD_bus = test_case["bus"][:, 1] * test_case["Load_profile"][i]
branch_f2t[:, i] = Distribution_factor * (Cg * sol["Pg"][:, i] - Cd * PD_bus)
branch_t2f[:, i] = -Distribution_factor * (Cg * sol["Pg"][:, i] - Cd * PD_bus)
plt.plot(sol["Pg"])
plt.show()
| mit | 5,418,163,758,514,771,000 | 35.627329 | 120 | 0.518993 | false |
ojarva/django-websocket-redis | ws4redis/django_runserver.py | 2 | 3380 | #-*- coding: utf-8 -*-
import six
import base64
import select
from hashlib import sha1
from wsgiref import util
from django.core.wsgi import get_wsgi_application
from django.core.servers.basehttp import WSGIServer, WSGIRequestHandler
from django.core.handlers.wsgi import logger
from django.conf import settings
from django.core.management.commands import runserver
from django.utils.six.moves import socketserver
from django.utils.encoding import force_str
from ws4redis.websocket import WebSocket
from ws4redis.wsgi_server import WebsocketWSGIServer, HandshakeError, UpgradeRequiredError
util._hoppish = {}.__contains__
class WebsocketRunServer(WebsocketWSGIServer):
WS_GUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
WS_VERSIONS = ('13', '8', '7')
def upgrade_websocket(self, environ, start_response):
"""
Attempt to upgrade the socket environ['wsgi.input'] into a websocket enabled connection.
"""
websocket_version = environ.get('HTTP_SEC_WEBSOCKET_VERSION', '')
if not websocket_version:
raise UpgradeRequiredError
elif websocket_version not in self.WS_VERSIONS:
raise HandshakeError('Unsupported WebSocket Version: {0}'.format(websocket_version))
key = environ.get('HTTP_SEC_WEBSOCKET_KEY', '').strip()
if not key:
raise HandshakeError('Sec-WebSocket-Key header is missing/empty')
try:
key_len = len(base64.b64decode(key))
except TypeError:
raise HandshakeError('Invalid key: {0}'.format(key))
if key_len != 16:
# 5.2.1 (3)
raise HandshakeError('Invalid key: {0}'.format(key))
sec_ws_accept = base64.b64encode(sha1(six.b(key) + self.WS_GUID).digest())
if six.PY3:
sec_ws_accept = sec_ws_accept.decode('ascii')
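        # Per RFC 6455, section 4.2.2: the accept token is the base64-encoded
        # SHA-1 digest of the client key concatenated with the fixed WS GUID.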
headers = [
('Upgrade', 'websocket'),
('Connection', 'Upgrade'),
('Sec-WebSocket-Accept', sec_ws_accept),
('Sec-WebSocket-Version', str(websocket_version)),
]
logger.debug('WebSocket request accepted, switching protocols')
start_response(force_str('101 Switching Protocols'), headers)
six.get_method_self(start_response).finish_content()
return WebSocket(environ['wsgi.input'])
def select(self, rlist, wlist, xlist, timeout=None):
return select.select(rlist, wlist, xlist, timeout)
def run(addr, port, wsgi_handler, ipv6=False, threading=False):
"""
Function to monkey patch the internal Django command: manage.py runserver
"""
logger.info('Websocket support is enabled')
server_address = (addr, port)
if not threading:
raise Exception("Django's Websocket server must run with threading enabled")
httpd_cls = type('WSGIServer', (socketserver.ThreadingMixIn, WSGIServer), {'daemon_threads': True})
httpd = httpd_cls(server_address, WSGIRequestHandler, ipv6=ipv6)
httpd.set_app(wsgi_handler)
httpd.serve_forever()
runserver.run = run
_django_app = get_wsgi_application()
_websocket_app = WebsocketRunServer()
_websocket_url = getattr(settings, 'WEBSOCKET_URL')
def application(environ, start_response):
if _websocket_url and environ.get('PATH_INFO').startswith(_websocket_url):
return _websocket_app(environ, start_response)
return _django_app(environ, start_response)
| mit | 3,041,391,749,939,777,500 | 38.302326 | 103 | 0.681953 | false |
fabrickit/fabkit | core/webapp/web_apps/chat/views.py | 1 | 3023 | # coding: utf-8
from django.contrib.auth.models import User
from web_apps.chat.models import Comment, UserCluster
from web_apps.chat.utils import get_comments, get_cluster
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseServerError
from django.views.decorators.csrf import csrf_exempt
from django.contrib.sessions.models import Session
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
import json
@login_required
def index(request, cluster_name='all'):
cluster = get_cluster(cluster_name)
comments = get_comments(cluster)
try:
user_cluster = UserCluster.objects.get(user=request.user, cluster=cluster)
user_cluster.unread_comments_length = 0
user_cluster.save()
except ObjectDoesNotExist:
UserCluster.objects.create(user=request.user, cluster=cluster, unread_comments_length=0)
context = {
'title': 'Chat: ' + cluster_name,
'comments': comments,
'cluster': cluster_name,
}
if request.META.get('HTTP_X_PJAX'):
return render(request, 'chat/content.html', context)
return render(request, 'chat/index.html', context)
@csrf_exempt
def node_api(request, action):
try:
# Get User from sessionid
session = Session.objects.get(session_key=request.POST.get('sessionid'))
user_id = session.get_decoded().get('_auth_user_id')
user = User.objects.get(id=user_id)
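        # The session payload stores the authenticated user's id under
        # '_auth_user_id'; decoding it maps the socket client back to a
        # Django User.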
if action == 'leave_from_cluster':
cluster = request.POST.get('cluster')
cluster = get_cluster(cluster)
try:
user_cluster = UserCluster.objects.get(
user=user, cluster=cluster)
user_cluster.delete()
except ObjectDoesNotExist:
pass
data = json.dumps({})
return HttpResponse(data)
elif action == 'post_comment':
message = json.loads(request.POST.get('message'))
cluster = message.get('cluster')
text = message.get('text')
cluster = get_cluster(cluster)
user_clusters = UserCluster.objects.all().filter(cluster=cluster)
for user_cluster in user_clusters:
if user_cluster.user.id == user.id:
continue
user_cluster.unread_comments_length += 1
user_cluster.save()
if cluster == "None" or cluster is None:
comment = Comment.objects.create(user=user, text=text)
else:
comment = Comment.objects.create(user=user, cluster=cluster, text=text)
data = json.dumps({
'user': user.username,
'text': text,
'created_at': str(comment.created_at),
'updated_at': str(comment.created_at),
})
return HttpResponse(data)
except Exception, e:
return HttpResponseServerError(str(e))
| mit | -5,369,956,445,051,854,000 | 32.21978 | 96 | 0.615944 | false |
olivierdalang/stdm | ui/forms/property_mapper.py | 1 | 4099 | """
/***************************************************************************
Name : Generic application for forms
Description : forms generator functions
Date : 30/June/2013
copyright : (C) 2014 by UN-Habitat and implementing partners.
See the accompanying file CONTRIBUTORS.txt in the root
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from collections import OrderedDict
from stdm.utils import *
from stdm.data import STDMDb
from .widgets import WidgetCollection
from PyQt4.QtGui import QMessageBox
from stdm.ui.stdmdialog import DeclareMapping
from lookup_dlg import LookupModeller
from attribute_datatype import AttributePropretyType
from stdm.ui.forms.attribute_formatters import AttributeFormatters
from stdm.data.config_utils import foreign_key_table_reference
class TypePropertyMapper(object):
def __init__(self, model = None):
""" Class to read and match the datatype to respective control on the form"""
self._modeller = LookupModeller()
self._model = model
self._mapper = DeclareMapping.instance()
self._attribute_mapper = AttributePropretyType(self._model)
self._attr = self._attribute_mapper.attribute_type()
self.widgetList = OrderedDict()
self.hideGUID()
def hideGUID(self):
try:
for keys in self._attr.keys():
if keys == 'id':
self._attr.pop(keys)
        except KeyError:
            raise
def widget(self):
isLookup = False
lk_items = None
self.formatters = None
widget_collection = WidgetCollection()
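        # For each model attribute, pick a widget control from its datatype
        # and record [control, is_lookup, lookup_items, formatter] under the
        # attribute name.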
for attr, attr_data_type in self._attr.iteritems():
if attr_data_type[1]:
attr_data_type[0] = 'choice'
lkModel = self._modeller.lookupModel(attr_data_type[1])
lk_items = self.lookupItems(lkModel)
if lk_items:
isLookup = True
control_widget = widget_collection.widget_control_type(attr_data_type[0])
if attr_data_type[0] == 'foreign key':
source_table = foreign_key_table_reference(self._model)
self.formatters = AttributeFormatters(attr, source_table[0])
self.formatters.set_display_name(source_table[1])
self.widgetList[attr] = [control_widget, isLookup, lk_items, self.formatters]
def setProperty(self):
self.widget()
return self.widgetList
def userLookupOptions(self, DBmodel):
"""
Fetch lookup values from the DB.
"""
# try:
lkupModel = readComboSelections(DBmodel)
return lkupModel
#except Exception as ex:
# QMessageBox.information(None,'Lookup choices',
#QApplication.translate(u'TypePropertyMapper',"Error loading %s lookup values"%str(ex.message)))
#finally:
# self.clearMapping()
def lookupItems(self, model):
modelItems = self.userLookupOptions(model)
return modelItems
def clearMapping(self):
STDMDb.instance().session.rollback()
def display_mapping(self):
#use the mapped table properties
self._mapper.tableMapping(self._model) | gpl-2.0 | -671,776,579,487,693,800 | 40.278351 | 108 | 0.537448 | false |
matthiasdiener/spack | var/spack/repos/builtin/packages/launchmon/package.py | 5 | 2100 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Launchmon(Package):
"""Software infrastructure that enables HPC run-time tools to
co-locate tool daemons with a parallel job."""
homepage = "https://github.com/LLNL/LaunchMON"
url = "https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz"
version('1.0.2', '8d6ba77a0ec2eff2fde2c5cc8fa7ff7a')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('libgcrypt')
depends_on('libgpg-error')
depends_on("elf", type='link')
depends_on("boost")
depends_on("spectrum-mpi", when='arch=ppc64le')
def install(self, spec, prefix):
configure(
"--prefix=" + prefix,
"--with-bootfabric=cobo",
"--with-rm=slurm")
make()
make("install")
| lgpl-2.1 | -81,155,037,442,614,450 | 39.384615 | 94 | 0.651905 | false |
cedi4155476/QGIS | python/plugins/processing/algs/taudem/dinftranslimaccum2_multi.py | 12 | 5459 | # -*- coding: utf-8 -*-
"""
***************************************************************************
dinftranslimaccum2_multi.py
---------------------
Date : March 2015
Copyright : (C) 2015 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'March 2015'
__copyright__ = '(C) 2015, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtGui import QIcon
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.GeoAlgorithmExecutionException import \
GeoAlgorithmExecutionException
from processing.core.parameters import ParameterFile
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputDirectory
from TauDEMUtils import TauDEMUtils
class DinfTransLimAccum2Multi(GeoAlgorithm):
DINF_FLOW_DIR_GRID = 'DINF_FLOW_DIR_GRID'
SUPPLY_GRID = 'SUPPLY_GRID'
CAPACITY_GRID = 'CAPACITY_GRID'
IN_CONCENTR_GRID = 'IN_CONCENTR_GRID'
OUTLETS_SHAPE = 'OUTLETS_SHAPE'
EDGE_CONTAM = 'EDGE_CONTAM'
TRANSP_LIM_ACCUM_GRID = 'TRANSP_LIM_ACCUM_GRID'
DEPOSITION_GRID = 'DEPOSITION_GRID'
OUT_CONCENTR_GRID = 'OUT_CONCENTR_GRID'
def getIcon(self):
return QIcon(os.path.dirname(__file__) + '/../../images/taudem.png')
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('D-Infinity Transport Limited Accumulation - 2 (multifile)')
self.cmdName = 'dinftranslimaccum'
self.group, self.i18n_group = self.trAlgorithm('Specialized Grid Analysis tools')
self.addParameter(ParameterFile(self.DINF_FLOW_DIR_GRID,
self.tr('D-Infinity Flow Direction Grid'), True, False))
self.addParameter(ParameterFile(self.SUPPLY_GRID,
self.tr('Supply Grid'), True, False))
self.addParameter(ParameterFile(self.CAPACITY_GRID,
self.tr('Transport Capacity Grid'), True, False))
self.addParameter(ParameterFile(self.IN_CONCENTR_GRID,
self.tr('Input Concentration Grid'), True, False))
self.addParameter(ParameterVector(self.OUTLETS_SHAPE,
self.tr('Outlets Shapefile'),
[ParameterVector.VECTOR_TYPE_POINT], True))
self.addParameter(ParameterBoolean(self.EDGE_CONTAM,
self.tr('Check for edge contamination'), True))
self.addOutput(OutputDirectory(self.TRANSP_LIM_ACCUM_GRID,
self.tr('Transport Limited Accumulation Grid')))
self.addOutput(OutputDirectory(self.DEPOSITION_GRID,
self.tr('Deposition Grid')))
self.addOutput(OutputDirectory(self.OUT_CONCENTR_GRID,
self.tr('Output Concentration Grid')))
def processAlgorithm(self, progress):
commands = []
commands.append(os.path.join(TauDEMUtils.mpiexecPath(), 'mpiexec'))
processNum = ProcessingConfig.getSetting(TauDEMUtils.MPI_PROCESSES)
if processNum <= 0:
raise GeoAlgorithmExecutionException(
self.tr('Wrong number of MPI processes used. Please set '
'correct number before running TauDEM algorithms.'))
commands.append('-n')
commands.append(unicode(processNum))
commands.append(os.path.join(TauDEMUtils.taudemMultifilePath(), self.cmdName))
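        # The assembled command has the shape (paths illustrative):
        #   mpiexec -n <procs> dinftranslimaccum -ang <dir> -tsup <supply>
        #           -tc <capacity> -cs <concentration> [...]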
commands.append('-ang')
commands.append(self.getParameterValue(self.DINF_FLOW_DIR_GRID))
commands.append('-tsup')
commands.append(self.getParameterValue(self.SUPPLY_GRID))
commands.append('-tc')
commands.append(self.getParameterValue(self.CAPACITY_GRID))
commands.append('-cs')
commands.append(self.getParameterValue(self.IN_CONCENTR_GRID))
param = self.getParameterValue(self.OUTLETS_SHAPE)
if param is not None:
commands.append('-o')
commands.append(param)
if not self.getParameterValue(self.EDGE_CONTAM):
commands.append('-nc')
commands.append('-tla')
commands.append(self.getOutputValue(self.TRANSP_LIM_ACCUM_GRID))
commands.append('-tdep')
commands.append(self.getOutputValue(self.DEPOSITION_GRID))
commands.append('-ctpt')
commands.append(self.getOutputValue(self.OUT_CONCENTR_GRID))
TauDEMUtils.executeTauDEM(commands, progress)
| gpl-2.0 | 3,714,396,306,898,000,000 | 43.745902 | 113 | 0.588936 | false |
barseghyanartur/oauthlib | tests/test_common.py | 8 | 7147 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import sys
from .unittest import TestCase
from oauthlib.common import add_params_to_uri
from oauthlib.common import CaseInsensitiveDict
from oauthlib.common import extract_params
from oauthlib.common import generate_client_id
from oauthlib.common import generate_nonce
from oauthlib.common import generate_timestamp
from oauthlib.common import generate_token
from oauthlib.common import Request
from oauthlib.common import unicode_type
from oauthlib.common import urldecode
if sys.version_info[0] == 3:
bytes_type = bytes
else:
bytes_type = lambda s, e: str(s)
PARAMS_DICT = {'foo': 'bar', 'baz': '123', }
PARAMS_TWOTUPLE = [('foo', 'bar'), ('baz', '123')]
PARAMS_FORMENCODED = 'foo=bar&baz=123'
URI = 'http://www.someuri.com'
class EncodingTest(TestCase):
def test_urldecode(self):
self.assertItemsEqual(urldecode(''), [])
self.assertItemsEqual(urldecode('='), [('', '')])
self.assertItemsEqual(urldecode('%20'), [(' ', '')])
self.assertItemsEqual(urldecode('+'), [(' ', '')])
self.assertItemsEqual(urldecode('c2'), [('c2', '')])
self.assertItemsEqual(urldecode('c2='), [('c2', '')])
self.assertItemsEqual(urldecode('foo=bar'), [('foo', 'bar')])
self.assertItemsEqual(urldecode('foo_%20~=.bar-'),
[('foo_ ~', '.bar-')])
self.assertItemsEqual(urldecode('foo=1,2,3'), [('foo', '1,2,3')])
self.assertItemsEqual(urldecode('foo=bar.*'), [('foo', 'bar.*')])
self.assertItemsEqual(urldecode('foo=bar@spam'), [('foo', 'bar@spam')])
self.assertRaises(ValueError, urldecode, 'foo bar')
self.assertRaises(ValueError, urldecode, '?')
self.assertRaises(ValueError, urldecode, '%R')
self.assertRaises(ValueError, urldecode, '%RA')
self.assertRaises(ValueError, urldecode, '%AR')
self.assertRaises(ValueError, urldecode, '%RR')
class ParameterTest(TestCase):
def test_extract_params_dict(self):
self.assertItemsEqual(extract_params(PARAMS_DICT), PARAMS_TWOTUPLE)
def test_extract_params_twotuple(self):
self.assertItemsEqual(extract_params(PARAMS_TWOTUPLE), PARAMS_TWOTUPLE)
def test_extract_params_formencoded(self):
self.assertItemsEqual(extract_params(PARAMS_FORMENCODED),
PARAMS_TWOTUPLE)
def test_extract_params_blank_string(self):
self.assertItemsEqual(extract_params(''), [])
def test_extract_params_empty_list(self):
self.assertItemsEqual(extract_params([]), [])
def test_extract_non_formencoded_string(self):
self.assertEqual(extract_params('not a formencoded string'), None)
def test_extract_invalid(self):
self.assertEqual(extract_params(object()), None)
self.assertEqual(extract_params([('')]), None)
def test_add_params_to_uri(self):
correct = '%s?%s' % (URI, PARAMS_FORMENCODED)
self.assertURLEqual(add_params_to_uri(URI, PARAMS_DICT), correct)
self.assertURLEqual(add_params_to_uri(URI, PARAMS_TWOTUPLE), correct)
class GeneratorTest(TestCase):
def test_generate_timestamp(self):
timestamp = generate_timestamp()
self.assertIsInstance(timestamp, unicode_type)
self.assertTrue(int(timestamp))
self.assertGreater(int(timestamp), 1331672335)
def test_generate_nonce(self):
"""Ping me (ib-lundgren) when you discover how to test randomness."""
nonce = generate_nonce()
for i in range(50):
self.assertNotEqual(nonce, generate_nonce())
def test_generate_token(self):
token = generate_token()
self.assertEqual(len(token), 30)
token = generate_token(length=44)
self.assertEqual(len(token), 44)
token = generate_token(length=6, chars="python")
self.assertEqual(len(token), 6)
for c in token:
self.assertIn(c, "python")
def test_generate_client_id(self):
client_id = generate_client_id()
self.assertEqual(len(client_id), 30)
client_id = generate_client_id(length=44)
self.assertEqual(len(client_id), 44)
client_id = generate_client_id(length=6, chars="python")
self.assertEqual(len(client_id), 6)
for c in client_id:
self.assertIn(c, "python")
class RequestTest(TestCase):
def test_non_unicode_params(self):
r = Request(
bytes_type('http://a.b/path?query', 'utf-8'),
http_method=bytes_type('GET', 'utf-8'),
body=bytes_type('you=shall+pass', 'utf-8'),
headers={
bytes_type('a', 'utf-8'): bytes_type('b', 'utf-8')
}
)
self.assertEqual(r.uri, 'http://a.b/path?query')
self.assertEqual(r.http_method, 'GET')
self.assertEqual(r.body, 'you=shall+pass')
self.assertEqual(r.decoded_body, [('you', 'shall pass')])
self.assertEqual(r.headers, {'a': 'b'})
def test_none_body(self):
r = Request(URI)
self.assertEqual(r.decoded_body, None)
def test_empty_list_body(self):
r = Request(URI, body=[])
self.assertEqual(r.decoded_body, [])
def test_empty_dict_body(self):
r = Request(URI, body={})
self.assertEqual(r.decoded_body, [])
def test_empty_string_body(self):
r = Request(URI, body='')
self.assertEqual(r.decoded_body, [])
def test_non_formencoded_string_body(self):
body = 'foo bar'
r = Request(URI, body=body)
self.assertEqual(r.decoded_body, None)
def test_param_free_sequence_body(self):
body = [1, 1, 2, 3, 5, 8, 13]
r = Request(URI, body=body)
self.assertEqual(r.decoded_body, None)
def test_list_body(self):
r = Request(URI, body=PARAMS_TWOTUPLE)
self.assertItemsEqual(r.decoded_body, PARAMS_TWOTUPLE)
def test_dict_body(self):
r = Request(URI, body=PARAMS_DICT)
self.assertItemsEqual(r.decoded_body, PARAMS_TWOTUPLE)
def test_getattr_existing_attribute(self):
r = Request(URI, body='foo bar')
self.assertEqual('foo bar', getattr(r, 'body'))
def test_getattr_return_default(self):
r = Request(URI, body='')
actual_value = getattr(r, 'does_not_exist', 'foo bar')
self.assertEqual('foo bar', actual_value)
def test_getattr_raise_attribute_error(self):
r = Request(URI, body='foo bar')
with self.assertRaises(AttributeError):
getattr(r, 'does_not_exist')
def test_password_body(self):
payload = 'username=foo&password=bar'
r = Request(URI, body=payload)
self.assertNotIn('bar', repr(r))
payload = 'password=bar&username=foo'
r = Request(URI, body=payload)
self.assertNotIn('bar', repr(r))
class CaseInsensitiveDictTest(TestCase):
def test_basic(self):
cid = CaseInsensitiveDict({})
cid['a'] = 'b'
cid['c'] = 'd'
del cid['c']
self.assertEqual(cid['A'], 'b')
self.assertEqual(cid['a'], 'b')
| bsd-3-clause | -2,117,983,232,243,025,400 | 33.694175 | 79 | 0.61998 | false |
claudep/translate | translate/storage/xml_extract/misc.py | 1 | 2879 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import re
import six
def reduce_tree(f, parent_unit_node, unit_node, get_children, *state):
"""Enumerate a tree, applying f to in a pre-order fashion to each node.
parent_unit_node contains the parent of unit_node. For the root of the tree,
parent_unit_node == unit_node.
get_children is a single argument function applied to a unit_node to
get a list/iterator to its children.
state is used by f to modify state information relating to whatever f does
to the tree.
"""
def as_tuple(x):
if isinstance(x, tuple):
return x
else:
return (x,)
state = f(parent_unit_node, unit_node, *state)
for child_unit_node in get_children(unit_node):
state = reduce_tree(f, unit_node, child_unit_node, get_children, *as_tuple(state))
return state
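# A minimal sketch of reduce_tree in use (hypothetical node type with a
# `children` list): counting nodes in pre-order, passing the running count
# through *state and using the root as its own parent.
#   count = reduce_tree(lambda parent, node, n: n + 1,
#                       root, root, lambda node: node.children, 0)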
def compose_mappings(left, right):
"""Given two mappings left: A -> B and right: B -> C, create a
hash result_map: A -> C. Only values in left (i.e. things from B)
which have corresponding keys in right will have their keys mapped
to values in right. """
result_map = {}
for left_key, left_val in left.items():
try:
result_map[left_key] = right[left_val]
except KeyError:
pass
return result_map
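# Illustrative behaviour of compose_mappings (example values, not from the
# original module): keys of `left` whose values have no key in `right` are
# dropped.
#   compose_mappings({'a': 'x', 'b': 'y'}, {'x': 1})  ->  {'a': 1}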
tag_pattern = re.compile(r'({(?P<namespace>(\w|[-:./])*)})?(?P<tag>(\w|[-])*)')
def parse_tag(full_tag):
"""
>>> parse_tag('{urn:oasis:names:tc:opendocument:xmlns:office:1.0}document-content')
('urn:oasis:names:tc:opendocument:xmlns:office:1.0', 'document-content')
>>> parse_tag('document-content')
('', 'document-content')
"""
match = tag_pattern.match(full_tag)
if match is not None:
# Slightly hacky way of supporting 2+3
ret = []
for k in ("namespace", "tag"):
value = match.groupdict()[k] or ""
if not isinstance(value, six.text_type):
value = six.text_type(value, encoding="utf-8")
ret.append(value)
return ret[0], ret[1]
else:
raise Exception('Passed an invalid tag')
| gpl-2.0 | 4,677,905,624,702,928,000 | 32.870588 | 90 | 0.6471 | false |
fedora-infra/anitya | anitya/tests/lib/versions/test_base.py | 1 | 8780 | # -*- coding: utf-8 -*-
#
# Copyright © 2017-2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
from __future__ import unicode_literals
import unittest
import mock
from anitya.lib import exceptions
from anitya.lib.versions import base
class VersionTests(unittest.TestCase):
"""Tests for the :class:`anitya.lib.versions.Version` model."""
def test_identity_string(self):
"""Assert the generic version constant is what we expect.
.. note::
If this test starts failing because the constant was modified, you
*must* write a migration to change the type column on existing
versions.
"""
self.assertEqual("Generic Version", base.Version.name)
def test_str(self):
"""Assert __str__ calls parse"""
version = base.Version(version="v1.0.0")
self.assertEqual("1.0.0", str(version))
def test_str_parse_error(self):
"""Assert __str__ calls parse"""
version = base.Version(version="v1.0.0")
version.parse = mock.Mock(side_effect=exceptions.InvalidVersion("boop"))
self.assertEqual("v1.0.0", str(version))
def test_parse_no_v(self):
"""Assert parsing a version sans leading 'v' works."""
version = base.Version(version="1.0.0")
self.assertEqual("1.0.0", version.parse())
def test_parse_leading_v(self):
"""Assert parsing a version with a leading 'v' works."""
version = base.Version(version="v1.0.0")
self.assertEqual("1.0.0", version.parse())
def test_parse_odd_version(self):
"""Assert parsing an odd version works."""
version = base.Version(version="release_1_0_0")
self.assertEqual("release_1_0_0", version.parse())
def test_parse_v_not_alone(self):
"""Assert leading 'v' isn't stripped if it's not followed by a number."""
version = base.Version(version="version1.0.0")
self.assertEqual("version1.0.0", version.parse())
def test_parse_prefix_whitespace(self):
"""Assert prefix is stripped together with any whitespace."""
version = base.Version(version="version 1.0.0", prefix="version")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_prefix_no_v(self):
version = base.Version(version="release1.0.0", prefix="release")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_prefix_with_v(self):
version = base.Version(version="release-v1.0.0", prefix="release-")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_multiple_prefixes(self):
"""Assert parsing is working when multiple prefixes are provided."""
version = base.Version(version="release_db-1.2.3", prefix="release_db-;release")
self.assertEqual("1.2.3", version.parse())
def test_parse_with_multiple_prefixes_one_empty(self):
"""
Assert parsing is working when multiple prefixes are provided and one
is empty string.
"""
version = base.Version(version="release_db-1.2.3", prefix="release_db-; ")
self.assertEqual("1.2.3", version.parse())
def test_prerelease(self):
"""Assert prerelease is defined and returns False"""
version = base.Version(version="v1.0.0")
self.assertFalse(version.prerelease())
def test_postrelease(self):
"""Assert postrelease is defined and returns False"""
version = base.Version(version="v1.0.0")
self.assertFalse(version.postrelease())
def test_newer_single_version(self):
"""Assert newer is functional with a single instance of Version."""
version = base.Version(version="v1.0.0")
newer_version = base.Version(version="v2.0.0")
self.assertFalse(version.newer(newer_version))
self.assertTrue(newer_version.newer(version))
def test_newer_multiple_versions(self):
"""Assert newer is functional with multiple instances of Version."""
version = base.Version(version="v1.0.0")
version2 = base.Version(version="v1.1.0")
newer_version = base.Version(version="v2.0.0")
self.assertFalse(version.newer(newer_version))
self.assertTrue(newer_version.newer([version, version2]))
def test_newer_with_strings(self):
"""Assert newer handles string arguments."""
version = base.Version(version="v1.0.0")
self.assertFalse(version.newer("v2.0.0"))
def test_lt(self):
"""Assert Version supports < comparison."""
old_version = base.Version(version="v1.0.0")
new_version = base.Version(version="v1.1.0")
self.assertTrue(old_version < new_version)
self.assertFalse(new_version < old_version)
def test_lt_one_unparsable(self):
"""Assert unparsable versions sort lower than parsable ones."""
unparsable_version = base.Version(version="blarg")
unparsable_version.parse = mock.Mock(
side_effect=exceptions.InvalidVersion("blarg")
)
new_version = base.Version(version="v1.0.0")
self.assertTrue(unparsable_version < new_version)
self.assertFalse(new_version < unparsable_version)
def test_lt_both_unparsable(self):
"""Assert unparsable versions resort to string sorting."""
alphabetically_lower = base.Version(version="arg")
alphabetically_lower.parse = mock.Mock(
side_effect=exceptions.InvalidVersion("arg")
)
alphabetically_higher = base.Version(version="blarg")
alphabetically_higher.parse = mock.Mock(
side_effect=exceptions.InvalidVersion("blarg")
)
self.assertTrue(alphabetically_lower < alphabetically_higher)
def test_le(self):
"""Assert Version supports <= comparison."""
old_version = base.Version(version="v1.0.0")
equally_old_version = base.Version(version="v1.0.0")
new_version = base.Version(version="v1.1.0")
self.assertTrue(old_version <= new_version)
self.assertTrue(old_version <= equally_old_version)
self.assertFalse(new_version <= old_version)
def test_gt(self):
"""Assert Version supports > comparison."""
old_version = base.Version(version="v1.0.0")
new_version = base.Version(version="v1.1.0")
self.assertTrue(new_version > old_version)
self.assertFalse(old_version > new_version)
def test_ge(self):
"""Assert Version supports >= comparison."""
old_version = base.Version(version="v1.0.0")
equally_new_version = base.Version(version="v1.1.0")
new_version = base.Version(version="v1.1.0")
self.assertFalse(old_version >= new_version)
self.assertTrue(new_version >= equally_new_version)
self.assertTrue(new_version >= old_version)
def test_eq(self):
"""Assert Version supports == comparison."""
v1 = base.Version(version="v1.0.0")
v2 = base.Version(version="v1.0.0")
self.assertTrue(v1 == v2)
def test_eq_one_with_v(self):
"""Assert Versions where one just has a v prefix are still equal"""
v1 = base.Version(version="1.0.0")
v2 = base.Version(version="v1.0.0")
self.assertTrue(v1 == v2)
def test_eq_one_with_prefix(self):
"""Assert Versions where one just has a v prefix are still equal"""
v1 = base.Version(version="1.0.0")
v2 = base.Version(version="prefix1.0.0", prefix="prefix")
self.assertTrue(v1 == v2)
def test_eq_both_unparsable(self):
"""Assert unparsable versions that are the same string are equal."""
v1 = base.Version(version="arg")
v2 = base.Version(version="arg")
v1.parse = mock.Mock(side_effect=exceptions.InvalidVersion("arg"))
v2.parse = mock.Mock(side_effect=exceptions.InvalidVersion("arg"))
self.assertEqual(v1, v2)
| gpl-2.0 | -465,989,112,514,938,700 | 41.410628 | 88 | 0.649732 | false |
vladisac/CheckGuard | src/CheckGuard.py | 2 | 3485 | '''
* Copyright (C) 2015 Touch Vectron
*
* Author: Cornel Punga
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* Filename: CheckGuard.py
* This module will monitor files.txt for changes(for new checks)
* and then will trigger CheckParser module
*
* Last revision: 07/02/2015
*
'''
import CheckParser
from CheckLogger import check_logger
from time import sleep
try:
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
except ImportError:
print("You must have watchdog module installed")
exit()
class NewCheckHandler(PatternMatchingEventHandler):
def __init__(self):
super(NewCheckHandler, self).__init__(patterns=[r'C:\Vectron\VPosPC\files.txt'])
self.start_message = "Copyright (C) 2015 Touch Vectron\n" \
"Check Guard version 0.2.0\n" \
"Check Guard started...\n" \
"To stop CheckGuard please hit Ctrl + C\n"
self.end_message = "Check Guard stopped\n"
self.user_err_msg = "*************************************\n" \
"** Eroare la tiparirea bonului **\n" \
"*************************************\n"
self.bad_input = "Ai introdus informatie gresita!!!"
self.reprint = "Exista bonuri neprintate\n" \
"Vrei sa le printez? Y/N"
def on_start(self):
print(self.start_message)
pos_txt = CheckParser.read_init_pos()
end_pos = CheckParser.get_file_end_pos()
if (end_pos - pos_txt) > 0:
print(self.reprint)
user_ans = raw_input()
try:
assert isinstance(user_ans, str), "Bad user input"
except AssertionError as e:
check_logger.debug("{0}: {1}".format(e, user_ans))
print(self.bad_input)
if user_ans == 'Y' or user_ans == 'y':
try:
check = CheckParser.CheckParser(pos_txt)
check.read_file()
CheckParser.write_init_pos(end_pos)
except Exception as e:
print(self.user_err_msg)
check_logger.debug(e)
else:
CheckParser.write_init_pos(end_pos)
print("Bonurile existente au fost omise")
print("Omitere -> Status: OK")
def on_end(self):
print(self.end_message)
def on_modified(self, event):
try:
start_pos = CheckParser.read_init_pos()
check = CheckParser.CheckParser(start_pos)
sleep(1)
check.read_file()
CheckParser.write_init_pos(check.position)
except Exception as e:
print(self.user_err_msg)
check_logger.debug(e) | gpl-2.0 | -5,946,503,362,880,396,000 | 36.085106 | 88 | 0.572453 | false |
pabulumm/neighbors | lib/python3.4/site-packages/oauth2client/contrib/locked_file.py | 62 | 13391 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Locked file interface that should work on Unix and Windows pythons.
This module first tries to use fcntl locking to ensure serialized access
to a file, then falls back on a lock file if that is unavialable.
Usage::
f = LockedFile('filename', 'r+b', 'rb')
f.open_and_lock()
if f.is_locked():
print('Acquired filename with r+b mode')
f.file_handle().write('locked data')
else:
print('Acquired filename with rb mode')
f.unlock_and_close()
"""
from __future__ import print_function
import errno
import logging
import os
import time
from oauth2client import util
__author__ = '[email protected] (David T McWherter)'
logger = logging.getLogger(__name__)
class CredentialsFileSymbolicLinkError(Exception):
"""Credentials files must not be symbolic links."""
class AlreadyLockedException(Exception):
"""Trying to lock a file that has already been locked by the LockedFile."""
pass
def validate_file(filename):
if os.path.islink(filename):
raise CredentialsFileSymbolicLinkError(
'File: %s is a symbolic link.' % filename)
class _Opener(object):
"""Base class for different locking primitives."""
def __init__(self, filename, mode, fallback_mode):
"""Create an Opener.
Args:
filename: string, The pathname of the file.
mode: string, The preferred mode to access the file with.
fallback_mode: string, The mode to use if locking fails.
"""
self._locked = False
self._filename = filename
self._mode = mode
self._fallback_mode = fallback_mode
self._fh = None
self._lock_fd = None
def is_locked(self):
"""Was the file locked."""
return self._locked
def file_handle(self):
"""The file handle to the file. Valid only after opened."""
return self._fh
def filename(self):
"""The filename that is being locked."""
return self._filename
def open_and_lock(self, timeout, delay):
"""Open the file and lock it.
Args:
timeout: float, How long to try to lock for.
delay: float, How long to wait between retries.
"""
pass
def unlock_and_close(self):
"""Unlock and close the file."""
pass
class _PosixOpener(_Opener):
"""Lock files using Posix advisory lock files."""
def open_and_lock(self, timeout, delay):
"""Open the file and lock it.
Tries to create a .lock file next to the file we're trying to open.
Args:
timeout: float, How long to try to lock for.
delay: float, How long to wait between retries.
Raises:
AlreadyLockedException: if the lock is already acquired.
IOError: if the open fails.
CredentialsFileSymbolicLinkError if the file is a symbolic link.
"""
if self._locked:
raise AlreadyLockedException('File %s is already locked' %
self._filename)
self._locked = False
validate_file(self._filename)
try:
self._fh = open(self._filename, self._mode)
except IOError as e:
# If we can't access with _mode, try _fallback_mode and don't lock.
if e.errno == errno.EACCES:
self._fh = open(self._filename, self._fallback_mode)
return
lock_filename = self._posix_lockfile(self._filename)
start_time = time.time()
while True:
try:
self._lock_fd = os.open(lock_filename,
os.O_CREAT | os.O_EXCL | os.O_RDWR)
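                # os.O_CREAT | os.O_EXCL makes the create atomic: only one
                # process can create the .lock file, so a successful open is
                # equivalent to acquiring the lock.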
self._locked = True
break
except OSError as e:
if e.errno != errno.EEXIST:
raise
if (time.time() - start_time) >= timeout:
logger.warn('Could not acquire lock %s in %s seconds',
lock_filename, timeout)
# Close the file and open in fallback_mode.
if self._fh:
self._fh.close()
self._fh = open(self._filename, self._fallback_mode)
return
time.sleep(delay)
def unlock_and_close(self):
"""Unlock a file by removing the .lock file, and close the handle."""
if self._locked:
lock_filename = self._posix_lockfile(self._filename)
os.close(self._lock_fd)
os.unlink(lock_filename)
self._locked = False
self._lock_fd = None
if self._fh:
self._fh.close()
def _posix_lockfile(self, filename):
"""The name of the lock file to use for posix locking."""
return '%s.lock' % filename
try:
import fcntl
class _FcntlOpener(_Opener):
"""Open, lock, and unlock a file using fcntl.lockf."""
def open_and_lock(self, timeout, delay):
"""Open the file and lock it.
Args:
timeout: float, How long to try to lock for.
delay: float, How long to wait between retries
Raises:
AlreadyLockedException: if the lock is already acquired.
IOError: if the open fails.
CredentialsFileSymbolicLinkError: if the file is a symbolic
link.
"""
if self._locked:
raise AlreadyLockedException('File %s is already locked' %
self._filename)
start_time = time.time()
validate_file(self._filename)
try:
self._fh = open(self._filename, self._mode)
except IOError as e:
# If we can't access with _mode, try _fallback_mode and
# don't lock.
if e.errno in (errno.EPERM, errno.EACCES):
self._fh = open(self._filename, self._fallback_mode)
return
# We opened in _mode, try to lock the file.
while True:
try:
fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
self._locked = True
return
except IOError as e:
# If not retrying, then just pass on the error.
if timeout == 0:
raise
if e.errno != errno.EACCES:
raise
# We could not acquire the lock. Try again.
if (time.time() - start_time) >= timeout:
                        logger.warning('Could not lock %s in %s seconds',
                                       self._filename, timeout)
if self._fh:
self._fh.close()
self._fh = open(self._filename, self._fallback_mode)
return
time.sleep(delay)
def unlock_and_close(self):
"""Close and unlock the file using the fcntl.lockf primitive."""
if self._locked:
fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
self._locked = False
if self._fh:
self._fh.close()
except ImportError:
_FcntlOpener = None
try:
import pywintypes
import win32con
import win32file
class _Win32Opener(_Opener):
"""Open, lock, and unlock a file using windows primitives."""
# Error #33:
# 'The process cannot access the file because another process'
FILE_IN_USE_ERROR = 33
# Error #158:
# 'The segment is already unlocked.'
FILE_ALREADY_UNLOCKED_ERROR = 158
def open_and_lock(self, timeout, delay):
"""Open the file and lock it.
Args:
timeout: float, How long to try to lock for.
delay: float, How long to wait between retries
Raises:
AlreadyLockedException: if the lock is already acquired.
IOError: if the open fails.
CredentialsFileSymbolicLinkError: if the file is a symbolic
link.
"""
if self._locked:
raise AlreadyLockedException('File %s is already locked' %
self._filename)
start_time = time.time()
validate_file(self._filename)
try:
self._fh = open(self._filename, self._mode)
except IOError as e:
# If we can't access with _mode, try _fallback_mode
# and don't lock.
if e.errno == errno.EACCES:
self._fh = open(self._filename, self._fallback_mode)
return
# We opened in _mode, try to lock the file.
while True:
try:
hfile = win32file._get_osfhandle(self._fh.fileno())
win32file.LockFileEx(
hfile,
(win32con.LOCKFILE_FAIL_IMMEDIATELY |
win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
pywintypes.OVERLAPPED())
self._locked = True
return
except pywintypes.error as e:
if timeout == 0:
raise
# If the error is not that the file is already
# in use, raise.
if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
raise
# We could not acquire the lock. Try again.
if (time.time() - start_time) >= timeout:
                        logger.warning('Could not lock %s in %s seconds' % (
                            self._filename, timeout))
if self._fh:
self._fh.close()
self._fh = open(self._filename, self._fallback_mode)
return
time.sleep(delay)
def unlock_and_close(self):
"""Close and unlock the file using the win32 primitive."""
if self._locked:
try:
hfile = win32file._get_osfhandle(self._fh.fileno())
win32file.UnlockFileEx(hfile, 0, -0x10000,
pywintypes.OVERLAPPED())
except pywintypes.error as e:
if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
raise
self._locked = False
if self._fh:
self._fh.close()
except ImportError:
_Win32Opener = None
class LockedFile(object):
"""Represent a file that has exclusive access."""
@util.positional(4)
def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
"""Construct a LockedFile.
Args:
filename: string, The path of the file to open.
mode: string, The mode to try to open the file with.
fallback_mode: string, The mode to use if locking fails.
use_native_locking: bool, Whether or not fcntl/win32 locking is
used.
"""
opener = None
if not opener and use_native_locking:
if _Win32Opener:
opener = _Win32Opener(filename, mode, fallback_mode)
if _FcntlOpener:
opener = _FcntlOpener(filename, mode, fallback_mode)
if not opener:
opener = _PosixOpener(filename, mode, fallback_mode)
self._opener = opener
def filename(self):
"""Return the filename we were constructed with."""
return self._opener._filename
def file_handle(self):
"""Return the file_handle to the opened file."""
return self._opener.file_handle()
def is_locked(self):
"""Return whether we successfully locked the file."""
return self._opener.is_locked()
def open_and_lock(self, timeout=0, delay=0.05):
"""Open the file, trying to lock it.
Args:
timeout: float, The number of seconds to try to acquire the lock.
delay: float, The number of seconds to wait between retry attempts.
Raises:
AlreadyLockedException: if the lock is already acquired.
IOError: if the open fails.
"""
self._opener.open_and_lock(timeout, delay)
def unlock_and_close(self):
"""Unlock and close a file."""
self._opener.unlock_and_close()
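# Illustrative sketch (not part of the original module): acquiring the lock
# with a bounded wait. 'credentials.json' is a made-up filename, and the
# timeout/delay values simply exercise the retry loop documented above.
def _example_locked_update():
    f = LockedFile('credentials.json', 'r+b', 'rb')
    f.open_and_lock(timeout=2, delay=0.05)  # retry for up to ~2 seconds
    try:
        if f.is_locked():
            f.file_handle().write('locked data')  # exclusive read/write access
        else:
            print('lock unavailable; opened read-only via the fallback mode')
    finally:
        f.unlock_and_close()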
| bsd-3-clause | -673,470,291,402,698,100 | 33.602067 | 79 | 0.52946 | false |
sanguinariojoe/FreeCAD | src/Mod/Ship/shipUtils/Math.py | 38 | 2930 | #***************************************************************************
#* *
#* Copyright (c) 2011, 2016 *
#* Jose Luis Cercos Pita <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
def isAprox(a,b,tol=0.000001):
"""returns if a value is into (b-tol,b+tol)
@param a Value to compare.
@param b Center of valid interval
@param tol Radius of valid interval
@return True if a is into (b-tol,b+tol), False otherwise
"""
if (a < b+abs(tol)) and (a > b-abs(tol)):
return True
return False
def isSamePoint(a,b,tol=0.000001):
"""returns if two points are the same with a provided tolerance
@param a Point to compare.
@param b Reference point.
@param tol Radius of valid interval
@return True if twice point are the same, False otherwise
@note FreeCAD::Base::Vector types must be provided
"""
if isAprox(a.x,b.x,tol) and isAprox(a.y,b.y,tol) and isAprox(a.z,b.z,tol):
return True
return False
def isSameVertex(a,b,tol=0.0001):
"""returns if two points are the same with a provided tolerance
@param a Point to compare.
@param b Reference point.
@param tol Radius of valid interval
@return True if twice point are the same, False otherwise
@note FreeCAD::Part::Vertex types must be provided
"""
if isAprox(a.X,b.X,tol) and isAprox(a.Y,b.Y,tol) and isAprox(a.Z,b.Z,tol):
return True
return False
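# Illustrative usage (not part of the original module). The Vector values are
# placeholders for FreeCAD.Base.Vector instances:
#
#   isAprox(1.0000005, 1.0)                           # True, within default tol
#   isAprox(1.1, 1.0)                                 # False
#   isSamePoint(Vector(0, 0, 0), Vector(0, 0, 1e-8))  # True, every axis matches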
| lgpl-2.1 | -3,037,627,146,048,722,400 | 50.403509 | 78 | 0.49727 | false |
huanpc/IoT-1 | gui/controller/controller/iot_platform/admin.py | 1 | 1785 | from django.contrib import admin
from .models import PlatformModel
from django import forms
#TODO: Docs here: https://docs.djangoproject.com/en/1.10/ref/contrib/admin
class PlatformAdminChangeForm(forms.ModelForm):
class Meta:
model = PlatformModel
fields = '__all__'
exclude = ['resource_id', 'id']
#TODO: https://docs.djangoproject.com/en/1.10/ref/contrib/admin/#adding-custom-validation-to-the-admin
def clean(self):
return
def save(self, commit=True):
return super().save(commit)
class PlatformAdmin(admin.ModelAdmin):
# Use 2 model Admin in one admin parent page
# https://docs.djangoproject.com/en/dev/ref/contrib/admin/#inlinemodeladmin-objects
form = PlatformAdminChangeForm
ordering = ['id']
# list_filter = ('title', 'location', 'start_time')
# fieldsets = (
# ('Required', {
# 'fields': ('url_event', 'title', 'description', 'location', 'start_time', 'end_time')
# }),
# ('Addition options', {
# 'classes': ('collapse',),
# 'fields': ('ticket_url', 'thumbnail', 'event_thumbnail', 'number_interest', 'host', 'service_fee',
# 'quantity', 'contact', 'country_code', 'lang', 'slug', ),
# }),
# )
# readonly_fields = ('event_thumbnail',)
exclude = ('id', 'resource_id')
list_display = ('id', 'resource_id', 'platform_type', 'description', 'namespace', 'label', 'version')
    # Docs here: https://docs.djangoproject.com/en/1.10/ref/contrib/admin/#django.contrib.admin.ModelAdmin.search_fields
# search_fields = ('title',)
admin.site.register(PlatformModel, PlatformAdmin)
| mit | 873,050,636,578,505,300 | 40.511628 | 124 | 0.585434 | false |
procrasti/electrum | gui/stdio.py | 4 | 7556 | from decimal import Decimal
_ = lambda x:x
#from i18n import _
from electrum.wallet import WalletStorage, Wallet
from electrum.util import format_satoshis, set_verbosity, StoreDict
from electrum.bitcoin import is_valid, COIN, TYPE_ADDRESS
from electrum.network import filter_protocol
import sys, getpass, datetime
# minimal fdisk-like gui for console usage
# written by rofl0r, with some bits stolen from the text gui (ncurses)
class ElectrumGui:
def __init__(self, config, daemon, plugins):
self.config = config
network = daemon.network
storage = WalletStorage(config.get_wallet_path())
if not storage.file_exists:
print "Wallet not found. try 'electrum create'"
exit()
self.done = 0
self.last_balance = ""
set_verbosity(False)
self.str_recipient = ""
self.str_description = ""
self.str_amount = ""
self.str_fee = ""
self.wallet = Wallet(storage)
self.wallet.start_threads(network)
self.contacts = StoreDict(self.config, 'contacts')
network.register_callback(self.on_network, ['updated', 'banner'])
self.commands = [_("[h] - displays this help text"), \
_("[i] - display transaction history"), \
_("[o] - enter payment order"), \
_("[p] - print stored payment order"), \
_("[s] - send stored payment order"), \
_("[r] - show own receipt addresses"), \
_("[c] - display contacts"), \
_("[b] - print server banner"), \
_("[q] - quit") ]
self.num_commands = len(self.commands)
def on_network(self, event, *args):
if event == 'updated':
self.updated()
elif event == 'banner':
self.print_banner()
def main_command(self):
self.print_balance()
c = raw_input("enter command: ")
if c == "h" : self.print_commands()
elif c == "i" : self.print_history()
elif c == "o" : self.enter_order()
elif c == "p" : self.print_order()
elif c == "s" : self.send_order()
elif c == "r" : self.print_addresses()
elif c == "c" : self.print_contacts()
elif c == "b" : self.print_banner()
elif c == "n" : self.network_dialog()
elif c == "e" : self.settings_dialog()
elif c == "q" : self.done = 1
else: self.print_commands()
def updated(self):
s = self.get_balance()
if s != self.last_balance:
print(s)
self.last_balance = s
return True
def print_commands(self):
self.print_list(self.commands, "Available commands")
def print_history(self):
width = [20, 40, 14, 14]
delta = (80 - sum(width) - 4)/3
format_str = "%"+"%d"%width[0]+"s"+"%"+"%d"%(width[1]+delta)+"s"+"%" \
+ "%d"%(width[2]+delta)+"s"+"%"+"%d"%(width[3]+delta)+"s"
b = 0
messages = []
for item in self.wallet.get_history():
tx_hash, confirmations, value, timestamp, balance = item
if confirmations:
try:
time_str = datetime.datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
except Exception:
time_str = "unknown"
else:
time_str = 'unconfirmed'
label = self.wallet.get_label(tx_hash)
messages.append( format_str%( time_str, label, format_satoshis(value, whitespaces=True), format_satoshis(balance, whitespaces=True) ) )
self.print_list(messages[::-1], format_str%( _("Date"), _("Description"), _("Amount"), _("Balance")))
def print_balance(self):
print(self.get_balance())
def get_balance(self):
if self.wallet.network.is_connected():
if not self.wallet.up_to_date:
msg = _( "Synchronizing..." )
else:
c, u, x = self.wallet.get_balance()
msg = _("Balance")+": %f "%(Decimal(c) / COIN)
if u:
msg += " [%f unconfirmed]"%(Decimal(u) / COIN)
if x:
msg += " [%f unmatured]"%(Decimal(x) / COIN)
else:
msg = _( "Not connected" )
return(msg)
def print_contacts(self):
messages = map(lambda x: "%20s %45s "%(x[0], x[1][1]), self.contacts.items())
self.print_list(messages, "%19s %25s "%("Key", "Value"))
def print_addresses(self):
messages = map(lambda addr: "%30s %30s "%(addr, self.wallet.labels.get(addr,"")), self.wallet.addresses())
self.print_list(messages, "%19s %25s "%("Address", "Label"))
def print_order(self):
print("send order to " + self.str_recipient + ", amount: " + self.str_amount \
+ "\nfee: " + self.str_fee + ", desc: " + self.str_description)
def enter_order(self):
self.str_recipient = raw_input("Pay to: ")
self.str_description = raw_input("Description : ")
self.str_amount = raw_input("Amount: ")
self.str_fee = raw_input("Fee: ")
def send_order(self):
self.do_send()
def print_banner(self):
for i, x in enumerate( self.wallet.network.banner.split('\n') ):
print( x )
def print_list(self, list, firstline):
self.maxpos = len(list)
if not self.maxpos: return
print(firstline)
for i in range(self.maxpos):
msg = list[i] if i < len(list) else ""
print(msg)
def main(self):
while self.done == 0: self.main_command()
def do_send(self):
if not is_valid(self.str_recipient):
print(_('Invalid Bitcoin address'))
return
try:
amount = int(Decimal(self.str_amount) * COIN)
except Exception:
print(_('Invalid Amount'))
return
try:
fee = int(Decimal(self.str_fee) * COIN)
except Exception:
print(_('Invalid Fee'))
return
if self.wallet.use_encryption:
password = self.password_dialog()
if not password:
return
else:
password = None
c = ""
while c != "y":
c = raw_input("ok to send (y/n)?")
if c == "n": return
try:
tx = self.wallet.mktx([(TYPE_ADDRESS, self.str_recipient, amount)], password, self.config, fee)
except Exception as e:
print(str(e))
return
if self.str_description:
self.wallet.labels[tx.hash()] = self.str_description
print(_("Please wait..."))
        status, msg = self.wallet.network.broadcast(tx)
if status:
print(_('Payment sent.'))
#self.do_clear()
#self.update_contacts_tab()
else:
print(_('Error'))
def network_dialog(self):
print("use 'electrum setconfig server/proxy' to change your network settings")
return True
def settings_dialog(self):
print("use 'electrum setconfig' to change your settings")
return True
def password_dialog(self):
return getpass.getpass()
# XXX unused
def run_receive_tab(self, c):
#if c == 10:
# out = self.run_popup('Address', ["Edit label", "Freeze", "Prioritize"])
return
def run_contacts_tab(self, c):
pass
| mit | -5,276,390,945,176,424,000 | 31.995633 | 147 | 0.523425 | false |
google/shoptimizer | shoptimizer_api/optimizers_builtin/mpn_optimizer_test.py | 1 | 3776 | # coding=utf-8
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for mpn_optimizer.py."""
from typing import Any, Dict, Optional
import unittest.mock as mock
from absl.testing import parameterized
import enums
from optimizers_builtin import mpn_optimizer
from test_data import requests_bodies
def _build_request_body(has_mpn_field: bool,
mpn_value: Optional[str] = None) -> Dict[str, Any]:
"""Builds a dummy request body.
  The request body includes one product, either with the given mpn value or
  with no mpn field at all.
Args:
has_mpn_field: Whether the request body should have mpn field or not.
mpn_value: The mpn value of the product.
Returns:
A dummy request body including 1 product.
"""
properties_to_be_removed = []
if not has_mpn_field:
properties_to_be_removed.append('mpn')
body = requests_bodies.build_request_body({'mpn': mpn_value},
properties_to_be_removed)
return body
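# For illustration (values are made up): _build_request_body(True, '12345')
# returns a body where body['entries'][0]['product']['mpn'] == '12345', while
# _build_request_body(False) returns a body whose product carries no mpn key
# at all, matching the two cases exercised by the tests below.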
class MPNOptimizerTest(parameterized.TestCase):
def setUp(self) -> None:
super(MPNOptimizerTest, self).setUp()
self.optimizer = mpn_optimizer.MPNOptimizer()
@parameterized.parameters(mpn for mpn in mpn_optimizer.INVALID_MPN_VALUES)
def test_process_removes_mpn_field_when_its_value_invalid(
self, invalid_mpn):
original_data = _build_request_body(True, invalid_mpn)
optimized_data, optimization_result = self.optimizer.process(original_data)
self.assertNotIn('mpn', optimized_data['entries'][0]['product'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_process_removes_mpn_field_when_its_value_invalid_after_normalized(
self):
invalid_mpn = 'N/A'
original_data = _build_request_body(True, invalid_mpn)
optimized_data, optimization_result = self.optimizer.process(original_data)
self.assertNotIn('mpn', optimized_data['entries'][0]['product'])
self.assertEqual(1, optimization_result.num_of_products_optimized)
def test_process_does_not_transform_data_when_mpn_valid(self):
original_data = _build_request_body(True, 'valid-mpn')
optimized_data, optimization_result = self.optimizer.process(original_data)
self.assertEqual(original_data, optimized_data)
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_process_does_not_transform_data_when_mpn_field_not_exist(self):
original_data = _build_request_body(False)
optimized_data, optimization_result = self.optimizer.process(original_data)
self.assertEqual(original_data, optimized_data)
self.assertEqual(0, optimization_result.num_of_products_optimized)
def test_process_sets_product_tracking_field_to_sanitized_when_invalid_mpn_value_removed(
self):
invalid_mpn_value = 'default'
original_data = _build_request_body(True, invalid_mpn_value)
tracking_field = 'customLabel4'
with mock.patch.dict('os.environ',
{'PRODUCT_TRACKING_FIELD': tracking_field}):
optimized_data, _ = self.optimizer.process(original_data)
optimized_product = optimized_data['entries'][0]['product']
self.assertEqual(enums.TrackingTag.SANITIZED.value,
optimized_product[tracking_field])
| apache-2.0 | -8,019,107,327,763,407,000 | 35.660194 | 91 | 0.716102 | false |
anuragkapur/mongo-connector | tests/test_mongo_connector.py | 1 | 5073 | # Copyright 2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file will be used with PyPi in order to package and distribute the final
# product.
"""Tests methods for mongo_connector
"""
import os
import sys
import inspect
current_file = inspect.getfile(inspect.currentframe())
cmd_folder = os.path.realpath(os.path.abspath(os.path.split(current_file)[0]))
cmd_folder = cmd_folder.rsplit("/", 1)[0]
cmd_folder += "/mongo-connector"
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import unittest
import time
import json
from mongo_connector import Connector
from optparse import OptionParser
from setup_cluster import start_cluster
from bson.timestamp import Timestamp
from util import long_to_bson_ts
main_address = '27217'
class MongoInternalTester(unittest.TestCase):
def runTest(self):
unittest.TestCase.__init__(self)
def test_connector(self):
"""Test whether the connector initiates properly
"""
c = Connector(main_address, 'config.txt', None, ['test.test'],
'_id', None, None)
c.start()
while len(c.shard_set) != 1:
time.sleep(2)
c.join()
self.assertFalse(c.can_run)
time.sleep(5)
for thread in c.shard_set.values():
self.assertFalse(thread.running)
def test_write_oplog_progress(self):
"""Test write_oplog_progress under several circumstances
"""
os.system('touch temp_config.txt')
config_file_path = os.getcwd() + '/temp_config.txt'
c = Connector(main_address, config_file_path, None, ['test.test'],
'_id', None, None)
#test that None is returned if there is no config file specified.
self.assertEqual(c.write_oplog_progress(), None)
c.oplog_progress.get_dict()[1] = Timestamp(12, 34)
#pretend to insert a thread/timestamp pair
c.write_oplog_progress()
data = json.load(open(config_file_path, 'r'))
self.assertEqual(1, int(data[0]))
self.assertEqual(long_to_bson_ts(int(data[1])), Timestamp(12, 34))
#ensure the temp file was deleted
self.assertFalse(os.path.exists(config_file_path + '~'))
#ensure that updates work properly
c.oplog_progress.get_dict()[1] = Timestamp(44, 22)
c.write_oplog_progress()
config_file = open(config_file_path, 'r')
data = json.load(config_file)
self.assertEqual(1, int(data[0]))
self.assertEqual(long_to_bson_ts(int(data[1])), Timestamp(44, 22))
os.system('rm ' + config_file_path)
config_file.close()
print("PASSED TEST WRITE OPLOG PROGRESS")
def test_read_oplog_progress(self):
"""Test read_oplog_progress
"""
c = Connector(main_address, None, None, ['test.test'], '_id',
None, None)
#testing with no file
self.assertEqual(c.read_oplog_progress(), None)
os.system('touch temp_config.txt')
config_file_path = os.getcwd() + '/temp_config.txt'
c.oplog_checkpoint = config_file_path
#testing with empty file
self.assertEqual(c.read_oplog_progress(), None)
oplog_dict = c.oplog_progress.get_dict()
#add a value to the file, delete the dict, and then read in the value
oplog_dict['oplog1'] = Timestamp(12, 34)
c.write_oplog_progress()
del oplog_dict['oplog1']
self.assertEqual(len(oplog_dict), 0)
c.read_oplog_progress()
self.assertTrue('oplog1' in oplog_dict.keys())
self.assertTrue(oplog_dict['oplog1'], Timestamp(12, 34))
oplog_dict['oplog1'] = Timestamp(55, 11)
#see if oplog progress dict is properly updated
c.read_oplog_progress()
self.assertTrue(oplog_dict['oplog1'], Timestamp(55, 11))
os.system('rm ' + config_file_path)
print("PASSED TEST READ OPLOG PROGRESS")
if __name__ == '__main__':
os.system('rm config.txt; touch config.txt')
parser = OptionParser()
#-m is for the main address, which is a host:port pair, ideally of the
#mongos. For non sharded clusters, it can be the primary.
parser.add_option("-m", "--main", action="store", type="string",
dest="main_addr", default="27217")
(options, args) = parser.parse_args()
main_address = "localhost:" + options.main_addr
if options.main_addr != "27217":
start_cluster(use_mongos=False)
else:
start_cluster(use_mongos=True)
unittest.main(argv=[sys.argv[0]])
| apache-2.0 | 5,385,982,325,778,449,000 | 31.312102 | 79 | 0.636113 | false |
jordanemedlock/psychtruths | temboo/core/Library/LastFm/Artist/GetEvents.py | 5 | 4525 | # -*- coding: utf-8 -*-
###############################################################################
#
# GetEvents
# Retrieves a list of upcoming events for this artist.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetEvents(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetEvents Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetEvents, self).__init__(temboo_session, '/Library/LastFm/Artist/GetEvents')
def new_input_set(self):
return GetEventsInputSet()
def _make_result_set(self, result, path):
return GetEventsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetEventsChoreographyExecution(session, exec_id, path)
class GetEventsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetEvents
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) Your Last.fm API Key.)
"""
super(GetEventsInputSet, self)._set_input('APIKey', value)
def set_Artist(self, value):
"""
Set the value of the Artist input for this Choreo. ((conditional, string) The artist name. Required unless providing MbID.)
"""
super(GetEventsInputSet, self)._set_input('Artist', value)
def set_AutoCorrect(self, value):
"""
Set the value of the AutoCorrect input for this Choreo. ((optional, boolean) Transform misspelled artist names into correct artist names. The corrected artist name will be returned in the response. Defaults to 0.)
"""
super(GetEventsInputSet, self)._set_input('AutoCorrect', value)
def set_FestivalsOnly(self, value):
"""
Set the value of the FestivalsOnly input for this Choreo. ((optional, boolean) Whether only festivals should be returned, or all events. Defaults to 0 (false).)
"""
super(GetEventsInputSet, self)._set_input('FestivalsOnly', value)
def set_Limit(self, value):
"""
Set the value of the Limit input for this Choreo. ((optional, integer) The number of results to fetch per page. Defaults to 50.)
"""
super(GetEventsInputSet, self)._set_input('Limit', value)
def set_MbID(self, value):
"""
Set the value of the MbID input for this Choreo. ((conditional, string) The musicbrainz id for the artist. Required unless providing Artist.)
"""
super(GetEventsInputSet, self)._set_input('MbID', value)
def set_Page(self, value):
"""
Set the value of the Page input for this Choreo. ((optional, integer) The page number to fetch. Defaults to 1.)
"""
super(GetEventsInputSet, self)._set_input('Page', value)
class GetEventsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetEvents Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((xml) The response from Last.fm.)
"""
return self._output.get('Response', None)
class GetEventsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetEventsResultSet(response, path)
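# Illustrative usage sketch (not part of the generated file). It assumes a
# valid TembooSession and follows the SDK's usual choreo pattern; the API key
# and artist name are placeholders:
#
#   choreo = GetEvents(session)
#   inputs = choreo.new_input_set()
#   inputs.set_APIKey('YOUR_LASTFM_API_KEY')
#   inputs.set_Artist('Cher')
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())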
| apache-2.0 | -2,100,165,090,013,714,200 | 39.401786 | 221 | 0.66232 | false |
livingbio/libsaas | libsaas/services/stripe/invoices.py | 3 | 4954 | from libsaas.services import base
from libsaas import parsers, http
from . import resource
class LineItems(resource.StripeResource):
path = 'lines'
@base.apimethod
def get(self, customer=None, count=None, offset=None):
"""
Fetch all of the objects.
:var customer: In the case of upcoming invoices, the customer of the
upcoming invoice is required. In other cases it is ignored.
:vartype customer: str
:var count: A limit on the number of objects to be returned.
Count can range between 1 and 100 objects.
:vartype count: int
:var offset: An offset into your object array. The API will return
the requested number of objects starting at that offset.
:vartype offset: int
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_json
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
def delete(self, *args, **kwargs):
raise base.MethodNotSupported()
class InvoicesBaseResource(resource.StripeResource):
path = 'invoices'
def delete(self, *args, **kwargs):
raise base.MethodNotSupported()
class Invoice(InvoicesBaseResource):
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
@base.resource(LineItems)
def lines(self):
"""
Return the resource corresponding to all invoice's lines.
"""
return LineItems(self)
@base.apimethod
def pay(self):
"""
Paying an invoice
"""
self.require_item()
url = '{0}/{1}'.format(self.get_url(), 'pay')
request = http.Request('POST', url, {})
return request, parsers.parse_json
class Invoices(InvoicesBaseResource):
@base.apimethod
def get(self, customer=None, count=None, offset=None, ending_before=None,
starting_after=None):
"""
Fetch all of the objects.
:var customer: The identifier of the customer whose invoices to return.
If none is provided, all invoices will be returned.
:vartype customer: str
:var count: A limit on the number of objects to be returned.
Count can range between 1 and 100 objects.
:vartype count: int
:var offset: An offset into your object array. The API will return
the requested number of objects starting at that offset.
:vartype offset: int
:var ending_before: A cursor (object ID) for use in pagination. Fetched
            objects will be newer than the given object.
:vartype ending_before: str
:var starting_after: A cursor (object ID) for use in pagination.
            Fetched objects will be older than the given object.
:vartype starting_after: str
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_json
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
@base.apimethod
def upcoming(self, customer):
"""
Fetch a customer's upcoming invoice.
:var customer: The identifier of the customer whose invoices to return.
If none is provided, all invoices will be returned.
:vartype customer: str
"""
params = base.get_params(None, locals())
url = '{0}/{1}'.format(self.get_url(), 'upcoming')
request = http.Request('GET', url, params)
return request, parsers.parse_json
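# Illustrative pagination sketch (not part of the original module). The
# 'stripe' handle stands in for however the executor-bound Stripe service
# object is obtained in libsaas, and 'data'/'id' follow Stripe's list format:
#
#   page = stripe.invoices().get(count=100)
#   last_id = page['data'][-1]['id']
#   next_page = stripe.invoices().get(count=100, starting_after=last_id)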
class InvoiceItemBaseResource(resource.StripeResource):
path = 'invoiceitems'
def update(self, *args, **kwargs):
raise base.MethodNotSupported()
class InvoiceItem(InvoiceItemBaseResource):
def create(self, *args, **kwargs):
raise base.MethodNotSupported()
class InvoiceItems(InvoiceItemBaseResource):
@base.apimethod
def get(self, customer=None, count=None, offset=None):
"""
Fetch all of the objects.
:var customer: The identifier of the customer whose invoice items to return.
If none is provided, all invoice items will be returned.
:vartype customer: str
:var count: A limit on the number of objects to be returned.
Count can range between 1 and 100 objects.
:vartype count: int
:var offset: An offset into your object array. The API will return
the requested number of objects starting at that offset.
:vartype offset: int
"""
params = base.get_params(None, locals())
request = http.Request('GET', self.get_url(), params)
return request, parsers.parse_json
def delete(self, *args, **kwargs):
raise base.MethodNotSupported()
| mit | -9,220,701,440,068,584,000 | 29.392638 | 84 | 0.631813 | false |
Ircam-Web/mezzanine-organization | organization/magazine/models.py | 1 | 5802 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016-2017 Ircam
# Copyright (c) 2016-2017 Guillaume Pellerin
# Copyright (c) 2016-2017 Emilie Zawadzki
# This file is part of mezzanine-organization.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from django.db import models
from django import forms
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from mezzanine.core.managers import SearchableManager
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse, reverse_lazy
from mezzanine.core.models import RichText, Displayable, Slugged, TeamOwnable
from mezzanine.pages.models import Page
from mezzanine.blog.models import BlogPost
from organization.network.models import Department, PersonListBlock
from organization.media.models import *
from organization.core.models import *
from organization.magazine.apps import *
BRIEF_STYLE_CHOICES = [
('grey', _('grey')),
('yellow', _('yellow')),
('black', _('black'))
]
class Article(BlogPost, SubTitled, TeamOwnable):
department = models.ForeignKey(Department, verbose_name=_('department'), related_name='articles', limit_choices_to=dict(id__in=Department.objects.all()), blank=True, null=True, on_delete=models.SET_NULL)
topics = models.ManyToManyField("Topic", verbose_name=_('topics'), related_name="articles", blank=True)
search_fields = {"title" : 20, "content": 15}
def get_absolute_url(self):
return reverse("magazine-article-detail", kwargs={"slug": self.slug})
class Meta:
verbose_name = _('article')
permissions = TeamOwnable.Meta.permissions
class ArticleImage(Image):
article = models.ForeignKey("Article", verbose_name=_('article'), related_name='images', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _("image")
verbose_name_plural = _("images")
order_with_respect_to = "article"
class ArticleRelatedTitle(RelatedTitle):
article = models.OneToOneField("Article", verbose_name=_('article'), related_name='related_title', blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
verbose_name = _("related title")
order_with_respect_to = "article"
class ArticlePlaylist(PlaylistRelated):
article = models.ForeignKey(Article, verbose_name=_('article'), related_name='playlists', blank=True, null=True, on_delete=models.SET_NULL)
class Brief(Displayable, RichText, TeamOwnable):
style = models.CharField(_('style'), max_length=16, choices=BRIEF_STYLE_CHOICES)
text_button = models.CharField(blank=True, max_length=150, null=False, verbose_name=_('text button'))
external_content = models.URLField(blank=True, max_length=1000, null=False, verbose_name=_('external content'))
# used for autocomplete but hidden in admin
content_type = models.ForeignKey(
ContentType,
verbose_name=_('local content'),
null=True,
blank=True,
editable=False,
)
# used for autocomplete but hidden in admin
object_id = models.PositiveIntegerField(
verbose_name=_('related object'),
null=True,
editable=False,
)
content_object = GenericForeignKey('content_type', 'object_id')
def get_absolute_url(self):
return self.external_content
class Meta:
verbose_name = _('brief')
permissions = TeamOwnable.Meta.permissions
#ordering = ['sort_order']
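# Illustrative sketch (not part of the original module): pointing a Brief at a
# local object through the generic foreign key above. 'article' is assumed to
# be an existing Article instance:
#
#   brief = Brief(title='Read more', style='grey', text_button='Open')
#   brief.content_object = article  # fills content_type and object_id
#   brief.save()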
class Topic(Page, RichText):
"""Topic for magazine menu"""
class Meta:
verbose_name = _('topic')
class ArticlePersonListBlockInline(Titled, Description):
article = models.ForeignKey(Article, verbose_name=_('Article'), related_name='article_person_list_block_inlines', blank=True, null=True, on_delete=models.SET_NULL)
person_list_block = models.ForeignKey(PersonListBlock, related_name='article_person_list_block_inlines', verbose_name=_('Person List Block'), blank=True, null=True)
class Meta:
verbose_name = _('Person List')
def __str__(self):
return self.title
class DynamicContentArticle(DynamicContent, Orderable):
article = models.ForeignKey(Article, verbose_name=_('article'), related_name='dynamic_content_articles', blank=True, null=True, on_delete=models.CASCADE)
class Meta:
verbose_name = 'Dynamic Content Article'
class DynamicMultimediaArticle(DynamicContent, Orderable):
article = models.ForeignKey(Article, verbose_name=_('article'), related_name='dynamic_multimedia', blank=True, null=True, on_delete=models.CASCADE)
class Meta:
verbose_name = 'Multimedia'
class DynamicContentMagazineContent(DynamicContent, Orderable):
magazine = models.ForeignKey("magazine", verbose_name=_('magazine'), related_name='dynamic_content', blank=True, null=True, on_delete=models.CASCADE)
class Meta:
verbose_name = 'Content'
class Magazine(Displayable):
class Meta:
verbose_name = _('magazine')
verbose_name_plural = _("magazines")
def get_absolute_url(self):
return reverse("magazine")
| agpl-3.0 | -7,482,229,496,694,557,000 | 33.742515 | 207 | 0.710445 | false |
damoti/pyjx-gwt | gwt/pyjamas/gmaps/Utils.py | 7 | 5407 | # Copyright (C) 2009 Daniel Carvalho <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __pyjamas__ import JS
# Converts javascript structures from googlemaps javascript library to
# python structures, and vice-versa.
#
# Example:
# jsobj=JS("""[{nome:"danire", year:1814}, {nome:"joano", year:"1901"}]""")
#
# #print jsobj[0].nome # this is an error!
#
# fields = dictToJs({"lista": 'l', "lista[]": 'd'})
# pyobj=translateGmapsObject(jsobj,"lista",fields)
# for line in pyobj:
# print line.nome, line.year
#
# jsobj2=translateGmapsObject(pyobj,"lista",fields,True)
# #jsobj2 is exactly the same as jsobj!
def translateGmapsObject(obj, fieldName, fields, pyToJs):
JS("""
//console['log']("translateGmapsObject " + fieldNameXXX+"("+pyToJs+")")
if (! (@{{fieldName}} in @{{fields}}))
{
//console['log']("nothing")
return @{{obj}};
}
else{
@{{action}} = @{{fields}}[@{{fieldName}}]
//console['log']("action=" + action)
if (@{{action}} == 'd')
{
//console['log']("is dict")
// this newobj can be used in js and also in python,
// like this "newobj['field']"
var newobj = {}
for (var i in @{{obj}})
            // will become available as a property in Python!
newobj[i] = $m['translateGmapsObject'](@{{obj}}[i], i, @{{fields}}, @{{pyToJs}});
return newobj
}
else if (@{{action}} == 'l')
{
if (@{{pyToJs}}) {
var newobj = $m['listToJs'](@{{obj}})
//console['log']("is list py->js")
for (var i in newobj){
newobj[i]=$m['translateGmapsObject'](
newobj[i], @{{fieldName}} + "[]", @{{fields}},@{{pyToJs}} ) ;
}
return newobj
}else{
//console['log']("is list js->py")
var newobj = @{{list}}([])
for (var i in @{{obj}})
newobj['append']($m['translateGmapsObject'](
@{{obj}}[i], @{{fieldName}} + "[]", @{{fields}},@{{pyToJs}} ));
return newobj
}
}
else
{
//console['log']("is special")
return @{{action}}(@{{obj}})
}
}
""")
# converts a python dict to js
# It can be used in python functions that have variable number of args
#
# like
# def MapOptions(**params):
# return dictToJs(params)
#
# if MapOptions is called without arguments, the for loop will
# raise an exception.
# I could use the test "if params" BUT it always gives True...
# So I have to catch the exception.
def dictToJs(dict):
obj = JS("{}")
try:
for key in dict:
value = dict[key]
JS("@{{obj}}[@{{key}}] = @{{value}}")
except:
pass
return obj
# Converts a python list to a javascript list
def listToJs(list):
obj = JS("[]")
for i in list:
obj.push(i)
return obj
# LISTENERS
# This functions add python listener methods to any
# gmaps javascript object
def createListenerMethods(obj):
obj.addListener = __addListener
obj.removeListener = __removeListener
obj.clearListeners = __clearListeners
obj.clearInstanceListeners = __clearInstanceListeners
    #obj.dumpListeners = __dumpListeners # for debugging
obj.__listeners = {} #__ !
def __dumpListeners():
self = JS("this")
print "DUMP"
for eventName in self.__listeners:
print " " + eventName
for list in self.__listeners[eventName]:
print " " + str(list)
def __addListener(eventName, callback):
self = JS("this")
thelist = JS("""
$wnd['google']['maps']['event']['addListener'](this, @{{eventName}}, function(event) {
@{{callback}}(event);
})
""")
# I have to keep information about the registered listeners for
# this instance!
if eventName in self.__listeners:
self.__listeners[eventName].append(thelist)
else:
self.__listeners[eventName] = [thelist]
return thelist
def __removeListener(list):
self = JS("this")
for eventName in self.__listeners:
if list in self.__listeners[eventName]:
JS("""$wnd['google']['maps']['event']['removeListener'](@{{list}});""")
self.__listeners[eventName].remove(list)
return
# if we get here, there is nothing to remove,
# the listener specified doesn't exist or does not belong to this object
def __clearListeners(eventName):
self = JS("this")
JS("""$wnd['google']['maps']['event']['clearListeners'](this, @{{eventName}})""")
if eventName in self.__listeners:
del self.__listeners[eventName]
def __clearInstanceListeners():
self = JS("this")
JS("""$wnd['google']['maps']['event']['clearInstanceListeners'](this)""")
self.__listeners = {}
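# Illustrative usage (not part of the original module). 'marker' stands for
# any gmaps object previously passed to createListenerMethods():
#
#   createListenerMethods(marker)
#   handle = marker.addListener('click', lambda event: on_click(event))
#   marker.removeListener(handle)    # detach one listener
#   marker.clearListeners('click')   # or drop every 'click' listener at once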
| apache-2.0 | 6,478,600,338,335,920,000 | 27.308901 | 94 | 0.574995 | false |
encukou/cython | Cython/Utility/__init__.py | 10 | 1165 |
def pylong_join(count, digits_ptr='digits', join_type='unsigned long'):
"""
Generate an unrolled shift-then-or loop over the first 'count' digits.
Assumes that they fit into 'join_type'.
(((d[2] << n) | d[1]) << n) | d[0]
"""
return ('(' * (count * 2) + "(%s)" % join_type + ' | '.join(
"%s[%d])%s)" % (digits_ptr, _i, " << PyLong_SHIFT" if _i else '')
for _i in range(count-1, -1, -1)))
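# For example (computed from the expression above), pylong_join(2) returns the
# C snippet "(((((unsigned long)digits[1]) << PyLong_SHIFT) | digits[0]))".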
# although it could potentially make use of data independence,
# this implementation is a bit slower than the simpler one above
def _pylong_join(count, digits_ptr='digits', join_type='unsigned long'):
"""
Generate an or-ed series of shifts for the first 'count' digits.
Assumes that they fit into 'join_type'.
(d[2] << 2*n) | (d[1] << 1*n) | d[0]
"""
def shift(n):
# avoid compiler warnings for overly large shifts that will be discarded anyway
return " << (%d * PyLong_SHIFT < 8 * sizeof(%s) ? %d * PyLong_SHIFT : 0)" % (n, join_type, n) if n else ''
return '(%s)' % ' | '.join(
"(((%s)%s[%d])%s)" % (join_type, digits_ptr, i, shift(i))
for i in range(count-1, -1, -1))
| apache-2.0 | 4,038,455,520,699,946,000 | 39.172414 | 114 | 0.563948 | false |
jumpstarter-io/neutron | neutron/services/loadbalancer/drivers/embrane/agent/dispatcher.py | 15 | 4590 | # Copyright 2014 Embrane, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ivar Lazzaro, Embrane, Inc. [email protected]
from eventlet import greenthread
from eventlet import queue
from heleosapi import exceptions as h_exc
from neutron.openstack.common import log as logging
from neutron.plugins.embrane.common import contexts as ctx
from neutron.services.loadbalancer.drivers.embrane.agent import lb_operations
from neutron.services.loadbalancer.drivers.embrane import constants as econ
LOG = logging.getLogger(__name__)
class Dispatcher(object):
def __init__(self, driver, async=True):
self._async = async
self._driver = driver
self.sync_items = dict()
self.handlers = lb_operations.handlers
def dispatch_lb(self, d_context, *args, **kwargs):
item = d_context.item
event = d_context.event
n_context = d_context.n_context
chain = d_context.chain
item_id = item["id"]
if event in self.handlers:
for f in self.handlers[event]:
first_run = False
if item_id not in self.sync_items:
self.sync_items[item_id] = [queue.Queue()]
first_run = True
self.sync_items[item_id][0].put(
ctx.OperationContext(event, n_context, item, chain, f,
args, kwargs))
if first_run:
t = greenthread.spawn(self._consume_lb,
item_id,
self.sync_items[item_id][0],
self._driver,
self._async)
self.sync_items[item_id].append(t)
if not self._async:
t = self.sync_items[item_id][1]
t.wait()
def _consume_lb(self, sync_item, sync_queue, driver, a_sync):
current_state = None
while True:
try:
if current_state == econ.DELETED:
del self.sync_items[sync_item]
return
try:
operation_context = sync_queue.get(
block=a_sync,
timeout=econ.QUEUE_TIMEOUT)
except queue.Empty:
del self.sync_items[sync_item]
return
(operation_context.chain and
operation_context.chain.execute_all())
transient_state = None
try:
transient_state = operation_context.function(
driver, operation_context.n_context,
operation_context.item, *operation_context.args,
**operation_context.kwargs)
except (h_exc.PendingDva, h_exc.DvaNotFound,
h_exc.BrokenInterface, h_exc.DvaCreationFailed,
h_exc.BrokenDva, h_exc.ConfigurationFailed) as ex:
LOG.warning(econ.error_map[type(ex)], ex.message)
except h_exc.DvaDeleteFailed as ex:
LOG.warning(econ.error_map[type(ex)], ex.message)
transient_state = econ.DELETED
finally:
# if the returned transient state is None, no operations
# are required on the DVA status
if transient_state == econ.DELETED:
current_state = driver._delete_vip(
operation_context.n_context,
operation_context.item)
# Error state cannot be reverted
else:
driver._update_vip_graph_state(
operation_context.n_context,
operation_context.item)
except Exception:
LOG.exception(_('Unhandled exception occurred'))
| apache-2.0 | 2,938,358,615,519,293,000 | 41.5 | 78 | 0.533115 | false |
shinken-monitoring/mod-webui | module/plugins/config/config.py | 2 | 2138 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import os
from config_parser import ConfigParser
from shinken.log import logger
# Get plugin's parameters from configuration file (not useful currently but future ideas ...)
params = {
'fake': "fake"
}
plugin_name = os.path.splitext(os.path.basename(__file__))[0]
currentdir = os.path.dirname(os.path.realpath(__file__))
configuration_file = "%s/%s" % (currentdir, 'plugin.cfg')
logger.debug("Plugin configuration file: %s", configuration_file)
try:
scp = ConfigParser('#', '=')
params = scp.parse_config(configuration_file)
# mongo_host = params['mongo_host']
    params['fake'] = params['fake']  # raises KeyError if the option is missing
logger.debug("WebUI plugin '%s', configuration loaded.", plugin_name)
# logger.debug("Plugin %s configuration, database: %s (%s)",
# plugin_name, params['mongo_host'], params['mongo_port'])
except Exception as exp:
logger.warning("WebUI plugin '%s', configuration file (%s) not available: %s",
plugin_name, configuration_file, str(exp))
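# For reference, a plugin.cfg that ConfigParser('#', '=') would accept looks
# like this (illustrative content, not shipped with the plugin):
#
#   # comment lines start with '#'
#   fake=fake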
# Will be populated by the UI with its own value
app = None
def config_page():
app.bottle.redirect("/")
return {}
pages = {
config_page: {
'name': 'Config', 'route': '/config', 'view': 'config'
}
}
| agpl-3.0 | 2,850,102,797,286,507,500 | 30.910448 | 93 | 0.689897 | false |
mrquim/mrquimrepo | repo/script.module.pycryptodome/lib/Crypto/PublicKey/__init__.py | 6 | 3453 | # -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Public-key encryption and signature algorithms.
Public-key encryption uses two different keys, one for encryption and
one for decryption. The encryption key can be made public, and the
decryption key is kept private. Many public-key algorithms can also
be used to sign messages, and some can *only* be used for signatures.
"""
from Crypto.Util.asn1 import (DerSequence, DerInteger, DerBitString,
DerObjectId, DerNull)
def _expand_subject_public_key_info(encoded):
"""Parse a SubjectPublicKeyInfo structure.
It returns a triple with:
* OID (string)
* encoded public key (bytes)
* Algorithm parameters (bytes or None)
"""
#
# SubjectPublicKeyInfo ::= SEQUENCE {
# algorithm AlgorithmIdentifier,
# subjectPublicKey BIT STRING
# }
#
# AlgorithmIdentifier ::= SEQUENCE {
# algorithm OBJECT IDENTIFIER,
# parameters ANY DEFINED BY algorithm OPTIONAL
# }
#
spki = DerSequence().decode(encoded, nr_elements=2)
algo = DerSequence().decode(spki[0], nr_elements=(1,2))
algo_oid = DerObjectId().decode(algo[0])
spk = DerBitString().decode(spki[1]).value
if len(algo) == 1:
algo_params = None
else:
try:
DerNull().decode(algo[1])
algo_params = None
except:
algo_params = algo[1]
return algo_oid.value, spk, algo_params
def _create_subject_public_key_info(algo_oid, secret_key, params=None):
if params is None:
params = DerNull()
spki = DerSequence([
DerSequence([
DerObjectId(algo_oid),
params]),
DerBitString(secret_key)
])
return spki.encode()
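# Illustrative round trip (not part of the original module); the OID is the
# standard rsaEncryption identifier and the key bytes are a placeholder:
#
#   spki = _create_subject_public_key_info('1.2.840.113549.1.1.1',
#                                          b'\x02\x01\x00')
#   oid, spk, params = _expand_subject_public_key_info(spki)
#   assert oid == '1.2.840.113549.1.1.1'
#   assert spk == b'\x02\x01\x00' and params is None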
def _extract_subject_public_key_info(x509_certificate):
"""Extract subjectPublicKeyInfo from a DER X.509 certificate."""
certificate = DerSequence().decode(x509_certificate, nr_elements=3)
tbs_certificate = DerSequence().decode(certificate[0],
nr_elements=range(6, 11))
index = 5
try:
tbs_certificate[0] + 1
# Version not present
version = 1
except TypeError:
version = DerInteger(explicit=0).decode(tbs_certificate[0]).value
if version not in (2, 3):
raise ValueError("Incorrect X.509 certificate version")
index = 6
return tbs_certificate[index]
| gpl-2.0 | -1,333,315,165,086,527,000 | 32.524272 | 73 | 0.622647 | false |
sankhesh/VTK | ThirdParty/Twisted/twisted/conch/manhole_tap.py | 42 | 4306 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
TAP plugin for creating telnet- and ssh-accessible manhole servers.
@author: Jp Calderone
"""
from zope.interface import implements
from twisted.internet import protocol
from twisted.application import service, strports
from twisted.conch.ssh import session
from twisted.conch import interfaces as iconch
from twisted.cred import portal, checkers
from twisted.python import usage
from twisted.conch.insults import insults
from twisted.conch import manhole, manhole_ssh, telnet
class makeTelnetProtocol:
def __init__(self, portal):
self.portal = portal
def __call__(self):
auth = telnet.AuthenticatingTelnetProtocol
args = (self.portal,)
return telnet.TelnetTransport(auth, *args)
class chainedProtocolFactory:
def __init__(self, namespace):
self.namespace = namespace
def __call__(self):
return insults.ServerProtocol(manhole.ColoredManhole, self.namespace)
class _StupidRealm:
implements(portal.IRealm)
def __init__(self, proto, *a, **kw):
self.protocolFactory = proto
self.protocolArgs = a
self.protocolKwArgs = kw
def requestAvatar(self, avatarId, *interfaces):
if telnet.ITelnetProtocol in interfaces:
return (telnet.ITelnetProtocol,
self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs),
lambda: None)
raise NotImplementedError()
class Options(usage.Options):
optParameters = [
["telnetPort", "t", None, "strports description of the address on which to listen for telnet connections"],
["sshPort", "s", None, "strports description of the address on which to listen for ssh connections"],
["passwd", "p", "/etc/passwd", "name of a passwd(5)-format username/password file"]]
def __init__(self):
usage.Options.__init__(self)
self['namespace'] = None
def postOptions(self):
if self['telnetPort'] is None and self['sshPort'] is None:
raise usage.UsageError("At least one of --telnetPort and --sshPort must be specified")
def makeService(options):
"""Create a manhole server service.
@type options: C{dict}
@param options: A mapping describing the configuration of
the desired service. Recognized key/value pairs are::
"telnetPort": strports description of the address on which
to listen for telnet connections. If None,
no telnet service will be started.
"sshPort": strports description of the address on which to
listen for ssh connections. If None, no ssh
service will be started.
"namespace": dictionary containing desired initial locals
for manhole connections. If None, an empty
dictionary will be used.
"passwd": Name of a passwd(5)-format username/password file.
@rtype: L{twisted.application.service.IService}
@return: A manhole service.
"""
svc = service.MultiService()
namespace = options['namespace']
if namespace is None:
namespace = {}
checker = checkers.FilePasswordDB(options['passwd'])
if options['telnetPort']:
telnetRealm = _StupidRealm(telnet.TelnetBootstrapProtocol,
insults.ServerProtocol,
manhole.ColoredManhole,
namespace)
telnetPortal = portal.Portal(telnetRealm, [checker])
telnetFactory = protocol.ServerFactory()
telnetFactory.protocol = makeTelnetProtocol(telnetPortal)
telnetService = strports.service(options['telnetPort'],
telnetFactory)
telnetService.setServiceParent(svc)
if options['sshPort']:
sshRealm = manhole_ssh.TerminalRealm()
sshRealm.chainedProtocolFactory = chainedProtocolFactory(namespace)
sshPortal = portal.Portal(sshRealm, [checker])
sshFactory = manhole_ssh.ConchFactory(sshPortal)
sshService = strports.service(options['sshPort'],
sshFactory)
sshService.setServiceParent(svc)
return svc
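# Illustrative sketch (not part of the original module): building the service
# from a hand-rolled options mapping; the ports and passwd path are
# placeholders:
#
#   svc = makeService({
#       "telnetPort": "tcp:4040",
#       "sshPort": "tcp:4041",
#       "namespace": {"answer": 42},
#       "passwd": "/etc/manhole.passwd",
#   })
#   svc.setServiceParent(application)  # 'application' comes from a .tac file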
| bsd-3-clause | -1,717,250,764,189,003,300 | 33.725806 | 115 | 0.644914 | false |
OpenSourcePolicyCenter/PolicyBrain | webapp/settings.py | 2 | 4780 | """
Django settings for webapp project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import dj_database_url
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
from django.conf import global_settings
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECRET_KEY = os.environ.get('SECRET_KEY', 'secret key')
SITE_ID = os.environ.get('SITE_ID', 1)
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEV_DEBUG') == 'True'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
(BASE_DIR + '/templates/')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': global_settings.TEMPLATE_CONTEXT_PROCESSORS +
['webapp.apps.pages.views.settings_context_processor',
'webapp.context_processors.google_analytics'],
},
},
]
WEBAPP_VERSION = "1.7.0"
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.humanize',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
# Apps
'webapp.apps.core',
'webapp.apps.taxbrain',
'webapp.apps.dynamic',
'webapp.apps.pages',
'webapp.apps.register',
'webapp.apps.btax',
# Third party apps
'flatblocks',
'account',
'gunicorn',
'import_export',
'storages'
]
MIDDLEWARE_CLASSES = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.gzip.GZipMiddleware',
'htmlmin.middleware.HtmlMinifyMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'webapp.urls'
WSGI_APPLICATION = 'webapp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# if os.environ.get('DATABASE_URL', None):
# Parse database configuration from $DATABASE_URL
TEST_DATABASE = {
'TEST': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'USER': os.environ.get('DATABASE_USER', 'postgres'),
'NAME': 'test_db',
'PASSWORD': os.environ.get('DATABASE_PW', ''),
}
}
if os.environ.get('DATABASE_URL', None): # DATABASE_URL var is set
DATABASES = {'default': dj_database_url.config()}
DATABASES.update(TEST_DATABASE)
else: # DATABASE_URL is not set--try default
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'taxcalc',
'USER': os.environ.get('DATABASE_USER', 'postgres'),
'PASSWORD': os.environ.get('DATABASE_PW', ''),
'HOST': 'localhost',
'PORT': '5432',
}
}
DATABASES.update(TEST_DATABASE)
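# Illustrative parse (values assumed): with
#   DATABASE_URL=postgres://user:secret@db.example.com:5432/taxcalc
# dj_database_url.config() returns the same ENGINE/NAME/USER/PASSWORD/HOST/PORT
# keys as the hard-coded default block above.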
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Use whitenoise to serve static files
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
SENDGRID_API_KEY = os.environ.get("SENDGRID_API_KEY", "not-specified")
EMAIL_BACKEND = "sgbackend.SendGridBackend"
BLOG_URL = os.environ.get('BLOG_URL', 'http://news.ospc.org/')
GOOGLE_ANALYTICS_PROPERTY_ID = os.environ.get("GOOGLE_ANALYTICS_PROPERTY_ID",
"")
GOOGLE_ANALYTICS_EMBEDDED_ID = os.environ.get("GOOGLE_ANALYTICS_EMBEDDED_ID",
"")
GOOGLE_ANALYTICS_DOMAIN = os.environ.get("GOOGLE_ANALYTICS_DOMAIN", "")
| mit | 8,374,259,767,958,121,000 | 28.875 | 79 | 0.666946 | false |
kennedyshead/home-assistant | homeassistant/components/zwave/climate.py | 5 | 21953 | """Support for Z-Wave climate devices."""
# Because we do not compile openzwave on CI
from __future__ import annotations
import logging
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
DOMAIN,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, const
_LOGGER = logging.getLogger(__name__)
CONF_NAME = "name"
DEFAULT_NAME = "Z-Wave Climate"
REMOTEC = 0x5254
REMOTEC_ZXT_120 = 0x8377
REMOTEC_ZXT_120_THERMOSTAT = (REMOTEC, REMOTEC_ZXT_120)
ATTR_OPERATING_STATE = "operating_state"
ATTR_FAN_STATE = "fan_state"
ATTR_FAN_ACTION = "fan_action"
AUX_HEAT_ZWAVE_MODE = "Aux Heat"
# Device is in manufacturer specific mode (e.g. setting the valve manually)
PRESET_MANUFACTURER_SPECIFIC = "Manufacturer Specific"
WORKAROUND_ZXT_120 = "zxt_120"
DEVICE_MAPPINGS = {REMOTEC_ZXT_120_THERMOSTAT: WORKAROUND_ZXT_120}
HVAC_STATE_MAPPINGS = {
"off": HVAC_MODE_OFF,
"heat": HVAC_MODE_HEAT,
"heat mode": HVAC_MODE_HEAT,
"heat (default)": HVAC_MODE_HEAT,
"furnace": HVAC_MODE_HEAT,
"fan only": HVAC_MODE_FAN_ONLY,
"dry air": HVAC_MODE_DRY,
"moist air": HVAC_MODE_DRY,
"cool": HVAC_MODE_COOL,
"heat_cool": HVAC_MODE_HEAT_COOL,
"auto": HVAC_MODE_HEAT_COOL,
"auto changeover": HVAC_MODE_HEAT_COOL,
}
MODE_SETPOINT_MAPPINGS = {
"off": (),
"heat": ("setpoint_heating",),
"cool": ("setpoint_cooling",),
"auto": ("setpoint_heating", "setpoint_cooling"),
"aux heat": ("setpoint_heating",),
"furnace": ("setpoint_furnace",),
"dry air": ("setpoint_dry_air",),
"moist air": ("setpoint_moist_air",),
"auto changeover": ("setpoint_auto_changeover",),
"heat econ": ("setpoint_eco_heating",),
"cool econ": ("setpoint_eco_cooling",),
"away": ("setpoint_away_heating", "setpoint_away_cooling"),
"full power": ("setpoint_full_power",),
# aliases found in xml configs
"comfort": ("setpoint_heating",),
"heat mode": ("setpoint_heating",),
"heat (default)": ("setpoint_heating",),
"dry floor": ("setpoint_dry_air",),
"heat eco": ("setpoint_eco_heating",),
"energy saving": ("setpoint_eco_heating",),
"energy heat": ("setpoint_eco_heating",),
"vacation": ("setpoint_away_heating", "setpoint_away_cooling"),
# for tests
"heat_cool": ("setpoint_heating", "setpoint_cooling"),
}
HVAC_CURRENT_MAPPINGS = {
"idle": CURRENT_HVAC_IDLE,
"heat": CURRENT_HVAC_HEAT,
"pending heat": CURRENT_HVAC_IDLE,
"heating": CURRENT_HVAC_HEAT,
"cool": CURRENT_HVAC_COOL,
"pending cool": CURRENT_HVAC_IDLE,
"cooling": CURRENT_HVAC_COOL,
"fan only": CURRENT_HVAC_FAN,
"vent / economiser": CURRENT_HVAC_FAN,
"off": CURRENT_HVAC_OFF,
}
PRESET_MAPPINGS = {
"away": PRESET_AWAY,
"full power": PRESET_BOOST,
"manufacturer specific": PRESET_MANUFACTURER_SPECIFIC,
}
DEFAULT_HVAC_MODES = [
HVAC_MODE_HEAT_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_DRY,
HVAC_MODE_OFF,
HVAC_MODE_AUTO,
]
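# Mapping walk-through (illustrative): a device reporting the Z-Wave mode
# "Heat Mode" is normalized via HVAC_STATE_MAPPINGS["heat mode"] to
# HVAC_MODE_HEAT, and MODE_SETPOINT_MAPPINGS["heat mode"] names the
# ("setpoint_heating",) value used to read and write its target temperature.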
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Climate device from Config Entry."""
@callback
def async_add_climate(climate):
"""Add Z-Wave Climate Device."""
async_add_entities([climate])
async_dispatcher_connect(hass, "zwave_new_climate", async_add_climate)
def get_device(hass, values, **kwargs):
"""Create Z-Wave entity device."""
temp_unit = hass.config.units.temperature_unit
if values.primary.command_class == const.COMMAND_CLASS_THERMOSTAT_SETPOINT:
return ZWaveClimateSingleSetpoint(values, temp_unit)
if values.primary.command_class == const.COMMAND_CLASS_THERMOSTAT_MODE:
return ZWaveClimateMultipleSetpoint(values, temp_unit)
return None
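# Dispatch summary (descriptive note, added for clarity): a primary value on
# the THERMOSTAT_SETPOINT command class yields the single-setpoint entity,
# while one on THERMOSTAT_MODE yields the multi-setpoint entity that resolves
# setpoints per mode via MODE_SETPOINT_MAPPINGS.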
class ZWaveClimateBase(ZWaveDeviceEntity, ClimateEntity):
"""Representation of a Z-Wave Climate device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self._target_temperature = None
self._target_temperature_range = (None, None)
self._current_temperature = None
self._hvac_action = None
self._hvac_list = None # [zwave_mode]
self._hvac_mapping = None # {ha_mode:zwave_mode}
self._hvac_mode = None # ha_mode
self._aux_heat = None
self._default_hvac_mode = None # ha_mode
self._preset_mapping = None # {ha_mode:zwave_mode}
self._preset_list = None # [zwave_mode]
self._preset_mode = None # ha_mode if exists, else zwave_mode
self._current_fan_mode = None
self._fan_modes = None
self._fan_action = None
self._current_swing_mode = None
self._swing_modes = None
self._unit = temp_unit
_LOGGER.debug("temp_unit is %s", self._unit)
self._zxt_120 = None
# Make sure that we have values for the key before converting to int
if self.node.manufacturer_id.strip() and self.node.product_id.strip():
specific_sensor_key = (
int(self.node.manufacturer_id, 16),
int(self.node.product_id, 16),
)
if (
specific_sensor_key in DEVICE_MAPPINGS
and DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_ZXT_120
):
_LOGGER.debug("Remotec ZXT-120 Zwave Thermostat workaround")
self._zxt_120 = 1
self.update_properties()
    def _mode(self):
"""Return thermostat mode Z-Wave value."""
raise NotImplementedError()
def _current_mode_setpoints(self) -> tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
raise NotImplementedError()
@property
def supported_features(self):
"""Return the list of supported features."""
support = SUPPORT_TARGET_TEMPERATURE
if self._hvac_list and HVAC_MODE_HEAT_COOL in self._hvac_list:
support |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self._preset_list and PRESET_AWAY in self._preset_list:
support |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self.values.fan_mode:
support |= SUPPORT_FAN_MODE
if self._zxt_120 == 1 and self.values.zxt_120_swing_mode:
support |= SUPPORT_SWING_MODE
if self._aux_heat:
support |= SUPPORT_AUX_HEAT
if self._preset_list:
support |= SUPPORT_PRESET_MODE
return support
def update_properties(self):
"""Handle the data changes for node values."""
# Operation Mode
self._update_operation_mode()
# Current Temp
self._update_current_temp()
# Fan Mode
self._update_fan_mode()
# Swing mode
self._update_swing_mode()
# Set point
self._update_target_temp()
# Operating state
self._update_operating_state()
# Fan operating state
self._update_fan_state()
def _update_operation_mode(self):
"""Update hvac and preset modes."""
if self._mode():
self._hvac_list = []
self._hvac_mapping = {}
self._preset_list = []
self._preset_mapping = {}
mode_list = self._mode().data_items
if mode_list:
for mode in mode_list:
ha_mode = HVAC_STATE_MAPPINGS.get(str(mode).lower())
ha_preset = PRESET_MAPPINGS.get(str(mode).lower())
if mode == AUX_HEAT_ZWAVE_MODE:
# Aux Heat should not be included in any mapping
self._aux_heat = True
elif ha_mode and ha_mode not in self._hvac_mapping:
self._hvac_mapping[ha_mode] = mode
self._hvac_list.append(ha_mode)
elif ha_preset and ha_preset not in self._preset_mapping:
self._preset_mapping[ha_preset] = mode
self._preset_list.append(ha_preset)
else:
# If nothing matches
self._preset_list.append(mode)
# Default operation mode
for mode in DEFAULT_HVAC_MODES:
if mode in self._hvac_mapping.keys():
self._default_hvac_mode = mode
break
if self._preset_list:
# Presets are supported
self._preset_list.append(PRESET_NONE)
current_mode = self._mode().data
_LOGGER.debug("current_mode=%s", current_mode)
_hvac_temp = next(
(
key
for key, value in self._hvac_mapping.items()
if value == current_mode
),
None,
)
if _hvac_temp is None:
# The current mode is not a hvac mode
if (
"heat" in current_mode.lower()
and HVAC_MODE_HEAT in self._hvac_mapping.keys()
):
# The current preset modes maps to HVAC_MODE_HEAT
_LOGGER.debug("Mapped to HEAT")
self._hvac_mode = HVAC_MODE_HEAT
elif (
"cool" in current_mode.lower()
and HVAC_MODE_COOL in self._hvac_mapping.keys()
):
# The current preset modes maps to HVAC_MODE_COOL
_LOGGER.debug("Mapped to COOL")
self._hvac_mode = HVAC_MODE_COOL
else:
# The current preset modes maps to self._default_hvac_mode
_LOGGER.debug("Mapped to DEFAULT")
self._hvac_mode = self._default_hvac_mode
self._preset_mode = next(
(
key
for key, value in self._preset_mapping.items()
if value == current_mode
),
current_mode,
)
else:
# The current mode is a hvac mode
self._hvac_mode = _hvac_temp
self._preset_mode = PRESET_NONE
_LOGGER.debug("self._hvac_mapping=%s", self._hvac_mapping)
_LOGGER.debug("self._hvac_list=%s", self._hvac_list)
_LOGGER.debug("self._hvac_mode=%s", self._hvac_mode)
_LOGGER.debug("self._default_hvac_mode=%s", self._default_hvac_mode)
_LOGGER.debug("self._hvac_action=%s", self._hvac_action)
_LOGGER.debug("self._aux_heat=%s", self._aux_heat)
_LOGGER.debug("self._preset_mapping=%s", self._preset_mapping)
_LOGGER.debug("self._preset_list=%s", self._preset_list)
_LOGGER.debug("self._preset_mode=%s", self._preset_mode)
def _update_current_temp(self):
"""Update current temperature."""
if self.values.temperature:
self._current_temperature = self.values.temperature.data
device_unit = self.values.temperature.units
if device_unit is not None:
self._unit = device_unit
def _update_fan_mode(self):
"""Update fan mode."""
if self.values.fan_mode:
self._current_fan_mode = self.values.fan_mode.data
fan_modes = self.values.fan_mode.data_items
if fan_modes:
self._fan_modes = list(fan_modes)
_LOGGER.debug("self._fan_modes=%s", self._fan_modes)
_LOGGER.debug("self._current_fan_mode=%s", self._current_fan_mode)
def _update_swing_mode(self):
"""Update swing mode."""
if self._zxt_120 == 1:
if self.values.zxt_120_swing_mode:
self._current_swing_mode = self.values.zxt_120_swing_mode.data
swing_modes = self.values.zxt_120_swing_mode.data_items
if swing_modes:
self._swing_modes = list(swing_modes)
_LOGGER.debug("self._swing_modes=%s", self._swing_modes)
_LOGGER.debug("self._current_swing_mode=%s", self._current_swing_mode)
def _update_target_temp(self):
"""Update target temperature."""
current_setpoints = self._current_mode_setpoints()
self._target_temperature = None
self._target_temperature_range = (None, None)
if len(current_setpoints) == 1:
(setpoint,) = current_setpoints
if setpoint is not None:
self._target_temperature = round((float(setpoint.data)), 1)
elif len(current_setpoints) == 2:
(setpoint_low, setpoint_high) = current_setpoints
target_low, target_high = None, None
if setpoint_low is not None:
target_low = round((float(setpoint_low.data)), 1)
if setpoint_high is not None:
target_high = round((float(setpoint_high.data)), 1)
self._target_temperature_range = (target_low, target_high)
def _update_operating_state(self):
"""Update operating state."""
if self.values.operating_state:
mode = self.values.operating_state.data
self._hvac_action = HVAC_CURRENT_MAPPINGS.get(str(mode).lower(), mode)
def _update_fan_state(self):
"""Update fan state."""
if self.values.fan_action:
self._fan_action = self.values.fan_action.data
@property
def fan_mode(self):
"""Return the fan speed set."""
return self._current_fan_mode
@property
def fan_modes(self):
"""Return a list of available fan modes."""
return self._fan_modes
@property
def swing_mode(self):
"""Return the swing mode set."""
return self._current_swing_mode
@property
def swing_modes(self):
"""Return a list of available swing modes."""
return self._swing_modes
@property
def temperature_unit(self):
"""Return the unit of measurement."""
if self._unit == "C":
return TEMP_CELSIUS
if self._unit == "F":
return TEMP_FAHRENHEIT
return self._unit
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._mode():
return self._hvac_mode
return self._default_hvac_mode
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
if self._mode():
return self._hvac_list
return []
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
return self._hvac_action
@property
def is_aux_heat(self):
"""Return true if aux heater."""
if not self._aux_heat:
return None
if self._mode().data == AUX_HEAT_ZWAVE_MODE:
return True
return False
@property
def preset_mode(self):
"""Return preset operation ie. eco, away.
Need to be one of PRESET_*.
"""
if self._mode():
return self._preset_mode
return PRESET_NONE
@property
def preset_modes(self):
"""Return the list of available preset operation modes.
Need to be a subset of PRESET_MODES.
"""
if self._mode():
return self._preset_list
return []
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_low(self) -> float | None:
"""Return the lowbound target temperature we try to reach."""
return self._target_temperature_range[0]
@property
def target_temperature_high(self) -> float | None:
"""Return the highbound target temperature we try to reach."""
return self._target_temperature_range[1]
def set_temperature(self, **kwargs):
"""Set new target temperature."""
current_setpoints = self._current_mode_setpoints()
if len(current_setpoints) == 1:
(setpoint,) = current_setpoints
target_temp = kwargs.get(ATTR_TEMPERATURE)
if setpoint is not None and target_temp is not None:
_LOGGER.debug("Set temperature to %s", target_temp)
setpoint.data = target_temp
elif len(current_setpoints) == 2:
(setpoint_low, setpoint_high) = current_setpoints
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if setpoint_low is not None and target_temp_low is not None:
_LOGGER.debug("Set low temperature to %s", target_temp_low)
setpoint_low.data = target_temp_low
if setpoint_high is not None and target_temp_high is not None:
_LOGGER.debug("Set high temperature to %s", target_temp_high)
setpoint_high.data = target_temp_high
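    # Illustrative call patterns (assumed entity instance `thermostat`):
    #   thermostat.set_temperature(temperature=21.5)          # single-setpoint mode
    #   thermostat.set_temperature(target_temp_low=19.0,
    #                              target_temp_high=24.0)     # dual-setpoint mode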
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
_LOGGER.debug("Set fan mode to %s", fan_mode)
if not self.values.fan_mode:
return
self.values.fan_mode.data = fan_mode
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
_LOGGER.debug("Set hvac_mode to %s", hvac_mode)
if not self._mode():
return
operation_mode = self._hvac_mapping.get(hvac_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
def turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
if not self._aux_heat:
return
operation_mode = AUX_HEAT_ZWAVE_MODE
_LOGGER.debug("Aux heat on. Set operation mode to %s", operation_mode)
self._mode().data = operation_mode
def turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
if not self._aux_heat:
return
if HVAC_MODE_HEAT in self._hvac_mapping:
operation_mode = self._hvac_mapping.get(HVAC_MODE_HEAT)
else:
operation_mode = self._hvac_mapping.get(HVAC_MODE_OFF)
_LOGGER.debug("Aux heat off. Set operation mode to %s", operation_mode)
self._mode().data = operation_mode
def set_preset_mode(self, preset_mode):
"""Set new target preset mode."""
_LOGGER.debug("Set preset_mode to %s", preset_mode)
if not self._mode():
return
if preset_mode == PRESET_NONE:
# Activate the current hvac mode
self._update_operation_mode()
operation_mode = self._hvac_mapping.get(self.hvac_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
else:
operation_mode = self._preset_mapping.get(preset_mode, preset_mode)
_LOGGER.debug("Set operation_mode to %s", operation_mode)
self._mode().data = operation_mode
def set_swing_mode(self, swing_mode):
"""Set new target swing mode."""
_LOGGER.debug("Set swing_mode to %s", swing_mode)
if self._zxt_120 == 1 and self.values.zxt_120_swing_mode:
self.values.zxt_120_swing_mode.data = swing_mode
@property
def extra_state_attributes(self):
"""Return the optional state attributes."""
data = super().extra_state_attributes
if self._fan_action:
data[ATTR_FAN_ACTION] = self._fan_action
return data
class ZWaveClimateSingleSetpoint(ZWaveClimateBase):
"""Representation of a single setpoint Z-Wave thermostat device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveClimateBase.__init__(self, values, temp_unit)
    def _mode(self):
"""Return thermostat mode Z-Wave value."""
return self.values.mode
def _current_mode_setpoints(self) -> tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
return (self.values.primary,)
class ZWaveClimateMultipleSetpoint(ZWaveClimateBase):
"""Representation of a multiple setpoint Z-Wave thermostat device."""
def __init__(self, values, temp_unit):
"""Initialize the Z-Wave climate device."""
ZWaveClimateBase.__init__(self, values, temp_unit)
    def _mode(self):
"""Return thermostat mode Z-Wave value."""
return self.values.primary
def _current_mode_setpoints(self) -> tuple:
"""Return a tuple of current setpoint Z-Wave value(s)."""
current_mode = str(self.values.primary.data).lower()
setpoints_names = MODE_SETPOINT_MAPPINGS.get(current_mode, ())
return tuple(getattr(self.values, name, None) for name in setpoints_names)
| apache-2.0 | 58,606,247,259,890,584 | 34.695935 | 82 | 0.583565 | false |
mjbrewer/testindex | magnum/common/pythonk8sclient/client/models/V1beta3_PersistentVolumeStatus.py | 15 | 1363 | #!/usr/bin/env python
"""
Copyright 2015 Reverb Technologies, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class V1beta3_PersistentVolumeStatus(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
"""
Attributes:
swaggerTypes (dict): The key is attribute name and the value is attribute type.
attributeMap (dict): The key is attribute name and the value is json key in definition.
"""
self.swaggerTypes = {
'phase': 'str'
}
self.attributeMap = {
'phase': 'phase'
}
        # The current phase of a persistent volume.
self.phase = None # str
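# Illustrative use (attribute value assumed):
#   status = V1beta3_PersistentVolumeStatus()
#   status.phase = 'Bound'  # serialized under the 'phase' JSON key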
| apache-2.0 | 1,605,808,413,105,378,000 | 29.288889 | 97 | 0.618489 | false |
t-animal/sigal | tests/conftest.py | 2 | 1060 | import blinker
import os
import PIL
import pytest
import shutil
from sigal import signals
from sigal.settings import read_settings
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
BUILD_DIR = os.path.join(CURRENT_DIR, 'sample', '_build')
@pytest.fixture(scope='session', autouse=True)
def remove_build():
"""Ensure that build directory does not exists before each test."""
if os.path.exists(BUILD_DIR):
shutil.rmtree(BUILD_DIR)
@pytest.fixture
def settings():
"""Read the sample config file."""
return read_settings(os.path.join(CURRENT_DIR, 'sample', 'sigal.conf.py'))
@pytest.fixture()
def disconnect_signals():
# Reset plugins
yield None
for name in dir(signals):
if not name.startswith('_'):
try:
sig = getattr(signals, name)
if isinstance(sig, blinker.Signal):
sig.receivers.clear()
except Exception:
pass
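# Illustrative test using these fixtures (assumed test module):
# def test_gallery_build(settings, disconnect_signals):
#     ...  # settings come from tests/sample/sigal.conf.py; signals reset after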
def pytest_report_header(config):
return "project deps: Pillow-{}".format(PIL.__version__)
| mit | 7,877,713,479,026,657,000 | 24.238095 | 78 | 0.642453 | false |
echevemaster/fedora-college | pavement.py | 2 | 10779 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- coding: utf-8; -*-
from __future__ import print_function
import os
import sys
import time
import subprocess
# Python 2.6 subprocess.check_output compatibility. Thanks Greg Hewgill!
if 'check_output' not in dir(subprocess):
def check_output(cmd_args, *args, **kwargs):
proc = subprocess.Popen(
cmd_args, *args,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
out, err = proc.communicate()
if proc.returncode != 0:
            raise subprocess.CalledProcessError(proc.returncode, cmd_args)
return out
subprocess.check_output = check_output
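# With the shim in place, e.g. subprocess.check_output(['git', 'rev-parse',
# 'HEAD']) behaves the same on Python 2.6 as on 2.7: it returns the command's
# stdout, or raises CalledProcessError on a non-zero exit (illustrative usage).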
from paver.easy import options, task, needs, consume_args
from paver.setuputils import install_distutils_tasks
try:
import colorama
colorama.init() # Initialize colorama on Windows
except ImportError:
# Don't require colorama just for running paver tasks. This allows us to
# run `paver install' without requiring the user to first have colorama
# installed.
pass
sys.path.append('.')
from setup import setup_dict
# Constants
CODE_DIRECTORY = 'fedora_college'
DOCS_DIRECTORY = 'docs'
TESTS_DIRECTORY = 'tests'
PYTEST_FLAGS = ['--doctest-modules']
# Miscellaneous helper functions
def get_project_files():
"""Retrieve a list of project files, ignoring hidden files.
:return: sorted list of project files
:rtype: :class:`list`
"""
if is_git_project():
return get_git_project_files()
project_files = []
for top, subdirs, files in os.walk('.'):
for subdir in subdirs:
if subdir.startswith('.'):
subdirs.remove(subdir)
for f in files:
if f.startswith('.'):
continue
project_files.append(os.path.join(top, f))
return project_files
def is_git_project():
return os.path.isdir('.git')
def get_git_project_files():
"""Retrieve a list of all non-ignored files, including untracked files,
excluding deleted files.
:return: sorted list of git project files
:rtype: :class:`list`
"""
cached_and_untracked_files = git_ls_files(
'--cached', # All files cached in the index
'--others', # Untracked files
# Exclude untracked files that would be excluded by .gitignore, etc.
'--exclude-standard')
uncommitted_deleted_files = git_ls_files('--deleted')
# Since sorting of files in a set is arbitrary, return a sorted list to
# provide a well-defined order to tools like flake8, etc.
return sorted(cached_and_untracked_files - uncommitted_deleted_files)
def git_ls_files(*cmd_args):
"""Run ``git ls-files`` in the top-level project directory. Arguments go
directly to execution call.
:return: set of file names
:rtype: :class:`set`
"""
cmd = ['git', 'ls-files']
cmd.extend(cmd_args)
return set(subprocess.check_output(cmd).splitlines())
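# Illustrative call (paths assumed): git_ls_files('--deleted') might return
# {b'old_module.py'}, the set of files deleted but not yet committed.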
def print_passed():
# generated on http://patorjk.com/software/taag/#p=display&f=Small&t=PASSED
print_success_message(r''' ___ _ ___ ___ ___ ___
| _ \/_\ / __/ __| __| \
| _/ _ \\__ \__ \ _|| |) |
|_|/_/ \_\___/___/___|___/
''')
def print_failed():
# generated on http://patorjk.com/software/taag/#p=display&f=Small&t=FAILED
print_failure_message(r''' ___ _ ___ _ ___ ___
| __/_\ |_ _| | | __| \
| _/ _ \ | || |__| _|| |) |
|_/_/ \_\___|____|___|___/
''')
def print_success_message(message):
"""Print a message indicating success in green color to STDOUT.
:param message: the message to print
:type message: :class:`str`
"""
try:
import colorama
print(colorama.Fore.GREEN + message + colorama.Fore.RESET)
except ImportError:
print(message)
def print_failure_message(message):
"""Print a message indicating failure in red color to STDERR.
:param message: the message to print
:type message: :class:`str`
"""
try:
import colorama
print(colorama.Fore.RED + message + colorama.Fore.RESET,
file=sys.stderr)
except ImportError:
print(message, file=sys.stderr)
options(setup=setup_dict)
install_distutils_tasks()
# Task-related functions
def _doc_make(*make_args):
"""Run make in sphinx' docs directory.
:return: exit code
"""
if sys.platform == 'win32':
# Windows
make_cmd = ['make.bat']
else:
# Linux, Mac OS X, and others
make_cmd = ['make']
make_cmd.extend(make_args)
return subprocess.call(make_cmd, cwd=DOCS_DIRECTORY)
def _lint():
"""Run lint and return an exit code."""
# Flake8 doesn't have an easy way to run checks using a Python function, so
# just fork off another process to do it.
# Python 3 compat:
# - The result of subprocess call outputs are byte strings, meaning we need
# to pass a byte string to endswith.
project_python_files = [filename for filename in get_project_files()
if filename.endswith(b'.py')]
retcode = subprocess.call(
['flake8', '--max-complexity=10'] + project_python_files)
if retcode == 0:
print_success_message('No style errors')
return retcode
def _test():
"""Run the unit tests.
:return: exit code
"""
import pytest
# This runs the unit tests.
# It also runs doctest, but only on the modules in TESTS_DIRECTORY.
return pytest.main(PYTEST_FLAGS + [TESTS_DIRECTORY])
def _test_all():
"""Run lint and tests.
:return: exit code
"""
return _lint() + _test()
# Tasks
@task
@needs('doc_html', 'setuptools.command.sdist')
def sdist():
"""Build the HTML docs and the tarball."""
pass
@task
def test():
"""Run the unit tests."""
raise SystemExit(_test())
@task
def lint():
# This refuses to format properly when running `paver help' unless
# this ugliness is used.
('Perform PEP8 style check, run PyFlakes, and run McCabe complexity '
'metrics on the code.')
raise SystemExit(_lint())
@task
def test_all():
"""Perform a style check and run all unit tests."""
retcode = _test_all()
if retcode == 0:
print_passed()
else:
print_failed()
raise SystemExit(retcode)
@task
@consume_args
def run(args):
"""Run the package's main script. All arguments are passed to it."""
# The main script expects to get the called executable's name as
# argv[0]. However, paver doesn't provide that in args. Even if it did (or
# we dove into sys.argv), it wouldn't be useful because it would be paver's
# executable. So we just pass the package name in as the executable name,
# since it's close enough. This should never be seen by an end user
# installing through Setuptools anyway.
from fedora_college.main import _main
raise SystemExit(_main([CODE_DIRECTORY] + args))
@task
def commit():
"""Commit only if all the tests pass."""
if _test_all() == 0:
subprocess.check_call(['git', 'commit'])
else:
print_failure_message('\nTests failed, not committing.')
@task
def coverage():
"""Run tests and show test coverage report."""
try:
import pytest_cov # NOQA
except ImportError:
print_failure_message(
'Install the pytest coverage plugin to use this task, '
"i.e., `pip install pytest-cov'.")
raise SystemExit(1)
import pytest
pytest.main(PYTEST_FLAGS + [
'--cov', CODE_DIRECTORY,
'--cov-report', 'term-missing',
TESTS_DIRECTORY])
@task # NOQA
def doc_watch():
"""Watch for changes in the docs and rebuild HTML docs when changed."""
try:
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
except ImportError:
print_failure_message('Install the watchdog package to use this task, '
"i.e., `pip install watchdog'.")
raise SystemExit(1)
class RebuildDocsEventHandler(FileSystemEventHandler):
def __init__(self, base_paths):
self.base_paths = base_paths
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event: The event object representing the file system event.
:type event: :class:`watchdog.events.FileSystemEvent`
"""
for base_path in self.base_paths:
if event.src_path.endswith(base_path):
super(RebuildDocsEventHandler, self).dispatch(event)
# We found one that matches. We're done.
return
def on_modified(self, event):
print_failure_message('Modification detected. Rebuilding docs.')
# Strip off the path prefix.
# import os
# if event.src_path[len(os.getcwd()) + 1:].startswith(
# CODE_DIRECTORY):
# sphinx-build doesn't always pick up changes on code files,
# even though they are used to generate the documentation. As
# a workaround, just clean before building.
doc_html()
print_success_message('Docs have been rebuilt.')
print_success_message(
'Watching for changes in project files, press Ctrl-C to cancel...')
handler = RebuildDocsEventHandler(get_project_files())
observer = Observer()
observer.schedule(handler, path='.', recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
@task
@needs('doc_html')
def doc_open():
"""Build the HTML docs and open them in a web browser."""
doc_index = os.path.join(DOCS_DIRECTORY, 'build', 'html', 'index.html')
if sys.platform == 'darwin':
# Mac OS X
subprocess.check_call(['open', doc_index])
elif sys.platform == 'win32':
# Windows
subprocess.check_call(['start', doc_index], shell=True)
elif sys.platform == 'linux2':
# All freedesktop-compatible desktops
subprocess.check_call(['xdg-open', doc_index])
else:
print_failure_message(
"Unsupported platform. Please open `{0}' manually.".format(
doc_index))
@task
def get_tasks():
"""Get all paver-defined tasks."""
from paver.tasks import environment
for task in environment.get_tasks():
print(task.shortname)
@task
def doc_html():
"""Build the HTML docs."""
retcode = _doc_make('html')
if retcode:
raise SystemExit(retcode)
@task
def doc_clean():
"""Clean (delete) the built docs."""
retcode = _doc_make('clean')
if retcode:
raise SystemExit(retcode)
| bsd-3-clause | -1,290,648,177,244,723,000 | 27.143603 | 79 | 0.614621 | false |
rwaldron/hy | hy/errors.py | 3 | 1259 | # Copyright (c) 2013 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
class HyError(Exception):
"""
    Generic Hy error. All internal Exceptions will be subclassed from this
Exception.
"""
pass
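# Illustrative subclassing pattern (example name, not part of this module):
# class HyExampleError(HyError):
#     """Raised by some internal component."""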
| mit | 3,160,741,846,954,282,000 | 45.62963 | 76 | 0.764893 | false |
sreecha/grpc | tools/run_tests/sanity/core_banned_functions.py | 3 | 3033 | #!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
# map of banned function signature to whitelist
BANNED_EXCEPT = {
'grpc_slice_from_static_buffer(': ['src/core/lib/slice/slice.cc'],
'grpc_resource_quota_ref(': ['src/core/lib/iomgr/resource_quota.cc'],
'grpc_resource_quota_unref(':
['src/core/lib/iomgr/resource_quota.cc', 'src/core/lib/surface/server.cc'],
'grpc_slice_buffer_destroy(': ['src/core/lib/slice/slice_buffer.cc'],
'grpc_slice_buffer_reset_and_unref(':
['src/core/lib/slice/slice_buffer.cc'],
'grpc_slice_ref(': ['src/core/lib/slice/slice.cc'],
'grpc_slice_unref(': ['src/core/lib/slice/slice.cc'],
'grpc_error_create(':
['src/core/lib/iomgr/error.cc', 'src/core/lib/iomgr/error_cfstream.cc'],
'grpc_error_ref(': ['src/core/lib/iomgr/error.cc'],
'grpc_error_unref(': ['src/core/lib/iomgr/error.cc'],
'grpc_os_error(': ['src/core/lib/iomgr/error.cc'],
'grpc_wsa_error(': ['src/core/lib/iomgr/error.cc'],
'grpc_log_if_error(': ['src/core/lib/iomgr/error.cc'],
'grpc_slice_malloc(': ['src/core/lib/slice/slice.cc'],
'grpc_call_cancel_internal(': ['src/core/lib/surface/call.cc'],
'grpc_closure_create(': ['src/core/lib/iomgr/closure.cc'],
'grpc_closure_init(': ['src/core/lib/iomgr/closure.cc'],
'grpc_closure_sched(': ['src/core/lib/iomgr/closure.cc'],
'grpc_closure_run(': ['src/core/lib/iomgr/closure.cc'],
'grpc_closure_list_sched(': ['src/core/lib/iomgr/closure.cc'],
}
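# Whitelist semantics (illustrative): 'grpc_error_ref(' maps to
# ['src/core/lib/iomgr/error.cc'], so a grpc_error_ref() call in any other
# .cc file under src/core is flagged as an error by the scan below.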
errors = 0
num_files = 0
for root, dirs, files in os.walk('src/core'):
if root.startswith('src/core/tsi'): continue
for filename in files:
num_files += 1
path = os.path.join(root, filename)
if os.path.splitext(path)[1] != '.cc': continue
with open(path) as f:
text = f.read()
for banned, exceptions in BANNED_EXCEPT.items():
if path in exceptions: continue
if banned in text:
print('Illegal use of "%s" in %s' % (banned, path))
errors += 1
assert errors == 0
# This check comes about from this issue:
# https://github.com/grpc/grpc/issues/15381
# Basically, a change rendered this script useless and we did not realize it.
# This dumb check ensures that this type of issue doesn't occur again.
assert num_files > 300 # we definitely have more than 300 files
| apache-2.0 | 8,939,600,725,976,868,000 | 41.125 | 79 | 0.66337 | false |
plotly/python-api | packages/python/plotly/plotly/validators/funnel/_textfont.py | 2 | 1863 | import _plotly_utils.basevalidators
class TextfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="textfont", parent_name="funnel", **kwargs):
super(TextfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Textfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for color .
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for family .
size
sizesrc
Sets the source reference on Chart Studio Cloud
for size .
""",
),
**kwargs
)
| mit | 6,268,432,964,624,635,000 | 39.5 | 79 | 0.534085 | false |
StephenWeber/ansible | lib/ansible/modules/cloud/webfaction/webfaction_site.py | 20 | 7086 | #!/usr/bin/python
#
# Create Webfaction website using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: webfaction_site
short_description: Add or remove a website on a Webfaction host
description:
- Add or remove a website on a Webfaction host. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- Sadly, you I(do) need to know your webfaction hostname for the C(host) parameter. But at least, unlike the API, you don't need to know the IP address - you can use a DNS name.
- If a site of the same name exists in the account but on a different host, the operation will exit.
- "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the website
required: true
state:
description:
- Whether the website should exist
required: false
choices: ['present', 'absent']
default: "present"
host:
description:
- The webfaction host on which the site should be created.
required: true
https:
description:
- Whether or not to use HTTPS
required: false
choices:
- true
- false
default: 'false'
site_apps:
description:
- A mapping of URLs to apps
required: false
subdomains:
description:
- A list of subdomains associated with this site.
required: false
default: null
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
'''
EXAMPLES = '''
- name: create website
webfaction_site:
name: testsite1
state: present
host: myhost.webfaction.com
subdomains:
- 'testsite1.my_domain.org'
site_apps:
- ['testapp1', '/']
https: no
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=False, choices=['present', 'absent'], default='present'),
# You can specify an IP address or hostname.
host = dict(required=True),
https = dict(required=False, type='bool', default=False),
subdomains = dict(required=False, type='list', default=[]),
site_apps = dict(required=False, type='list', default=[]),
login_name = dict(required=True),
login_password = dict(required=True, no_log=True),
),
supports_check_mode=True
)
site_name = module.params['name']
site_state = module.params['state']
site_host = module.params['host']
site_ip = socket.gethostbyname(site_host)
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
site_list = webfaction.list_websites(session_id)
site_map = dict([(i['name'], i) for i in site_list])
existing_site = site_map.get(site_name)
result = {}
# Here's where the real stuff happens
if site_state == 'present':
# Does a site with this name already exist?
if existing_site:
# If yes, but it's on a different IP address, then fail.
# If we wanted to allow relocation, we could add a 'relocate=true' option
# which would get the existing IP address, delete the site there, and create it
# at the new address. A bit dangerous, perhaps, so for now we'll require manual
# deletion if it's on another host.
if existing_site['ip'] != site_ip:
module.fail_json(msg="Website already exists with a different IP address. Please fix by hand.")
# If it's on this host and the key parameters are the same, nothing needs to be done.
if (existing_site['https'] == module.boolean(module.params['https'])) and \
(set(existing_site['subdomains']) == set(module.params['subdomains'])) and \
(dict(existing_site['website_apps']) == dict(module.params['site_apps'])):
module.exit_json(
changed = False
)
positional_args = [
session_id, site_name, site_ip,
module.boolean(module.params['https']),
module.params['subdomains'],
]
for a in module.params['site_apps']:
positional_args.append( (a[0], a[1]) )
if not module.check_mode:
# If this isn't a dry run, create or modify the site
result.update(
webfaction.create_website(
*positional_args
) if not existing_site else webfaction.update_website (
*positional_args
)
)
elif site_state == 'absent':
# If the site's already not there, nothing changed.
if not existing_site:
module.exit_json(
changed = False,
)
if not module.check_mode:
# If this isn't a dry run, delete the site
result.update(
webfaction.delete_website(session_id, site_name, site_ip)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(site_state))
module.exit_json(
changed = True,
result = result
)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -114,419,468,734,313,840 | 31.95814 | 353 | 0.600762 | false |
cledio66/pyglet | experimental/pinch.py | 28 | 1196 | #!/usr/bin/env python
import sys
from pyglet.gl import *
from pyglet import window, image
import shader
w = window.Window(512, 512)
kitten = image.load('../examples/programming_guide/kitten.jpg')
pinch_f = '''
uniform sampler2D tex;
uniform vec2 size;
uniform vec2 mouse;
uniform float strength;
void main() {
vec2 h = vec2(1.0/size.x, 0.0);
vec2 pos = gl_TexCoord[0].st;
vec2 v = pos - mouse;
float d = length(v);
v = normalize(v);
v = v * clamp(exp(2. * d) / strength, 0., 1.);
gl_FragColor = texture2D(tex, pos + v);
}
'''
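# Reading the shader (descriptive note): each fragment re-samples the texture
# at pos + v, where v points away from the mouse and is scaled by
# clamp(exp(2*d)/strength, 0, 1); increasing `strength` shrinks the offset,
# so the distortion is strongest at low `strength` values.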
pinch = shader.ShaderProgram()
pinch.setShader(shader.FragmentShader('pinch_f', pinch_f))
pinch.install()
pinch.uset2F('size', float(kitten.width), float(kitten.height))
@w.event
def on_mouse_motion(x, y, *args):
pinch.uset2F('mouse', float(x)/kitten.width, float(y)/kitten.height)
return True
strength = 50.
pinch.uset1F('strength', strength)
@w.event
def on_mouse_scroll(x, y, dx, dy):
global strength
strength = max(1, strength + dy)
pinch.uset1F('strength', float(strength))
return True
while not w.has_exit:
w.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
kitten.blit(0, 0)
w.flip()
| bsd-3-clause | 760,188,271,278,958,700 | 20.357143 | 72 | 0.659699 | false |
jgsogo/neutron | webapp/server/models/faq.py | 1 | 3656 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.validators import ValidationError
from django.utils.encoding import python_2_unicode_compatible
from .email import Email, MANAGERS_MAILS
import logging
log = logging.getLogger(__name__)
class QuestionManager(models.Manager):
def answered(self):
return self.filter(answer__isnull=False, show=True)
def pending(self, user):
return self.filter(answer__isnull=True, user=user)
def handle_email(self):
# Re/Send email (to someone who will answer them)
for item in self.filter(answer__isnull=True):
item.notify_team()
# Notify users about new answers
for item in self.filter(answer__isnull=False, notified=False):
item.notify_user()
@python_2_unicode_compatible
class Question(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, help_text=_("User who asked the question"))
timestamp = models.DateTimeField(auto_now=True)
    public = models.BooleanField(default=False, help_text=_("Whether this question will be shown to everybody or only to the user who asked it."))
show = models.BooleanField(default=False, help_text=_("Whether this question will be shown in the interface."))
user_input = models.TextField()
question = models.TextField(blank=True, null=True, help_text=_("Text that show as question"))
answer = models.TextField(blank=True, null=True, help_text=_("Answer. Will send email to user"))
notified = models.BooleanField(default=False, editable=False,
help_text=_("Whether a mail has been enqueued with the response to the user."))
objects = QuestionManager()
class Meta:
verbose_name = _('Question')
verbose_name_plural = _('Questions')
def __str__(self):
return '{}...'.format(self.user_input[:100])
def save(self, notify=False, *args, **kwargs):
super(Question, self).save(*args, **kwargs)
if notify:
self.notify_team()
def clean(self):
if self.show and not (self.answer and self.question):
raise ValidationError('Cannot show an unanswered question!')
def notify_team(self):
# Re/Send email (to someone who will answer them)
obj = Email()
obj.subject = _("{user} asked a question").format(user=self.user)
obj.recipient = MANAGERS_MAILS[0] # TODO: Who to send question to?
obj.staff_recipient = Email.STAFF_RECIPIENTS.managers
obj.template = 'email/question.txt'
obj.json = {'user': str(self.user),
'timestamp': self.timestamp,
'text': self.user_input,
}
obj.save()
def notify_user(self):
if not self.answer:
            log.warning("Won't notify the user about a question that hasn't been answered yet")
else:
obj = Email()
obj.subject = _("Your question has been answered!")
obj.recipient = self.user.email
obj.staff_recipient = Email.STAFF_RECIPIENTS.managers
obj.template = 'email/answer.txt'
obj.json = {'question_pk': self.pk,
'user': str(self.user),
'timestamp': self.timestamp,
'question': self.user_input,
'answer': self.answer,
}
obj.save()
self.notified = True
self.save()
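# Illustrative periodic task (assumed scheduling, e.g. a cron management
# command): Question.objects.handle_email() re-notifies the team about
# unanswered questions and emails users whose questions gained an answer.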
| gpl-2.0 | 1,941,827,565,070,498,600 | 35.929293 | 141 | 0.6157 | false |
djgagne/hagelslag-unidata | hagelslag/data/SSEFModelGrid.py | 1 | 1829 | from ModelGrid import ModelGrid
from glob import glob
import pandas as pd
import numpy as np
import os
class SSEFModelGrid(ModelGrid):
"""
"""
def __init__(self, member, run_date, variable, start_date, end_date, path, single_step=False):
self.path = path
self.member = member
forecast_hours = np.arange((start_date - run_date).total_seconds() / 3600,
(end_date - run_date).total_seconds() / 3600 + 1)
if single_step:
full_path = self.path + "/".join([member, run_date.strftime("%Y%m%d"), "0000Z", "data2d"]) + "/"
else:
full_path = self.path + "/".join([member, run_date.strftime("%Y%m%d")]) + "/"
potential_filenames = []
if single_step:
for hour in forecast_hours:
potential_filenames.append("{0}ar{1}00.net{2}{3:06d}".format(full_path,
run_date.strftime("%Y%m%d"),
variable.ljust(6,"_"),
int(hour) * 3600))
else:
potential_filenames.append("{0}ssef_{1}_{2}_{3}.nc".format(full_path,
self.member,
run_date.strftime("%Y%m%d"),
variable))
filenames = []
for filename in potential_filenames:
if os.access(filename, os.R_OK):
filenames.append(filename)
super(SSEFModelGrid, self).__init__(filenames, run_date, start_date, end_date, variable)
return
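# Illustrative filenames resolved above (member/date/variable values assumed):
#   single_step: <path>/wrf-core/20150520/0000Z/data2d/ar2015052000.netuh03__010800
#   aggregated:  <path>/wrf-core/20150520/ssef_wrf-core_20150520_uh03.nc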
| mit | 7,229,422,591,940,580,000 | 48.432432 | 108 | 0.436851 | false |
dnxbjyj/python-basic | gui/wxpython/wxPython-demo-4.0.1/demo/agw/PyCollapsiblePane.py | 1 | 9514 | #!/usr/bin/env python
import wx
import wx.lib.buttons as buttons
import os
import sys
try:
dirName = os.path.dirname(os.path.abspath(__file__))
except:
dirName = os.path.dirname(os.path.abspath(sys.argv[0]))
sys.path.append(os.path.split(dirName)[0])
try:
from agw import pycollapsiblepane as PCP
except ImportError: # if it's not there locally, try the wxPython lib.
import wx.lib.agw.pycollapsiblepane as PCP
import images
btnlbl1 = "call Expand(True)"
btnlbl2 = "call Expand(False)"
choices = ["wx.Button",
"GenButton",
"GenBitmapButton",
"GenBitmapTextButton",
"ThemedGenButton",
"ThemedGenBitmapTextButton"]
gtkChoices = ["3, 6",
"4, 8",
"5, 10"]
styles = ["CP_NO_TLW_RESIZE",
"CP_LINE_ABOVE",
"CP_USE_STATICBOX",
"CP_GTK_EXPANDER"]
class PyCollapsiblePaneDemo(wx.Panel):
def __init__(self, parent, log):
wx.Panel.__init__(self, parent)
self.log = log
self.label1 = "Click here to show pane"
self.label2 = "Click here to hide pane"
title = wx.StaticText(self, label="PyCollapsiblePane")
title.SetFont(wx.Font(18, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD))
title.SetForegroundColour("blue")
self.cpStyle = wx.CP_NO_TLW_RESIZE
self.cp = cp = PCP.PyCollapsiblePane(self, label=self.label1,
agwStyle=self.cpStyle)
self.Bind(wx.EVT_COLLAPSIBLEPANE_CHANGED, self.OnPaneChanged, cp)
self.MakePaneContent(cp.GetPane())
self.btnRB = radioBox = wx.RadioBox(self, -1, "Button Types",
choices=choices, style=wx.RA_SPECIFY_ROWS)
self.static1 = wx.StaticText(self, -1, "Collapsed Button Text:")
self.static2 = wx.StaticText(self, -1, "Expanded Button Text:")
self.buttonText1 = wx.TextCtrl(self, -1, self.label1)
self.buttonText2 = wx.TextCtrl(self, -1, self.label2)
self.updateButton = wx.Button(self, -1, "Update!")
sbox = wx.StaticBox(self, -1, 'Styles')
sboxsizer = wx.StaticBoxSizer(sbox, wx.VERTICAL)
self.styleCBs = list()
for styleName in styles:
cb = wx.CheckBox(self, -1, styleName)
if styleName == "CP_NO_TLW_RESIZE":
cb.SetValue(True)
cb.Disable()
cb.Bind(wx.EVT_CHECKBOX, self.OnStyleChoice)
self.styleCBs.append(cb)
sboxsizer.Add(cb, 0, wx.ALL, 4)
self.gtkText = wx.StaticText(self, -1, "Expander Size")
self.gtkChoice = wx.ComboBox(self, -1, choices=gtkChoices)
self.gtkChoice.SetSelection(0)
self.gtkText.Enable(False)
self.gtkChoice.Enable(False)
sizer = wx.BoxSizer(wx.VERTICAL)
radioSizer = wx.BoxSizer(wx.HORIZONTAL)
dummySizer = wx.BoxSizer(wx.VERTICAL)
dummySizer.Add(self.gtkText, 0, wx.EXPAND|wx.BOTTOM, 2)
dummySizer.Add(self.gtkChoice, 0, wx.EXPAND)
radioSizer.Add(radioBox, 0, wx.EXPAND)
radioSizer.Add(sboxsizer, 0, wx.EXPAND|wx.LEFT, 10)
radioSizer.Add(dummySizer, 0, wx.ALIGN_BOTTOM|wx.LEFT, 10)
self.SetSizer(sizer)
sizer.Add((0, 10))
sizer.Add(title, 0, wx.LEFT|wx.RIGHT, 25)
sizer.Add((0, 10))
sizer.Add(radioSizer, 0, wx.LEFT, 25)
sizer.Add((0, 10))
subSizer = wx.FlexGridSizer(2, 3, 5, 5)
subSizer.Add(self.static1, 0, wx.LEFT|wx.ALIGN_CENTER_VERTICAL, 5)
subSizer.Add(self.buttonText1, 0, wx.EXPAND)
subSizer.Add((0, 0))
subSizer.Add(self.static2, 0, wx.LEFT|wx.ALIGN_CENTER_VERTICAL, 5)
subSizer.Add(self.buttonText2, 0, wx.EXPAND)
subSizer.Add(self.updateButton, 0, wx.LEFT|wx.RIGHT, 10)
subSizer.AddGrowableCol(1)
sizer.Add(subSizer, 0, wx.EXPAND|wx.LEFT, 20)
sizer.Add((0, 15))
sizer.Add(cp, 0, wx.RIGHT|wx.LEFT|wx.EXPAND, 20)
self.btn = wx.Button(self, label=btnlbl1)
sizer.Add(self.btn, 0, wx.ALL, 25)
self.Bind(wx.EVT_BUTTON, self.OnToggle, self.btn)
self.Bind(wx.EVT_BUTTON, self.OnUpdate, self.updateButton)
self.Bind(wx.EVT_RADIOBOX, self.OnButtonChoice)
self.Bind(wx.EVT_COMBOBOX, self.OnUserChoice, self.gtkChoice)
def OnToggle(self, event):
self.cp.Collapse(self.cp.IsExpanded())
self.OnPaneChanged()
def OnUpdate(self, event):
self.label1 = self.buttonText1.GetValue()
self.label2 = self.buttonText2.GetValue()
self.OnPaneChanged(None)
def OnStyleChoice(self, evt):
style = 0
for cb in self.styleCBs:
if cb.IsChecked():
style |= getattr(wx, cb.GetLabel(), 0)
self.cpStyle = style
self.Rebuild()
def OnButtonChoice(self, event):
#self.gtkText.Enable(selection == 4)
#self.gtkChoice.Enable(selection == 4)
self.Rebuild()
def MakeButton(self):
if self.cpStyle & wx.CP_GTK_EXPANDER:
return None
selection = self.btnRB.GetSelection()
if selection == 0: # standard wx.Button
btn = wx.Button(self.cp, -1, self.label1)
elif selection == 1: # buttons.GenButton
btn = buttons.GenButton(self.cp, -1, self.label1)
elif selection == 2: # buttons.GenBitmapButton
bmp = images.Smiles.GetBitmap()
btn = buttons.GenBitmapButton(self.cp, -1, bmp)
elif selection == 3: # buttons.GenBitmapTextButton
bmp = images.Mondrian.GetBitmap()
btn = buttons.GenBitmapTextButton(self.cp, -1, bmp, self.label1)
elif selection == 4: # buttons.ThemedGenButton
btn = buttons.ThemedGenButton(self.cp, -1, self.label1)
elif selection == 5: # buttons.ThemedGenBitmapTextButton
bmp = images.Mondrian.GetBitmap()
btn = buttons.ThemedGenBitmapTextButton(self.cp, -1, bmp, self.label1)
return btn
def Rebuild(self):
isExpanded = self.cp.IsExpanded()
self.Freeze()
cp = PCP.PyCollapsiblePane(self, label=self.label1, agwStyle=self.cpStyle)
cp.Bind(wx.EVT_COLLAPSIBLEPANE_CHANGED, self.OnPaneChanged)
self.MakePaneContent(cp.GetPane())
cp.SetExpanderDimensions(*self.GetUserSize())
self.GetSizer().Replace(self.cp, cp)
self.cp.Destroy()
self.cp = cp
btn = self.MakeButton()
if btn:
self.cp.SetButton(btn)
self.gtkText.Enable(btn is None)
self.gtkChoice.Enable(btn is None)
self.btnRB.Enable(btn is not None)
if isExpanded:
self.cp.Expand()
self.Thaw()
self.OnPaneChanged(None)
self.Layout()
def OnPaneChanged(self, event=None):
if event:
self.log.write('wx.EVT_COLLAPSIBLEPANE_CHANGED: %s\n' % event.Collapsed)
# redo the layout
self.Layout()
# and also change the labels
if self.cp.IsExpanded():
self.cp.SetLabel(self.label2)
self.btn.SetLabel(btnlbl2)
else:
self.cp.SetLabel(self.label1)
self.btn.SetLabel(btnlbl1)
self.btn.SetInitialSize()
def OnUserChoice(self, event):
self.cp.SetExpanderDimensions(*self.GetUserSize(event.GetSelection()))
def GetUserSize(self, selection=None):
if selection is None:
selection = self.gtkChoice.GetSelection()
choice = gtkChoices[selection]
width, height = choice.split(",")
return int(width), int(height)
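    # e.g. GetUserSize(1) -> (4, 8), parsed from the "4, 8" entry in gtkChoices.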
def MakePaneContent(self, pane):
'''Just make a few controls to put on the collapsible pane'''
nameLbl = wx.StaticText(pane, -1, "Name:")
        name = wx.TextCtrl(pane, -1, "")
        addrLbl = wx.StaticText(pane, -1, "Address:")
        addr1 = wx.TextCtrl(pane, -1, "")
        addr2 = wx.TextCtrl(pane, -1, "")
        cstLbl = wx.StaticText(pane, -1, "City, State, Zip:")
        city = wx.TextCtrl(pane, -1, "", size=(150,-1))
        state = wx.TextCtrl(pane, -1, "", size=(50,-1))
        zipCode = wx.TextCtrl(pane, -1, "", size=(70,-1))
addrSizer = wx.FlexGridSizer(cols=2, hgap=5, vgap=5)
addrSizer.AddGrowableCol(1)
addrSizer.Add(nameLbl, 0,
wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL)
addrSizer.Add(name, 0, wx.EXPAND)
addrSizer.Add(addrLbl, 0,
wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL)
addrSizer.Add(addr1, 0, wx.EXPAND)
addrSizer.Add((5,5))
addrSizer.Add(addr2, 0, wx.EXPAND)
addrSizer.Add(cstLbl, 0,
wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL)
cstSizer = wx.BoxSizer(wx.HORIZONTAL)
cstSizer.Add(city, 1)
cstSizer.Add(state, 0, wx.LEFT|wx.RIGHT, 5)
cstSizer.Add(zip)
addrSizer.Add(cstSizer, 0, wx.EXPAND)
border = wx.BoxSizer()
border.Add(addrSizer, 1, wx.EXPAND|wx.ALL, 5)
pane.SetSizer(border)
#----------------------------------------------------------------------
def runTest(frame, nb, log):
win = PyCollapsiblePaneDemo(nb, log)
return win
#----------------------------------------------------------------------
overview = PCP.__doc__
if __name__ == '__main__':
import sys,os
import run
run.main(['', os.path.basename(sys.argv[0])] + sys.argv[1:])
| mit | -2,905,981,874,011,165,700 | 29.88961 | 96 | 0.585979 | false |
jensreeder/scikit-bio | skbio/sequence/_dna.py | 1 | 3982 | # ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
from skbio.util import classproperty, overrides
from ._nucleotide_sequence import NucleotideSequence
from ._iupac_sequence import IUPACSequence
class DNA(NucleotideSequence):
"""Store DNA sequence data and optional associated metadata.
Only characters in the IUPAC DNA character set [1]_ are supported.
Parameters
----------
sequence : str, Sequence, or 1D np.ndarray (np.uint8 or '\|S1')
Characters representing the DNA sequence itself.
id : str, optional
Sequence identifier (e.g., an accession number).
description : str, optional
Description or comment about the sequence (e.g., "green fluorescent
protein").
quality : 1D array_like (int), optional
Phred quality scores stored as nonnegative integers, one per sequence
character. If provided, must be the same length as the DNA sequence.
Can be a 1D ``np.ndarray`` of integers or a structure that can be
converted into this representation using ``np.asarray``. A copy will
*not* be made if `quality` is already a 1D ``np.ndarray`` with an
``int`` ``dtype``. The array will be made read-only (i.e., its
``WRITEABLE`` flag will be set to ``False``).
validate : bool, optional
If ``True``, validation will be performed to ensure that all sequence
characters are in the IUPAC DNA character set. If ``False``, validation
will not be performed. Turning off validation will improve runtime
performance. If invalid characters are present, however, there is
**no guarantee that operations performed on the resulting object will
work or behave as expected.** Only turn off validation if you are
certain that the sequence characters are valid. To store sequence data
that is not IUPAC-compliant, use ``Sequence``.
    case_insensitive : bool, optional
If ``True``, lowercase sequence characters will be converted to
uppercase characters in order to be valid IUPAC DNA characters.
Attributes
----------
id
description
values
quality
alphabet
gap_chars
nondegenerate_chars
degenerate_chars
degenerate_map
complement_map
See Also
--------
RNA
References
----------
.. [1] Nomenclature for incompletely specified bases in nucleic acid
sequences: recommendations 1984.
Nucleic Acids Res. May 10, 1985; 13(9): 3021-3030.
A Cornish-Bowden
Examples
--------
>>> from skbio import DNA
>>> s = DNA('ACCGAAT')
>>> s
DNA('ACCGAAT', length=7)
Convert lowercase characters to uppercase:
>>> s = DNA('AcCGaaT', case_insensitive=True)
>>> s
DNA('ACCGAAT', length=7)
"""
@classproperty
@overrides(NucleotideSequence)
def complement_map(cls):
comp_map = {
'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G', 'Y': 'R', 'R': 'Y',
'S': 'S', 'W': 'W', 'K': 'M', 'M': 'K', 'B': 'V', 'D': 'H',
'H': 'D', 'V': 'B', 'N': 'N'
}
comp_map.update({c: c for c in cls.gap_chars})
return comp_map
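    # Illustrative sketch (not part of the original file): with this map,
    # DNA('ACCGAAT').complement() would yield DNA('TGGCTTA'), assuming the
    # complement() machinery inherited from NucleotideSequence applies
    # complement_map position-wise.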
@classproperty
@overrides(IUPACSequence)
def nondegenerate_chars(cls):
return set("ACGT")
@classproperty
@overrides(IUPACSequence)
def degenerate_map(cls):
return {
"R": set("AG"), "Y": set("CT"), "M": set("AC"), "K": set("TG"),
"W": set("AT"), "S": set("GC"), "B": set("CGT"), "D": set("AGT"),
"H": set("ACT"), "V": set("ACG"), "N": set("ACGT")
}
| bsd-3-clause | -4,151,907,586,631,150,000 | 33.929825 | 79 | 0.590407 | false |
leppa/home-assistant | homeassistant/components/nmbs/sensor.py | 1 | 8775 | """Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
from pyrail import iRail
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
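# Sketch: a departure_time roughly 600 seconds in the future yields 10;
# departure_time=None yields 0.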
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
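# Worked example (illustrative epoch values): departure at t=1000, arrival
# at t=2200 and a 60 second delay give 20 + 1 = 21 minutes.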
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._unique_id = f"nmbs_live_{self._station}"
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
| apache-2.0 | 6,632,218,503,248,311,000 | 30.451613 | 86 | 0.602279 | false |
MainScientist/raspberry-bluetooth | client/main.py | 1 | 1782 | from bluetooth import *
import sys
import json
if sys.version < '3':
input = raw_input
# search for the SampleServer service
uuid = "94f39d29-7d6d-437d-973b-fba39e49d4ee"
update = True
while update:
update = False
    attempts = 0
    service_matches = []
    while attempts < 4:
        service_matches = find_service(uuid=uuid, address=None)
        if len(service_matches) > 0:
            break
        attempts += 1
        print("Could not find server. Trying again... ({})".format(attempts))
    if not service_matches:
        print("Could not find server, giving up.")
        sys.exit(1)
    first_match = service_matches[0]
port = first_match["port"]
name = first_match["name"]
host = first_match["host"]
print("connecting to \"%s\" on %s" % (name, host))
# Create the client socket
sock=BluetoothSocket(RFCOMM)
sock.connect((host, port))
print("connected.")
while True:
action, *args = input("> ").split(" ")
if len(action) == 0 or action == "exit": break
sock.send(json.dumps({"action": action, "args": args}).encode("utf-8"))
if action == "shut_down": break
msg = sock.recv(1024*10).decode("utf-8")
print(msg)
response = json.loads(msg)
if "error" in response:
print(response["error"])
else:
if action == "list":
for e in response["value"]:
print(e)
elif action == "test":
print(response["value"])
elif action == "connect":
print(response["value"])
elif action == "ifconfig":
print(response["value"])
elif action == "exec":
print(response["value"])
elif action == "update":
print(response["value"])
update = True
break
sock.close()
| mit | -7,932,918,487,926,437,000 | 28.213115 | 79 | 0.532548 | false |
apache/incubator-airflow | airflow/contrib/hooks/azure_container_instance_hook.py | 7 | 1252 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated.
Please use `airflow.providers.microsoft.azure.hooks.azure_container_instance`.
"""
import warnings
# pylint: disable=unused-import
from airflow.providers.microsoft.azure.hooks.azure_container_instance import ( # noqa
AzureContainerInstanceHook,
)
warnings.warn(
"This module is deprecated. "
"Please use `airflow.providers.microsoft.azure.hooks.azure_container_instance`.",
DeprecationWarning,
stacklevel=2,
)
| apache-2.0 | -6,046,939,303,062,299,000 | 34.771429 | 86 | 0.766773 | false |
anandbhoraskar/Diamond | src/collectors/nagios/test/testnagios.py | 31 | 2540 | #!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from nagios import NagiosStatsCollector
##########################################################################
class TestNagiosStatsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NagiosStatsCollector', {
'interval': 10,
'bin': 'true',
'use_sudo': False
})
self.collector = NagiosStatsCollector(config, None)
def test_import(self):
self.assertTrue(NagiosStatsCollector)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('nagiostat').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
metrics = {
'AVGACTHSTLAT': 196,
'AVGACTSVCLAT': 242,
'AVGACTHSTEXT': 4037,
'AVGACTSVCEXT': 340,
'NUMHSTUP': 63,
'NUMHSTDOWN': 0,
'NUMHSTUNR': 0,
'NUMSVCOK': 1409,
'NUMSVCWARN': 3,
'NUMSVCUNKN': 0,
'NUMSVCCRIT': 7,
'NUMHSTACTCHK5M': 56,
'NUMHSTPSVCHK5M': 0,
'NUMSVCACTCHK5M': 541,
'NUMSVCPSVCHK5M': 0,
'NUMACTHSTCHECKS5M': 56,
'NUMOACTHSTCHECKS5M': 1,
'NUMCACHEDHSTCHECKS5M': 1,
'NUMSACTHSTCHECKS5M': 55,
'NUMPARHSTCHECKS5M': 55,
'NUMSERHSTCHECKS5M': 0,
'NUMPSVHSTCHECKS5M': 0,
'NUMACTSVCCHECKS5M': 1101,
'NUMOACTSVCCHECKS5M': 0,
'NUMCACHEDSVCCHECKS5M': 0,
'NUMSACTSVCCHECKS5M': 1101,
'NUMPSVSVCCHECKS5M': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| mit | -428,946,765,868,991,300 | 30.358025 | 74 | 0.511024 | false |
gcrahay/fir_irma_plugin | fir_irma/decorators.py | 1 | 4916 | from functools import wraps
from uuid import UUID
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
from django.utils.six.moves.urllib.parse import urlparse
from django.shortcuts import resolve_url, redirect
from fir_irma.models import IrmaScan
from fir_irma.utils import process_error, ERROR_NOT_FOUND, ERROR_UNAUTHORIZED
def user_is_owner_or_privileged(login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
    Decorator for views that checks that the user is the owner of the scan or privileged,
redirecting to the log-in page if necessary. The request must have a scan_id parameter.
"""
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if 'scan_id' in kwargs:
scan_id = UUID(kwargs.get('scan_id'))
try:
scan = IrmaScan.objects.get(irma_scan=scan_id)
except IrmaScan.DoesNotExist:
return process_error(request, error=ERROR_NOT_FOUND)
if (request.user == scan.user and request.user.has_perm('fir_irma.scan_files')) or \
request.user.has_perm('fir_irma.read_all_results'):
kwargs['scan'] = scan
return view_func(request, *args, **kwargs)
elif settings.IRMA_ANONYMOUS_SCAN and settings.IRMA_IS_STANDALONE:
if 'scan_id' in kwargs:
scan_id = UUID(kwargs.get('scan_id'))
client_ip = get_ip(request)
try:
scan = IrmaScan.objects.get(irma_scan=scan_id, client_ip=client_ip)
kwargs['scan'] = scan
return view_func(request, *args, **kwargs)
except IrmaScan.DoesNotExist:
return process_error(request, error=ERROR_NOT_FOUND)
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
path, resolved_login_url, redirect_field_name)
_wrapped_view.csrf_exempt = True
return _wrapped_view
return decorator
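# Illustrative usage (hypothetical view; the decorator resolves scan_id and
# passes the matching IrmaScan in as the `scan` kwarg):
#
# @user_is_owner_or_privileged()
# def scan_results(request, scan_id, scan=None):
#     ...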
def login_and_perm_required(perm, login_url=None, unprivileged_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that checks that the user is authenticated and has permission,
redirecting to the log-in page if necessary.
"""
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if not isinstance(perm, (list, tuple)):
perms = (perm, )
else:
perms = perm
if request.user.has_perms(perms):
return view_func(request, *args, **kwargs)
if unprivileged_url is not None:
return redirect(unprivileged_url)
return process_error(request, error=ERROR_UNAUTHORIZED)
elif settings.IRMA_ANONYMOUS_SCAN and settings.IRMA_IS_STANDALONE:
return view_func(request, *args, **kwargs)
else:
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
path, resolved_login_url, redirect_field_name)
_wrapped_view.csrf_exempt = True
return _wrapped_view
return decorator
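# Illustrative usage (hypothetical view; 'fir_irma.scan_files' is a
# permission already referenced above):
#
# @login_and_perm_required('fir_irma.scan_files')
# def new_scan(request):
#     ...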
| apache-2.0 | 1,218,710,751,414,903,000 | 49.163265 | 114 | 0.592148 | false |
KalVentures/AlertNotifications | setup.py | 1 | 1169 | #!/usr/bin/env python
import ConfigParser
import os
path = os.path.dirname(os.path.abspath(__file__))
Config = ConfigParser.ConfigParser()
cfgfile = open(path+"/config.ini",'w')
serverName = raw_input( 'Enter server name : ')
serverEmail = raw_input( 'Server Email : ')
serverPassword = raw_input('Server Email Password : ')
alertEmail = raw_input( 'Email to send Alerts to: ')
Config.add_section('ServerEmail')
Config.add_section('AlertEmail')
Config.add_section('AlertInformation')
Config.set('ServerEmail','email',serverEmail)
Config.set('ServerEmail','password',serverPassword)
Config.set('ServerEmail','name',serverName)
Config.set('AlertEmail','email',alertEmail)
Config.set('AlertInformation','ip',"0.0.0.0")
Config.set('AlertInformation','lastReboot', "")
#add any information you want sent along with IP:
#Config.set('AlertInformation','App_port',"22")
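# For illustration, the resulting config.ini looks roughly like this
# (values are hypothetical; note ConfigParser lower-cases option names):
#
# [ServerEmail]
# email = [email protected]
# password = hunter2
# name = myserver
# [AlertEmail]
# email = [email protected]
# [AlertInformation]
# ip = 0.0.0.0
# lastreboot =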
Config.write(cfgfile)
cfgfile.close()
print("Set up crontab with the following path:")
print(path+"/alertNotifications.py\n")
print("Example for 5 interval:")
print("*/5 * * * * python "+path+"/alertNotifications.py\n")
| mit | 6,012,178,676,631,908,000 | 27.512195 | 60 | 0.712575 | false |
RequireSun/spider_888 | main.py | 1 | 1687 | import unittest
import os
import codecs
from functools import reduce
from selenium import webdriver
from bs4 import BeautifulSoup
import lxml  # noqa: F401 -- BeautifulSoup's 'xml' parser requires lxml
class SeleniumTest(unittest.TestCase):
def setUp(self):
        self.driver = webdriver.PhantomJS(r"D:\Program_Coding\phantomjs\bin\phantomjs")
def testEle(self):
if not os.path.exists('./output'):
os.makedirs('./output')
self.rooms = codecs.open("./output/douyu.txt", "w", "utf-8")
page = 0
driver = self.driver
driver.get('http://www.douyu.com/directory/all')
soup = BeautifulSoup(driver.page_source, 'xml')
while True:
page += 1
print('parsing page:', page)
imgs = soup.find_all('img')
# nums = soup.find_all('span', {'class': 'dy-num fr'})
rooms = []
for img in imgs:
rooms.append(img.get("src").strip())
# for title, num in zip(titles, nums):
# rooms.append(title.get_text().strip() + "\t" + num.get_text().strip())
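            # de-duplicate the collected image URLs while preserving order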
rooms = reduce(lambda arr, item: arr if item in arr else arr + [item], [[], ] + rooms)
self.rooms.writelines([line + "\r\n" for line in rooms])
if driver.page_source.find('shark-pager-disable-next') != -1:
break
elem = driver.find_element_by_class_name('shark-pager-next')
elem.click()
soup = BeautifulSoup(driver.page_source, 'xml')
def tearDown(self):
# for item in self.rooms:
# print(item['title'], item['text'])
        self.rooms.close()
        self.driver.quit()
        print('down')
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | 6,909,504,654,843,624,000 | 33.428571 | 98 | 0.558388 | false |
Hitachi-Data-Systems/org-chart-builder | pptx/shapes/table.py | 1 | 11385 | # encoding: utf-8
"""
Table-related objects such as Table and Cell.
"""
from . import Subshape
from ..dml.fill import FillFormat
from ..text import TextFrame
from ..util import lazyproperty, to_unicode
class Table(object):
"""
A table shape. Not intended to be constructed directly, use
:meth:`.Slide.shapes.add_table` to add a table to a slide.
"""
def __init__(self, tbl, graphic_frame):
super(Table, self).__init__()
self._tbl = tbl
self._graphic_frame = graphic_frame
def cell(self, row_idx, col_idx):
"""
Return table cell at *row_idx*, *col_idx* location. Indexes are
zero-based, e.g. cell(0, 0) is the top, left cell.
"""
row = self.rows[row_idx]
return row.cells[col_idx]
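    # Usage sketch (assumes a table added via shapes.add_table(), as noted
    # in the class docstring):
    #   cell = table.cell(0, 0)
    #   cell.text = 'heading'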
@lazyproperty
def columns(self):
"""
Read-only reference to collection of |_Column| objects representing
the table's columns. |_Column| objects are accessed using list
notation, e.g. ``col = tbl.columns[0]``.
"""
return _ColumnCollection(self._tbl, self)
@property
def first_col(self):
"""
Read/write boolean property which, when true, indicates the first
column should be formatted differently, as for a side-heading column
at the far left of the table.
"""
return self._tbl.firstCol
@first_col.setter
def first_col(self, value):
self._tbl.firstCol = value
@property
def first_row(self):
"""
Read/write boolean property which, when true, indicates the first
row should be formatted differently, e.g. for column headings.
"""
return self._tbl.firstRow
@first_row.setter
def first_row(self, value):
self._tbl.firstRow = value
@property
def horz_banding(self):
"""
Read/write boolean property which, when true, indicates the rows of
the table should appear with alternating shading.
"""
return self._tbl.bandRow
@horz_banding.setter
def horz_banding(self, value):
self._tbl.bandRow = value
@property
def last_col(self):
"""
Read/write boolean property which, when true, indicates the last
column should be formatted differently, as for a row totals column at
the far right of the table.
"""
return self._tbl.lastCol
@last_col.setter
def last_col(self, value):
self._tbl.lastCol = value
@property
def last_row(self):
"""
Read/write boolean property which, when true, indicates the last
row should be formatted differently, as for a totals row at the
bottom of the table.
"""
return self._tbl.lastRow
@last_row.setter
def last_row(self, value):
self._tbl.lastRow = value
def notify_height_changed(self):
"""
Called by a row when its height changes, triggering the graphic frame
to recalculate its total height (as the sum of the row heights).
"""
new_table_height = sum([row.height for row in self.rows])
self._graphic_frame.height = new_table_height
def notify_width_changed(self):
"""
Called by a column when its width changes, triggering the graphic
frame to recalculate its total width (as the sum of the column
widths).
"""
new_table_width = sum([col.width for col in self.columns])
self._graphic_frame.width = new_table_width
@property
def part(self):
"""
The package part containing this table.
"""
return self._graphic_frame.part
@lazyproperty
def rows(self):
"""
Read-only reference to collection of |_Row| objects representing the
table's rows. |_Row| objects are accessed using list notation, e.g.
``col = tbl.rows[0]``.
"""
return _RowCollection(self._tbl, self)
@property
def vert_banding(self):
"""
Read/write boolean property which, when true, indicates the columns
of the table should appear with alternating shading.
"""
return self._tbl.bandCol
@vert_banding.setter
def vert_banding(self, value):
self._tbl.bandCol = value
class _Cell(Subshape):
"""
Table cell
"""
def __init__(self, tc, parent):
super(_Cell, self).__init__(parent)
self._tc = tc
@lazyproperty
def fill(self):
"""
|FillFormat| instance for this cell, providing access to fill
properties such as foreground color.
"""
tcPr = self._tc.get_or_add_tcPr()
return FillFormat.from_fill_parent(tcPr)
@property
def margin_left(self):
"""
Read/write integer value of left margin of cell as a |BaseLength|
value object. If assigned |None|, the default value is used, 0.1
inches for left and right margins and 0.05 inches for top and bottom.
"""
return self._tc.marL
@margin_left.setter
def margin_left(self, margin_left):
self._validate_margin_value(margin_left)
self._tc.marL = margin_left
@property
def margin_right(self):
"""
Right margin of cell.
"""
return self._tc.marR
@margin_right.setter
def margin_right(self, margin_right):
self._validate_margin_value(margin_right)
self._tc.marR = margin_right
@property
def margin_top(self):
"""
Top margin of cell.
"""
return self._tc.marT
@margin_top.setter
def margin_top(self, margin_top):
self._validate_margin_value(margin_top)
self._tc.marT = margin_top
@property
def margin_bottom(self):
"""
Bottom margin of cell.
"""
return self._tc.marB
@margin_bottom.setter
def margin_bottom(self, margin_bottom):
self._validate_margin_value(margin_bottom)
self._tc.marB = margin_bottom
def text(self, text):
"""
Replace all text in cell with single run containing *text*
"""
self.textframe.text = to_unicode(text)
#: Write-only. Assignment to *text* replaces all text currently contained
#: in the cell, resulting in a text frame containing exactly one
#: paragraph, itself containing a single run. The assigned value can be a
#: 7-bit ASCII string, a UTF-8 encoded 8-bit string, or unicode. String
#: values are converted to unicode assuming UTF-8 encoding.
text = property(None, text)
@property
def textframe(self):
"""
|TextFrame| instance containing the text that appears in the cell.
"""
txBody = self._tc.get_or_add_txBody()
return TextFrame(txBody, self)
@property
def vertical_anchor(self):
"""
Vertical anchor of this table cell, determines the vertical alignment
of text in the cell. Value is like ``MSO_ANCHOR.MIDDLE``. Can be
|None|, meaning the cell has no vertical anchor setting and its
effective value is inherited from a higher-level object.
"""
return self._tc.anchor
@vertical_anchor.setter
def vertical_anchor(self, mso_anchor_idx):
"""
Set vertical_anchor of this cell to *vertical_anchor*, a constant
value like ``MSO_ANCHOR.MIDDLE``. If *vertical_anchor* is |None|, any
vertical anchor setting is cleared and its effective value is
inherited.
"""
self._tc.anchor = mso_anchor_idx
@staticmethod
def _validate_margin_value(margin_value):
"""
        Raise TypeError if *margin_value* is not an integer value or
        |None|.
"""
if (not isinstance(margin_value, (int, long))
and margin_value is not None):
tmpl = "margin value must be integer or None, got '%s'"
raise TypeError(tmpl % margin_value)
class _Column(Subshape):
"""
Table column
"""
def __init__(self, gridCol, parent):
super(_Column, self).__init__(parent)
self._gridCol = gridCol
@property
def width(self):
"""
Width of column in EMU.
"""
return self._gridCol.w
@width.setter
def width(self, width):
self._gridCol.w = width
self._parent.notify_width_changed()
class _Row(Subshape):
"""
Table row
"""
def __init__(self, tr, parent):
super(_Row, self).__init__(parent)
self._tr = tr
@property
def cells(self):
"""
Read-only reference to collection of cells in row. An individual cell
is referenced using list notation, e.g. ``cell = row.cells[0]``.
"""
return _CellCollection(self._tr, self)
@property
def height(self):
"""
Height of row in EMU.
"""
return self._tr.h
@height.setter
def height(self, height):
self._tr.h = height
self._parent.notify_height_changed()
class _CellCollection(Subshape):
"""
"Horizontal" sequence of row cells
"""
def __init__(self, tr, parent):
super(_CellCollection, self).__init__(parent)
self._tr = tr
def __getitem__(self, idx):
"""
Provides indexed access, (e.g. 'cells[0]').
"""
if idx < 0 or idx >= len(self._tr.tc_lst):
msg = "cell index [%d] out of range" % idx
raise IndexError(msg)
return _Cell(self._tr.tc_lst[idx], self)
def __len__(self):
"""
Supports len() function (e.g. 'len(cells) == 1').
"""
return len(self._tr.tc_lst)
class _ColumnCollection(Subshape):
"""
Sequence of table columns.
"""
def __init__(self, tbl, parent):
super(_ColumnCollection, self).__init__(parent)
self._tbl = tbl
def __getitem__(self, idx):
"""
Provides indexed access, (e.g. 'columns[0]').
"""
if idx < 0 or idx >= len(self._tbl.tblGrid.gridCol_lst):
msg = "column index [%d] out of range" % idx
raise IndexError(msg)
return _Column(self._tbl.tblGrid.gridCol_lst[idx], self)
def __len__(self):
"""
Supports len() function (e.g. 'len(columns) == 1').
"""
return len(self._tbl.tblGrid.gridCol_lst)
def notify_width_changed(self):
"""
Called by a column when its width changes. Pass along to parent.
"""
self._parent.notify_width_changed()
class _RowCollection(Subshape):
"""
Sequence of table rows.
"""
def __init__(self, tbl, parent):
super(_RowCollection, self).__init__(parent)
self._tbl = tbl
def __getitem__(self, idx):
"""
Provides indexed access, (e.g. 'rows[0]').
"""
if idx < 0 or idx >= len(self):
msg = "row index [%d] out of range" % idx
raise IndexError(msg)
return _Row(self._tbl.tr_lst[idx], self)
def __len__(self):
"""
Supports len() function (e.g. 'len(rows) == 1').
"""
return len(self._tbl.tr_lst)
def notify_height_changed(self):
"""
Called by a row when its height changes. Pass along to parent.
"""
self._parent.notify_height_changed()
| apache-2.0 | -2,436,521,445,867,053,000 | 27.4625 | 77 | 0.580061 | false |
tecan/xchat-rt | plugins/scripts/Supybot-0.83.4.1-bitcoinotc-bot/build/lib/supybot/plugins/Scheduler/config.py | 15 | 2370 | ###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('Scheduler', True)
Scheduler = conf.registerPlugin('Scheduler')
# This is where your configuration variables (if any) should go. For example:
# conf.registerGlobalValue(Scheduler, 'someConfigVariableName',
# registry.Boolean(False, """Help for someConfigVariableName."""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| gpl-2.0 | 2,024,887,460,731,238,000 | 48.375 | 79 | 0.76962 | false |
susam/taskplot | taskplot/test/__init__.py | 1 | 1553 | # Copyright (c) 2014 Susam Pal
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for taskplot package
Modules:
taskplot.test.test_taskplot -- Tests for taskplot package
"""
__version__ = '0.1.2'
__date__ = '5 March 2014'
__author__ = 'Susam Pal <[email protected]>'
| bsd-2-clause | 6,228,962,082,598,472,000 | 40.972973 | 72 | 0.758532 | false |
Lilykos/invenio | invenio/modules/communities/bundles.py | 4 | 1135 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Communities bundles."""
from __future__ import unicode_literals
from invenio.ext.assets import Bundle
js = Bundle(
"js/communities/custom.js",
filters="uglifyjs",
output="communities.js",
weight=91
)
styles = Bundle(
"css/communities/communities.less",
filters="less,cleancss",
output="communities.css",
weight=91
)
| gpl-2.0 | -6,493,565,547,725,836,000 | 28.102564 | 74 | 0.723348 | false |
damonkelley/django-name | tests/test_feeds.py | 1 | 2433 | import pytest
from django.core.urlresolvers import reverse
from name.feeds import NameAtomFeed
from name.models import Name
# Give all tests access to the database.
pytestmark = pytest.mark.django_db
def test_feed_has_georss_namespace(rf):
"""Check that the georss namespace is present in the response
content.
"""
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert 'xmlns:georss' in response.content
def test_feed_response_is_application_xml(rf):
"""Verify the Content-Type header is set to `application/xml`."""
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert response['Content-Type'] == 'application/xml'
def test_feed_item_with_location(rf):
"""Verify that the response returns ok when objects with locations
are present in the feed.
"""
name = Name.objects.create(name="Test", name_type=Name.PERSONAL)
name.location_set.create(latitude=33.210241, longitude=-97.148857)
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert response.status_code == 200
def test_feed_with_item_without_location(rf):
"""Verify that the response returns ok when objects without
locations are present in the feed.
"""
Name.objects.create(name="Test", name_type=Name.PERSONAL)
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert response.status_code == 200
def test_feed_item_with_location_has_georss_element(rf):
"""Verify that the <georss:point> element is present for the
feed entry.
"""
name = Name.objects.create(name="Test", name_type=Name.PERSONAL)
name.location_set.create(latitude=33.210241, longitude=-97.148857)
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert '<georss:point>' in response.content
assert name.location_set.current_location.geo_point() in response.content
def test_feed_item_without_location_does_not_have_georss_element(rf):
"""Verify that the <georss:point> element is not present for the
feed entry.
"""
Name.objects.create(name="Test", name_type=Name.PERSONAL)
request = rf.get(reverse('name:feed'))
feed = NameAtomFeed()
response = feed(request)
assert '<georss:point>' not in response.content
| bsd-3-clause | 1,013,191,295,262,192,400 | 28.670732 | 77 | 0.69626 | false |
jkyeung/XlsxWriter | xlsxwriter/test/worksheet/test_extract_filter_tokens.py | 1 | 2714 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestExtractFilterTokens(unittest.TestCase):
"""
Test the Worksheet _extract_filter_tokens() method.
"""
def setUp(self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_extract_filter_tokens(self):
"""Test the _extract_filter_tokens() method"""
testcases = [
[
None,
[],
],
[
'',
[],
],
[
'0 < 2001',
['0', '<', '2001'],
],
[
'x < 2000',
['x', '<', '2000'],
],
[
'x > 2000',
['x', '>', '2000'],
],
[
'x == 2000',
['x', '==', '2000'],
],
[
'x > 2000 and x < 5000',
['x', '>', '2000', 'and', 'x', '<', '5000'],
],
[
'x = "goo"',
['x', '=', 'goo'],
],
[
'x = moo',
['x', '=', 'moo'],
],
[
'x = "foo baz"',
['x', '=', 'foo baz'],
],
[
'x = "moo "" bar"',
['x', '=', 'moo " bar'],
],
[
'x = "foo bar" or x = "bar foo"',
['x', '=', 'foo bar', 'or', 'x', '=', 'bar foo'],
],
[
'x = "foo "" bar" or x = "bar "" foo"',
['x', '=', 'foo " bar', 'or', 'x', '=', 'bar " foo'],
],
[
'x = """"""""',
['x', '=', '"""'],
],
[
'x = Blanks',
['x', '=', 'Blanks'],
],
[
'x = NonBlanks',
['x', '=', 'NonBlanks'],
],
[
'top 10 %',
['top', '10', '%'],
],
[
'top 10 items',
['top', '10', 'items'],
],
]
for testcase in testcases:
expression = testcase[0]
exp = testcase[1]
got = self.worksheet._extract_filter_tokens(expression)
self.assertEqual(got, exp)
| bsd-2-clause | 8,725,232,953,985,692,000 | 20.712 | 79 | 0.271186 | false |
NewpTone/stacklab-nova | debian/python-nova/usr/share/pyshared/nova/virt/powervm/common.py | 9 | 3935 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 IBM
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ftplib
import os
import paramiko
from nova import exception as nova_exception
from nova.openstack.common import log as logging
from nova.virt.powervm import exception
LOG = logging.getLogger(__name__)
class Connection(object):
def __init__(self, host, username, password, port=22):
self.host = host
self.username = username
self.password = password
self.port = port
def ssh_connect(connection):
"""Method to connect to remote system using ssh protocol.
:param connection: a Connection object.
:returns: paramiko.SSHClient -- an active ssh connection.
:raises: PowerVMConnectionFailed
"""
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(connection.host,
username=connection.username,
password=connection.password,
port=connection.port)
return ssh
except Exception:
LOG.exception(_('Connection error connecting PowerVM manager'))
raise exception.PowerVMConnectionFailed()
def ssh_command_as_root(ssh_connection, cmd, check_exit_code=True):
"""Method to execute remote command as root.
    :param ssh_connection: an active paramiko.SSHClient connection.
    :param cmd: string containing the command to run.
:returns: Tuple -- a tuple of (stdout, stderr)
:raises: nova.exception.ProcessExecutionError
"""
chan = ssh_connection._transport.open_session()
# This command is required to be executed
# in order to become root.
chan.exec_command('ioscli oem_setup_env')
bufsize = -1
stdin = chan.makefile('wb', bufsize)
stdout = chan.makefile('rb', bufsize)
stderr = chan.makefile_stderr('rb', bufsize)
# We run the command and then call 'exit' to exit from
# super user environment.
stdin.write('%s\n%s\n' % (cmd, 'exit'))
stdin.flush()
exit_status = chan.recv_exit_status()
# Lets handle the error just like nova.utils.ssh_execute does.
if exit_status != -1:
LOG.debug(_('Result was %s') % exit_status)
if check_exit_code and exit_status != 0:
raise nova_exception.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return (stdout, stderr)
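# Usage sketch (hypothetical host and credentials; the 'ioscli oem_setup_env'
# escalation happens inside ssh_command_as_root itself):
#   conn = Connection('powervm.example.com', 'padmin', 'secret')
#   ssh = ssh_connect(conn)
#   out, err = ssh_command_as_root(ssh, 'lsvg rootvg')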
def ftp_put_command(connection, local_path, remote_dir):
"""Method to transfer a file via ftp.
:param connection: a Connection object.
:param local_path: path to the local file
:param remote_dir: path to remote destination
:raises: PowerVMFileTransferFailed
"""
try:
ftp = ftplib.FTP(host=connection.host,
user=connection.username,
passwd=connection.password)
ftp.cwd(remote_dir)
name = os.path.split(local_path)[1]
f = open(local_path, "rb")
ftp.storbinary("STOR " + name, f)
f.close()
ftp.close()
except Exception:
LOG.exception(_('File transfer to PowerVM manager failed'))
raise exception.PowerVMFileTransferFailed(file_path=local_path)
| apache-2.0 | -1,958,306,552,171,396,400 | 34.133929 | 78 | 0.638374 | false |
yujikato/DIRAC | src/DIRAC/Core/scripts/dirac_info.py | 2 | 3315 | #!/usr/bin/env python
########################################################################
# File : dirac-info
# Author : Andrei Tsaregorodtsev
########################################################################
"""
Report info about local DIRAC installation
Example:
$ dirac-info
Option Value
============================
Setup Dirac-Production
ConfigurationServer dips://ccdiracli08.in2p3.fr:9135/Configuration/Server
Installation path /opt/dirac/versions/v7r2-pre33_1613239204
Installation type client
Platform Linux_x86_64_glibc-2.17
VirtualOrganization dteam
User DN /DC=org/DC=ugrid/O=people/O=BITP/CN=Andrii Lytovchenko
Proxy validity, secs 0
Use Server Certificate Yes
Skip CA Checks No
DIRAC version v7r2-pre33
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
__RCSID__ = "$Id$"
from DIRAC.Core.Utilities.DIRACScript import DIRACScript
@DIRACScript()
def main():
import os
import DIRAC
from DIRAC import gConfig
from DIRAC.Core.Base import Script
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup
from DIRAC.Core.Utilities.PrettyPrint import printTable
def version(arg):
Script.disableCS()
print(DIRAC.version)
DIRAC.exit(0)
def platform(arg):
Script.disableCS()
print(DIRAC.getPlatform())
DIRAC.exit(0)
Script.registerSwitch("v", "version", "print version of current DIRAC installation", version)
Script.registerSwitch("p", "platform", "print platform of current DIRAC installation", platform)
Script.parseCommandLine(ignoreErrors=True)
records = []
records.append(('Setup', gConfig.getValue('/DIRAC/Setup', 'Unknown')))
records.append(('ConfigurationServer', gConfig.getValue('/DIRAC/Configuration/Servers', [])))
records.append(('Installation path', DIRAC.rootPath))
if os.path.exists(os.path.join(DIRAC.rootPath, DIRAC.getPlatform(), 'bin', 'mysql')):
records.append(('Installation type', 'server'))
else:
records.append(('Installation type', 'client'))
records.append(('Platform', DIRAC.getPlatform()))
ret = getProxyInfo(disableVOMS=True)
if ret['OK']:
if 'group' in ret['Value']:
vo = getVOForGroup(ret['Value']['group'])
else:
vo = getVOForGroup('')
if not vo:
vo = "None"
records.append(('VirtualOrganization', vo))
if 'identity' in ret['Value']:
records.append(('User DN', ret['Value']['identity']))
if 'secondsLeft' in ret['Value']:
records.append(('Proxy validity, secs', {'Value': str(ret['Value']['secondsLeft']), 'Just': 'L'}))
if gConfig.getValue('/DIRAC/Security/UseServerCertificate', True):
records.append(('Use Server Certificate', 'Yes'))
else:
records.append(('Use Server Certificate', 'No'))
if gConfig.getValue('/DIRAC/Security/SkipCAChecks', False):
records.append(('Skip CA Checks', 'Yes'))
else:
records.append(('Skip CA Checks', 'No'))
records.append(('DIRAC version', DIRAC.version))
fields = ['Option', 'Value']
print()
printTable(fields, records, numbering=False)
print()
if __name__ == "__main__":
main()
| gpl-3.0 | 5,530,768,076,849,534,000 | 29.981308 | 104 | 0.641327 | false |
qk4l/Flexget | flexget/plugins/modify/convert_magnet.py | 4 | 4421 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import os
import time
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils.tools import parse_timedelta
from flexget.utils.pathscrub import pathscrub
log = logging.getLogger('convert_magnet')
class ConvertMagnet(object):
"""Convert magnet only entries to a torrent file"""
schema = {
"oneOf": [
# Allow convert_magnet: no form to turn off plugin altogether
{"type": "boolean"},
{
"type": "object",
"properties": {
"timeout": {"type": "string", "format": "interval"},
"force": {"type": "boolean"}
},
"additionalProperties": False
}
]
}
def magnet_to_torrent(self, magnet_uri, destination_folder, timeout):
import libtorrent
params = libtorrent.parse_magnet_uri(magnet_uri)
session = libtorrent.session()
lt_version = [int(v) for v in libtorrent.version.split('.')]
        if lt_version > [0, 16, 13, 0]:
# for some reason the info_hash needs to be bytes but it's a struct called sha1_hash
params['info_hash'] = params['info_hash'].to_bytes()
handle = libtorrent.add_magnet_uri(session, magnet_uri, params)
log.debug('Acquiring torrent metadata for magnet %s', magnet_uri)
timeout_value = timeout
while not handle.has_metadata():
time.sleep(0.1)
timeout_value -= 0.1
if timeout_value <= 0:
raise plugin.PluginError('Timed out after {} seconds trying to magnetize'.format(timeout))
log.debug('Metadata acquired')
torrent_info = handle.get_torrent_info()
torrent_file = libtorrent.create_torrent(torrent_info)
torrent_path = pathscrub(os.path.join(destination_folder, torrent_info.name() + ".torrent"))
with open(torrent_path, "wb") as f:
f.write(libtorrent.bencode(torrent_file.generate()))
log.debug('Torrent file wrote to %s', torrent_path)
return torrent_path
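    # Call sketch (hypothetical magnet URI; the real caller below passes the
    # entry URL, the task's 'converted' directory and the configured timeout):
    #   self.magnet_to_torrent('magnet:?xt=urn:btih:<infohash>', '/tmp', 30)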
def prepare_config(self, config):
if not isinstance(config, dict):
config = {}
config.setdefault('timeout', '30 seconds')
config.setdefault('force', False)
return config
@plugin.priority(255)
def on_task_start(self, task, config):
if config is False:
return
try:
import libtorrent # noqa
except ImportError:
raise plugin.DependencyError('convert_magnet', 'libtorrent', 'libtorrent package required', log)
@plugin.priority(130)
def on_task_download(self, task, config):
if config is False:
return
config = self.prepare_config(config)
# Create the conversion target directory
converted_path = os.path.join(task.manager.config_base, 'converted')
timeout = parse_timedelta(config['timeout']).total_seconds()
if not os.path.isdir(converted_path):
os.mkdir(converted_path)
for entry in task.accepted:
if entry['url'].startswith('magnet:'):
entry.setdefault('urls', [entry['url']])
try:
log.info('Converting entry {} magnet URI to a torrent file'.format(entry['title']))
torrent_file = self.magnet_to_torrent(entry['url'], converted_path, timeout)
except (plugin.PluginError, TypeError) as e:
log.error('Unable to convert Magnet URI for entry %s: %s', entry['title'], e)
if config['force']:
entry.fail('Magnet URI conversion failed')
continue
# Windows paths need an extra / prepended to them for url
if not torrent_file.startswith('/'):
torrent_file = '/' + torrent_file
entry['url'] = torrent_file
entry['file'] = torrent_file
# make sure it's first in the list because of how download plugin works
entry['urls'].insert(0, 'file://{}'.format(torrent_file))
@event('plugin.register')
def register_plugin():
plugin.register(ConvertMagnet, 'convert_magnet', api_ver=2)
| mit | 6,655,577,053,572,856,000 | 39.559633 | 108 | 0.59059 | false |
wbcyclist/django-xadmin | xadmin/plugins/details.py | 4 | 2923 |
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db import models
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView
class DetailsPlugin(BaseAdminPlugin):
show_detail_fields = []
show_all_rel_details = True
def result_item(self, item, obj, field_name, row):
if (self.show_all_rel_details or (field_name in self.show_detail_fields)):
rel_obj = None
if hasattr(item.field, 'rel') and isinstance(item.field.rel, models.ManyToOneRel):
rel_obj = getattr(obj, field_name)
elif field_name in self.show_detail_fields:
rel_obj = obj
if rel_obj:
if rel_obj.__class__ in site._registry:
try:
model_admin = site._registry[rel_obj.__class__]
has_view_perm = model_admin(self.admin_view.request).has_view_permission(rel_obj)
has_change_perm = model_admin(self.admin_view.request).has_change_permission(rel_obj)
except:
has_view_perm = self.admin_view.has_model_perm(rel_obj.__class__, 'view')
has_change_perm = self.has_model_perm(rel_obj.__class__, 'change')
else:
has_view_perm = self.admin_view.has_model_perm(rel_obj.__class__, 'view')
has_change_perm = self.has_model_perm(rel_obj.__class__, 'change')
if rel_obj and has_view_perm:
opts = rel_obj._meta
try:
item_res_uri = reverse(
'%s:%s_%s_detail' % (self.admin_site.app_name,
opts.app_label, opts.module_name),
args=(getattr(rel_obj, opts.pk.attname),))
if item_res_uri:
if has_change_perm:
edit_url = reverse(
'%s:%s_%s_change' % (self.admin_site.app_name, opts.app_label, opts.module_name),
args=(getattr(rel_obj, opts.pk.attname),))
else:
edit_url = ''
item.btns.append('<a data-res-uri="%s" data-edit-uri="%s" class="details-handler" rel="tooltip" title="%s"><i class="fa fa-info-sign"></i></a>'
% (item_res_uri, edit_url, _(u'Details of %s') % str(rel_obj)))
except NoReverseMatch:
pass
return item
# Media
def get_media(self, media):
if self.show_all_rel_details or self.show_detail_fields:
media = media + self.vendor('xadmin.plugin.details.js', 'xadmin.form.css')
return media
site.register_plugin(DetailsPlugin, ListAdminView)
| bsd-3-clause | -283,898,404,691,465,000 | 45.396825 | 167 | 0.522066 | false |
rakshit-agrawal/sonnet | sonnet/python/modules/base_info_test.py | 1 | 11356 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sonnet.python.modules.base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from sonnet.python.modules import base
from sonnet.python.modules import base_info
from sonnet.python.modules import basic
import tensorflow as tf
from tensorflow.python.util import nest
logging = tf.logging
THIS_MODULE = "__main__"
LINEAR_MODULE = "sonnet.python.modules.basic"
DumbNamedTuple = collections.namedtuple("DumbNamedTuple", ("arg1", "arg2"))
class NotATensor(object):
pass
class DumbModule(base.AbstractModule):
"""Dumb module to test ModuleInfo."""
def __init__(self, name, no_nest=False):
base.AbstractModule.__init__(self, name=name)
self.no_nest = no_nest
def _build(self, inputs):
if isinstance(inputs, (NotATensor, tf.SparseTensor)):
outputs = inputs
else:
if self.no_nest:
outputs = inputs
else:
outputs = nest.map_structure(tf.identity, inputs)
return outputs
def _copy_default_graph():
# Save default graph into `meta_graph_def`.
meta_graph_def = tf.train.export_meta_graph()
# Reset default graph.
tf.reset_default_graph()
# Load default graph from `meta_graph_def`.
tf.train.import_meta_graph(meta_graph_def)
class ModuleInfoTest(tf.test.TestCase):
def testIsNamedTuple(self):
self.assertTrue(base_info._is_namedtuple(DumbNamedTuple(1, 2)))
self.assertFalse(base_info._is_namedtuple((1, 2, 3)))
self.assertFalse(base_info._is_namedtuple([1, 2, 3]))
self.assertFalse(base_info._is_namedtuple(NotATensor()))
def testIsIterable(self):
self.assertTrue(base_info._is_iterable((1, 2, 3)))
self.assertTrue(base_info._is_iterable([1, 2, 3]))
self.assertTrue(base_info._is_iterable({1: 1, 2: 2, 3: 3}))
self.assertTrue(base_info._is_iterable(
collections.OrderedDict([(1, 1), (2, 2)])))
self.assertTrue(base_info._is_iterable(DumbNamedTuple(1, 2)))
tensor = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
self.assertFalse(base_info._is_iterable(set([1, 2, 3])))
self.assertFalse(base_info._is_iterable(tensor))
sparse_tensor = tf.SparseTensor(
indices=tf.placeholder(dtype=tf.int64, shape=(10, 2,)),
values=tf.placeholder(dtype=tf.float32, shape=(10,)),
dense_shape=tf.placeholder(dtype=tf.int64, shape=(2,)))
self.assertFalse(base_info._is_iterable(sparse_tensor))
self.assertFalse(base_info._is_iterable(NotATensor()))
self.assertFalse(base_info._is_iterable("foo"))
def generator():
      for count in range(3):
self.assertFalse(False)
yield count
self.assertFalse(base_info._is_iterable(generator))
def testModuleInfo_multiple_modules(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb")
dumb_1 = DumbModule(name="dumb")
linear = basic.Linear(10, name="linear")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
with tf.name_scope("foo"):
dumb_1(ph_0)
linear(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
self.assertEqual(len(sonnet_collection), 3)
# item 0.
self.assertEqual(sonnet_collection[0].module_name, "dumb")
self.assertEqual(sonnet_collection[0].class_name,
"{}.DumbModule".format(THIS_MODULE))
self.assertEqual(sonnet_collection[0].scope_name, "dumb")
self.assertEqual(len(sonnet_collection[0].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[0].connected_subgraphs[0].name_scope, "dumb")
# item 1.
self.assertEqual(sonnet_collection[1].module_name, "dumb_1")
self.assertEqual(sonnet_collection[1].scope_name, "dumb_1")
self.assertEqual(sonnet_collection[1].class_name,
"{}.DumbModule".format(THIS_MODULE))
self.assertEqual(sonnet_collection[1].scope_name, "dumb_1")
self.assertEqual(len(sonnet_collection[1].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[1].connected_subgraphs[0].name_scope, "foo/dumb_1")
# item 2.
self.assertEqual(sonnet_collection[2].module_name, "linear")
self.assertEqual(sonnet_collection[2].scope_name, "linear")
self.assertEqual(sonnet_collection[2].class_name,
"{}.Linear".format(LINEAR_MODULE))
self.assertEqual(sonnet_collection[2].scope_name, "linear")
self.assertEqual(len(sonnet_collection[2].connected_subgraphs), 1)
self.assertEqual(
sonnet_collection[2].connected_subgraphs[0].name_scope, "linear")
check()
_copy_default_graph()
check()
def testModuleInfo_multiple_subgraph(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
with tf.name_scope("foo"):
dumb(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
self.assertEqual(len(sonnet_collection), 1)
self.assertEqual(len(sonnet_collection[0].connected_subgraphs), 2)
connected_subgraph_0 = sonnet_collection[0].connected_subgraphs[0]
connected_subgraph_1 = sonnet_collection[0].connected_subgraphs[1]
self.assertEqual(connected_subgraph_0.name_scope, "dumb_a")
self.assertEqual(connected_subgraph_1.name_scope, "foo/dumb_a")
check()
_copy_default_graph()
check()
def testModuleInfo_tensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(ph_0)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], tf.Tensor)
self.assertIsInstance(connected_subgraph.outputs, tf.Tensor)
check()
_copy_default_graph()
check()
def testModuleInfo_sparsetensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
sparse_tensor = tf.SparseTensor(
indices=tf.placeholder(dtype=tf.int64, shape=(10, 2,)),
values=tf.placeholder(dtype=tf.float32, shape=(10,)),
dense_shape=tf.placeholder(dtype=tf.int64, shape=(2,)))
dumb(sparse_tensor)
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(
connected_subgraph.inputs["inputs"], tf.SparseTensor)
self.assertIsInstance(connected_subgraph.outputs, tf.SparseTensor)
check()
_copy_default_graph()
check()
def testModuleInfo_tuple(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb((ph_0, ph_1))
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], tuple)
self.assertIsInstance(connected_subgraph.outputs, tuple)
check()
_copy_default_graph()
check()
def testModuleInfo_namedtuple(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb(DumbNamedTuple(ph_0, ph_1))
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertTrue(
base_info._is_namedtuple(connected_subgraph.inputs["inputs"]))
self.assertTrue(base_info._is_namedtuple(connected_subgraph.outputs))
check()
_copy_default_graph()
check()
def testModuleInfo_dict(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
ph_1 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
dumb({"ph_0": ph_0, "ph_1": ph_1})
def check():
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], dict)
self.assertIsInstance(connected_subgraph.outputs, dict)
check()
_copy_default_graph()
check()
def testModuleInfo_not_a_tensor(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a")
dumb(NotATensor())
def check(check_type):
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"], check_type)
self.assertIsInstance(connected_subgraph.outputs, check_type)
check(NotATensor)
_copy_default_graph()
check(base_info._UnserializableObject)
def testModuleInfo_recursion(self):
# pylint: disable=not-callable
tf.reset_default_graph()
dumb = DumbModule(name="dumb_a", no_nest=True)
ph_0 = tf.placeholder(dtype=tf.float32, shape=(1, 10,))
val = {"one": ph_0, "self": None}
val["self"] = val
dumb(val)
def check(check_type):
sonnet_collection = tf.get_default_graph().get_collection(
base_info.SONNET_COLLECTION_NAME)
connected_subgraph = sonnet_collection[0].connected_subgraphs[0]
self.assertIsInstance(connected_subgraph.inputs["inputs"]["one"],
tf.Tensor)
self.assertIsInstance(
connected_subgraph.inputs["inputs"]["self"], check_type)
self.assertIsInstance(connected_subgraph.outputs["one"], tf.Tensor)
self.assertIsInstance(connected_subgraph.outputs["self"], check_type)
check(dict)
_copy_default_graph()
check(base_info._UnserializableObject)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | -8,056,377,779,293,690,000 | 37.62585 | 79 | 0.671715 | false |
3nids/QGIS | tests/src/python/test_qgsrasterrerderer_createsld.py | 30 | 29657 | # -*- coding: utf-8 -*-
"""
***************************************************************************
test_qgsrasterrenderer_createsld.py
---------------------
Date : December 2018
Copyright : (C) 2018 by Luigi Pirelli
Email : luipir at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *less
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Luigi Pirelli'
__date__ = 'December 2018'
__copyright__ = '(C) 2018, Luigi Pirelli'
import qgis # NOQA
import os
import random
from qgis.PyQt.QtCore import (
Qt,
QDir,
QFile,
QIODevice,
QPointF,
QSizeF,
QFileInfo,
)
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtGui import QColor, QFont
from qgis.core import (
QgsRasterLayer,
QgsRasterRenderer,
QgsMultiBandColorRenderer,
QgsSingleBandGrayRenderer,
QgsPalettedRasterRenderer,
QgsSingleBandPseudoColorRenderer,
QgsContrastEnhancement,
QgsRasterMinMaxOrigin,
Qgis,
QgsRasterBandStats,
QgsRasterShader,
QgsColorRampShader,
)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
# Convenience instances in case you may need them
# not used in this test
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsRasterRendererCreateSld(unittest.TestCase):
"""
This class tests the creation of SLD from QGis raster layers
"""
@classmethod
def setUpClass(self):
pass
def setUp(self):
pass
def tearDown(self):
pass
def __init__(self, methodName):
"""Run once on class initialization."""
unittest.TestCase.__init__(self, methodName)
myPath = os.path.join(TEST_DATA_DIR, 'landsat.tif')
rasterFileInfo = QFileInfo(myPath)
self.raster_layer = QgsRasterLayer(rasterFileInfo.filePath(),
rasterFileInfo.completeBaseName())
def testSingleBandPseudoColorRenderer_Interpolated(self):
# get min and max of the band to renderer
bandNo = 3
stats = self.raster_layer.dataProvider().bandStatistics(bandNo, QgsRasterBandStats.Min | QgsRasterBandStats.Max)
minValue = stats.minimumValue
maxValue = stats.maximumValue
# create shader for the renderer
shader = QgsRasterShader(minValue, maxValue)
colorRampShaderFcn = QgsColorRampShader(minValue, maxValue)
colorRampShaderFcn.setColorRampType(QgsColorRampShader.Interpolated)
colorRampShaderFcn.setClassificationMode(QgsColorRampShader.Continuous)
colorRampShaderFcn.setClip(True)
items = []
for index in range(10):
items.append(QgsColorRampShader.ColorRampItem(index, QColor('#{0:02d}{0:02d}{0:02d}'.format(index)),
"{}".format(index)))
colorRampShaderFcn.setColorRampItemList(items)
shader.setRasterShaderFunction(colorRampShaderFcn)
# create instance to test
rasterRenderer = QgsSingleBandPseudoColorRenderer(self.raster_layer.dataProvider(), bandNo, shader)
self.raster_layer.setRenderer(rasterRenderer)
# do test
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '{}'.format(bandNo))
# check ColorMapEntry classes
colorMap = root.elementsByTagName('sld:ColorMap')
colorMap = colorMap.item(0).toElement()
self.assertFalse(colorMap.isNull())
self.assertEqual(colorMap.attribute('type'), 'ramp')
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(colorMapEntries.count(), 10)
for index in range(colorMapEntries.count()):
colorMapEntry = colorMapEntries.at(index).toElement()
self.assertEqual(colorMapEntry.attribute('quantity'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('label'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('opacity'), '')
self.assertEqual(colorMapEntry.attribute('color'), '#{0:02d}{0:02d}{0:02d}'.format(index))
def testSingleBandPseudoColorRenderer_Discrete(self):
# get min and max of the band to renderer
bandNo = 3
stats = self.raster_layer.dataProvider().bandStatistics(bandNo, QgsRasterBandStats.Min | QgsRasterBandStats.Max)
minValue = stats.minimumValue
maxValue = stats.maximumValue
# create shader for the renderer
shader = QgsRasterShader(minValue, maxValue)
colorRampShaderFcn = QgsColorRampShader(minValue, maxValue)
colorRampShaderFcn.setColorRampType(QgsColorRampShader.Discrete)
colorRampShaderFcn.setClassificationMode(QgsColorRampShader.Continuous)
colorRampShaderFcn.setClip(True)
items = []
for index in range(10):
items.append(QgsColorRampShader.ColorRampItem(index, QColor('#{0:02d}{0:02d}{0:02d}'.format(index)),
"{}".format(index)))
colorRampShaderFcn.setColorRampItemList(items)
shader.setRasterShaderFunction(colorRampShaderFcn)
# create instance to test
rasterRenderer = QgsSingleBandPseudoColorRenderer(self.raster_layer.dataProvider(), bandNo, shader)
self.raster_layer.setRenderer(rasterRenderer)
# do test
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '{}'.format(bandNo))
# check ColorMapEntry classes
colorMap = root.elementsByTagName('sld:ColorMap')
colorMap = colorMap.item(0).toElement()
self.assertFalse(colorMap.isNull())
self.assertEqual(colorMap.attribute('type'), 'intervals')
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(colorMapEntries.count(), 10)
for index in range(colorMapEntries.count()):
colorMapEntry = colorMapEntries.at(index).toElement()
self.assertEqual(colorMapEntry.attribute('quantity'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('label'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('opacity'), '')
self.assertEqual(colorMapEntry.attribute('color'), '#{0:02d}{0:02d}{0:02d}'.format(index))
def testSingleBandPseudoColorRenderer_Exact(self):
# get min and max of the band to renderer
bandNo = 3
stats = self.raster_layer.dataProvider().bandStatistics(bandNo, QgsRasterBandStats.Min | QgsRasterBandStats.Max)
minValue = stats.minimumValue
maxValue = stats.maximumValue
# create shader for the renderer
shader = QgsRasterShader(minValue, maxValue)
colorRampShaderFcn = QgsColorRampShader(minValue, maxValue)
colorRampShaderFcn.setColorRampType(QgsColorRampShader.Exact)
colorRampShaderFcn.setClassificationMode(QgsColorRampShader.Continuous)
colorRampShaderFcn.setClip(True)
items = []
for index in range(10):
items.append(QgsColorRampShader.ColorRampItem(index, QColor('#{0:02d}{0:02d}{0:02d}'.format(index)),
"{}".format(index)))
colorRampShaderFcn.setColorRampItemList(items)
shader.setRasterShaderFunction(colorRampShaderFcn)
# create instance to test
rasterRenderer = QgsSingleBandPseudoColorRenderer(self.raster_layer.dataProvider(), bandNo, shader)
self.raster_layer.setRenderer(rasterRenderer)
# do test
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '{}'.format(bandNo))
# check ColorMapEntry classes
colorMap = root.elementsByTagName('sld:ColorMap')
colorMap = colorMap.item(0).toElement()
self.assertFalse(colorMap.isNull())
self.assertEqual(colorMap.attribute('type'), 'values')
        self.assertFalse(colorMap.hasAttribute('extended'))
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(colorMapEntries.count(), 10)
for index in range(colorMapEntries.count()):
colorMapEntry = colorMapEntries.at(index).toElement()
self.assertEqual(colorMapEntry.attribute('quantity'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('label'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('opacity'), '')
self.assertEqual(colorMapEntry.attribute('color'), '#{0:02d}{0:02d}{0:02d}'.format(index))
        # also check that ColorMap is written with extended="true" when the color map has more than 255 entries
# !NOTE! can't reuse previous shader => segmentation fault
shader = QgsRasterShader(minValue, maxValue)
colorRampShaderFcn = QgsColorRampShader(minValue, maxValue)
colorRampShaderFcn.setColorRampType(QgsColorRampShader.Exact)
colorRampShaderFcn.setClassificationMode(QgsColorRampShader.Continuous)
colorRampShaderFcn.setClip(True)
items = []
for index in range(255):
items.append(
QgsColorRampShader.ColorRampItem(index, QColor.fromHsv(index, 255, 255, 255), "{}".format(index)))
colorRampShaderFcn.setColorRampItemList(items)
shader.setRasterShaderFunction(colorRampShaderFcn)
# create instance to test
rasterRenderer = QgsSingleBandPseudoColorRenderer(self.raster_layer.dataProvider(), bandNo, shader)
# self.raster_layer.setRenderer(rasterRenderer)
# dom, root = self.rendererToSld(self.raster_layer.renderer())
        # self.assertTrue( colorMap.hasAttribute( 'extended' ) )
        # self.assertEqual( colorMap.attribute( 'extended' ), 'true' )
def testPalettedRasterRenderer(self):
# create 10 color classes
# classesString = '122 0 0 0 255 122\n123 1 1 1 255 123\n124 2 2 2 255 124\n125 3 3 3 255 125\n126 4 4 4 255 126\n127 5 5 5 255 127\n128 6 6 6 255 128\n129 7 7 7 255 129\n130 8 8 8 255 130'
classesString = ''
for index in range(10):
classesString += '{0} {0} {0} {0} 255 {0}\n'.format(index)
classes = QgsPalettedRasterRenderer.classDataFromString(classesString)
rasterRenderer = QgsPalettedRasterRenderer(
self.raster_layer.dataProvider(), 3, classes)
self.raster_layer.setRenderer(rasterRenderer)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '3')
# check ColorMapEntry classes
colorMap = root.elementsByTagName('sld:ColorMap')
colorMap = colorMap.item(0).toElement()
self.assertFalse(colorMap.isNull())
self.assertEqual(colorMap.attribute('type'), 'values')
        self.assertFalse(colorMap.hasAttribute('extended'))
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(colorMapEntries.count(), 10)
for index in range(colorMapEntries.count()):
colorMapEntry = colorMapEntries.at(index).toElement()
self.assertEqual(colorMapEntry.attribute('quantity'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('label'), '{}'.format(index))
self.assertEqual(colorMapEntry.attribute('opacity'), '')
self.assertEqual(colorMapEntry.attribute('color'), '#{0:02d}{0:02d}{0:02d}'.format(index))
        # also check that ColorMap is written with extended="true" when the color map has more than 255 entries
classesString = ''
values = range(255)
for index in range(255):
classesString += '{0} {1} {1} {1} 255 {0}\n'.format(index, random.choice(values))
classes = QgsPalettedRasterRenderer.classDataFromString(classesString)
rasterRenderer = QgsPalettedRasterRenderer(
self.raster_layer.dataProvider(), 3, classes)
self.raster_layer.setRenderer(rasterRenderer)
dom, root = self.rendererToSld(self.raster_layer.renderer())
colorMap = root.elementsByTagName('sld:ColorMap')
colorMap = colorMap.item(0).toElement()
self.assertTrue(colorMap.hasAttribute('extended'))
self.assertEqual(colorMap.attribute('extended'), 'true')
def testMultiBandColorRenderer(self):
rasterRenderer = QgsMultiBandColorRenderer(
self.raster_layer.dataProvider(), 3, 1, 2)
self.raster_layer.setRenderer(rasterRenderer)
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.StretchToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:RedChannel', '3')
self.assertChannelBand(root, 'sld:GreenChannel', '1')
self.assertChannelBand(root, 'sld:BlueChannel', '2')
def testSingleBandGrayRenderer(self):
# check with StretchToMinimumMaximum
rasterRenderer = QgsSingleBandGrayRenderer(self.raster_layer.dataProvider(), 3)
self.raster_layer.setRenderer(rasterRenderer)
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.StretchToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
maximum = self.raster_layer.renderer().contrastEnhancement().maximumValue()
        minimum = self.raster_layer.renderer().contrastEnhancement().minimumValue()
        self.assertEqual(minimum, 51)
self.assertEqual(maximum, 172)
# check default values
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '3')
elements = root.elementsByTagName('sld:ContrastEnhancement')
self.assertEqual(len(elements), 1)
enhancement = elements.at(0).toElement()
self.assertFalse(enhancement.isNull())
normalize = enhancement.firstChildElement('sld:Normalize')
self.assertFalse(normalize.isNull())
self.assertVendorOption(normalize, 'algorithm', 'StretchToMinimumMaximum')
self.assertVendorOption(normalize, 'minValue', '51')
self.assertVendorOption(normalize, 'maxValue', '172')
elements = root.elementsByTagName('sld:ColorMap')
self.assertEqual(len(elements), 1)
colorMap = elements.at(0).toElement()
self.assertFalse(colorMap.isNull())
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(len(colorMapEntries), 2)
        colorMap1 = colorMapEntries.at(0)
        self.assertEqual(colorMap1.attributes().namedItem('color').nodeValue(), '#000000')
        self.assertEqual(colorMap1.attributes().namedItem('quantity').nodeValue(), '0')
        colorMap2 = colorMapEntries.at(1)
        self.assertEqual(colorMap2.attributes().namedItem('color').nodeValue(), '#ffffff')
        self.assertEqual(colorMap2.attributes().namedItem('quantity').nodeValue(), '255')
# check when StretchAndClipToMinimumMaximum
# then min/max have always to be the real one and not that set in the contrastEnhancement
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.StretchAndClipToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
        self.raster_layer.renderer().contrastEnhancement().setMinimumValue(100)
        maximum = self.raster_layer.renderer().contrastEnhancement().maximumValue()
        minimum = self.raster_layer.renderer().contrastEnhancement().minimumValue()
        self.assertEqual(minimum, 100)
        self.assertEqual(maximum, 172)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '3')
elements = root.elementsByTagName('sld:ContrastEnhancement')
self.assertEqual(len(elements), 1)
enhancement = elements.at(0).toElement()
self.assertFalse(enhancement.isNull())
normalize = enhancement.firstChildElement('sld:Normalize')
self.assertFalse(normalize.isNull())
self.assertVendorOption(normalize, 'minValue', '51')
self.assertVendorOption(normalize, 'maxValue', '172')
elements = root.elementsByTagName('sld:ColorMap')
self.assertEqual(len(elements), 1)
colorMap = elements.at(0).toElement()
self.assertFalse(colorMap.isNull())
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(len(colorMapEntries), 4)
        colorMap1 = colorMapEntries.at(0)
        self.assertEqual(colorMap1.attributes().namedItem('color').nodeValue(), '#000000')
        self.assertEqual(colorMap1.attributes().namedItem('quantity').nodeValue(), '100')
        self.assertEqual(colorMap1.attributes().namedItem('opacity').nodeValue(), '0')
        colorMap2 = colorMapEntries.at(1)
        self.assertEqual(colorMap2.attributes().namedItem('color').nodeValue(), '#000000')
        self.assertEqual(colorMap2.attributes().namedItem('quantity').nodeValue(), '100')
        colorMap3 = colorMapEntries.at(2)
        self.assertEqual(colorMap3.attributes().namedItem('color').nodeValue(), '#ffffff')
        self.assertEqual(colorMap3.attributes().namedItem('quantity').nodeValue(), '172')
        colorMap4 = colorMapEntries.at(3)
        self.assertEqual(colorMap4.attributes().namedItem('color').nodeValue(), '#ffffff')
        self.assertEqual(colorMap4.attributes().namedItem('quantity').nodeValue(), '172')
        self.assertEqual(colorMap4.attributes().namedItem('opacity').nodeValue(), '0')
# check when ClipToMinimumMaximum
# then min/max have always to be the real one and not that set in the contrastEnhancement
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.ClipToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
        self.raster_layer.renderer().contrastEnhancement().setMinimumValue(100)
        maximum = self.raster_layer.renderer().contrastEnhancement().maximumValue()
        minimum = self.raster_layer.renderer().contrastEnhancement().minimumValue()
        self.assertEqual(minimum, 100)
self.assertEqual(maximum, 172)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
self.assertChannelBand(root, 'sld:GrayChannel', '3')
elements = root.elementsByTagName('sld:ContrastEnhancement')
self.assertEqual(len(elements), 1)
enhancement = elements.at(0).toElement()
self.assertFalse(enhancement.isNull())
normalize = enhancement.firstChildElement('sld:Normalize')
self.assertFalse(normalize.isNull())
self.assertVendorOption(normalize, 'minValue', '51')
self.assertVendorOption(normalize, 'maxValue', '172')
elements = root.elementsByTagName('sld:ColorMap')
self.assertEqual(len(elements), 1)
colorMap = elements.at(0).toElement()
self.assertFalse(colorMap.isNull())
colorMapEntries = colorMap.elementsByTagName('sld:ColorMapEntry')
self.assertEqual(len(colorMapEntries), 4)
        colorMap1 = colorMapEntries.at(0)
        self.assertEqual(colorMap1.attributes().namedItem('color').nodeValue(), '#000000')
        self.assertEqual(colorMap1.attributes().namedItem('quantity').nodeValue(), '100')
        self.assertEqual(colorMap1.attributes().namedItem('opacity').nodeValue(), '0')
        colorMap2 = colorMapEntries.at(1)
        self.assertEqual(colorMap2.attributes().namedItem('color').nodeValue(), '#000000')
        self.assertEqual(colorMap2.attributes().namedItem('quantity').nodeValue(), '100')
        colorMap3 = colorMapEntries.at(2)
        self.assertEqual(colorMap3.attributes().namedItem('color').nodeValue(), '#ffffff')
        self.assertEqual(colorMap3.attributes().namedItem('quantity').nodeValue(), '172')
        colorMap4 = colorMapEntries.at(3)
        self.assertEqual(colorMap4.attributes().namedItem('color').nodeValue(), '#ffffff')
        self.assertEqual(colorMap4.attributes().namedItem('quantity').nodeValue(), '172')
        self.assertEqual(colorMap4.attributes().namedItem('opacity').nodeValue(), '0')
def testRasterRenderer(self):
class fakerenderer(QgsRasterRenderer):
def __init__(self, interface):
QgsRasterRenderer.__init__(self, interface, '')
rasterRenderer = fakerenderer(self.raster_layer.dataProvider())
self.raster_layer.setRenderer(rasterRenderer)
# check opacity default value is not exported
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertNoOpacity(root)
# check if opacity is not the default value
rasterRenderer.setOpacity(1.1)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertOpacity(root, '1.1')
# check gamma properties from [-100:0] stretched to [0:1]
        # and (0:100] stretched to (1:100]
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '-100'})
# self.assertGamma(root, '0')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '-50'})
# self.assertGamma(root, '0.5')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '0'})
# self.assertGamma(root, '1')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '1'})
# self.assertGamma(root, '1')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '100'})
# self.assertGamma(root, '100')
        # # input contrast is always an integer, but the value is also handled if it's a double
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '1.1'})
# self.assertGamma(root, '1.1')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '1.6'})
# self.assertGamma(root, '1.6')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '-50.5'})
# self.assertGamma(root, '0.495')
# dom, root = self.rendererToSld(rasterRenderer, {'contrast': '-0.1'})
# self.assertGamma(root, '0.999')
def testStretchingAlgorithm(self):
rasterRenderer = QgsMultiBandColorRenderer(
self.raster_layer.dataProvider(), 3, 1, 2)
self.raster_layer.setRenderer(rasterRenderer)
# check StretchToMinimumMaximum stretching alg
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.StretchToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertContrastEnhancement(root, 'sld:RedChannel', 'StretchToMinimumMaximum', '51', '172')
self.assertContrastEnhancement(root, 'sld:GreenChannel', 'StretchToMinimumMaximum', '122', '130')
self.assertContrastEnhancement(root, 'sld:BlueChannel', 'StretchToMinimumMaximum', '133', '148')
# check StretchAndClipToMinimumMaximum stretching alg
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.StretchAndClipToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertContrastEnhancement(root, 'sld:RedChannel', 'ClipToZero', '51', '172')
self.assertContrastEnhancement(root, 'sld:GreenChannel', 'ClipToZero', '122', '130')
self.assertContrastEnhancement(root, 'sld:BlueChannel', 'ClipToZero', '133', '148')
# check ClipToMinimumMaximum stretching alg
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.ClipToMinimumMaximum,
limits=QgsRasterMinMaxOrigin.MinMax)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertContrastEnhancement(root, 'sld:RedChannel', 'ClipToMinimumMaximum', '51', '172')
self.assertContrastEnhancement(root, 'sld:GreenChannel', 'ClipToMinimumMaximum', '122', '130')
self.assertContrastEnhancement(root, 'sld:BlueChannel', 'ClipToMinimumMaximum', '133', '148')
# check NoEnhancement stretching alg
self.raster_layer.setContrastEnhancement(algorithm=QgsContrastEnhancement.NoEnhancement)
dom, root = self.rendererToSld(self.raster_layer.renderer())
self.assertContrastEnhancement(root, 'sld:RedChannel')
self.assertContrastEnhancement(root, 'sld:GreenChannel')
self.assertContrastEnhancement(root, 'sld:BlueChannel')
def assertVendorOption(self, root, name, expectedValue):
"""Set expectedValue=None to check that the vendor option is not present."""
vendorOptions = root.elementsByTagName('sld:VendorOption')
found = False
for vendorOptionIndex in range(vendorOptions.count()):
vendorOption = vendorOptions.at(vendorOptionIndex)
self.assertEqual('sld:VendorOption', vendorOption.nodeName())
if (vendorOption.attributes().namedItem('name').nodeValue() == name):
found = True
self.assertEqual(vendorOption.firstChild().nodeValue(), expectedValue)
if (expectedValue is None) and found:
self.fail("found VendorOption: {} where supposed not present".format(name))
if expectedValue and not found:
self.fail("Not found VendorOption: {}".format(name))
def assertGamma(self, root, expectedValue, index=0):
enhancement = root.elementsByTagName('sld:ContrastEnhancement').item(index)
gamma = enhancement.firstChildElement('sld:GammaValue')
self.assertEqual(expectedValue, gamma.firstChild().nodeValue())
def assertOpacity(self, root, expectedValue, index=0):
opacity = root.elementsByTagName('sld:Opacity').item(index)
self.assertEqual(expectedValue, opacity.firstChild().nodeValue())
def assertNoOpacity(self, root):
opacities = root.elementsByTagName('sld:Opacity')
self.assertEqual(opacities.size(), 0)
def assertContrastEnhancement(self, root, bandTag, expectedAlg=None, expectedMin=None, expectedMax=None, index=0):
channelSelection = root.elementsByTagName('sld:ChannelSelection').item(index)
self.assertIsNotNone(channelSelection)
band = channelSelection.firstChildElement(bandTag)
        # check that no enhancement algorithm is set
if (not expectedAlg):
contrastEnhancementName = band.firstChildElement('sld:ContrastEnhancement')
self.assertEqual('', contrastEnhancementName.firstChild().nodeName())
return
# check if enhancement alg is set
contrastEnhancementName = band.firstChildElement('sld:ContrastEnhancement')
self.assertEqual('sld:Normalize', contrastEnhancementName.firstChild().nodeName())
normalize = contrastEnhancementName.firstChildElement('sld:Normalize')
vendorOptions = normalize.elementsByTagName('VendorOption')
for vendorOptionIndex in range(vendorOptions.count()):
vendorOption = vendorOptions.at(vendorOptionIndex)
self.assertEqual('VendorOption', vendorOption.nodeName())
if (vendorOption.attributes().namedItem('name').nodeValue() == 'algorithm'):
self.assertEqual(expectedAlg, vendorOption.firstChild().nodeValue())
elif (vendorOption.attributes().namedItem('name').nodeValue() == 'minValue'):
self.assertEqual(expectedMin, vendorOption.firstChild().nodeValue())
elif (vendorOption.attributes().namedItem('name').nodeValue() == 'maxValue'):
self.assertEqual(expectedMax, vendorOption.firstChild().nodeValue())
else:
self.fail(
'Unrecognised vendorOption name {}'.format(vendorOption.attributes().namedItem('name').nodeValue()))
def assertChannelBand(self, root, bandTag, expectedValue, index=0):
channelSelection = root.elementsByTagName('sld:ChannelSelection').item(index)
self.assertIsNotNone(channelSelection)
band = channelSelection.firstChildElement(bandTag)
sourceChannelName = band.firstChildElement('sld:SourceChannelName')
self.assertEqual(expectedValue, sourceChannelName.firstChild().nodeValue())
def rendererToSld(self, renderer, properties={}):
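        # Serializes the renderer to SLD under a throwaway root element so
        # the tests can inspect the generated DOM directly.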
dom = QDomDocument()
root = dom.createElement("FakeRoot")
dom.appendChild(root)
renderer.toSld(dom, root, properties)
return dom, root
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -567,622,813,537,030,100 | 50.938704 | 197 | 0.664127 | false |
tensorflow/models | research/object_detection/utils/json_utils.py | 2 | 2694 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for dealing with writing json strings.
json_utils wraps json.dump and json.dumps so that they can be used to safely
control the precision of floats when writing to json strings or files.
"""
import json
import re
def FormatFloat(json_str, float_digits):
  """Rounds every float literal in json_str to float_digits decimal places."""
  pattern = re.compile(r'\d+\.\d+')
  float_repr = '{:.' + '{}'.format(float_digits) + 'f}'
  def MRound(match):
    return float_repr.format(float(match.group()))
  return re.sub(pattern, MRound, json_str)
def Dump(obj, fid, float_digits=-1, **params):
"""Wrapper of json.dump that allows specifying the float precision used.
Args:
obj: The object to dump.
fid: The file id to write to.
float_digits: The number of digits of precision when writing floats out.
**params: Additional parameters to pass to json.dumps.
"""
json_str = Dumps(obj, float_digits, **params)
fid.write(json_str)
def Dumps(obj, float_digits=-1, **params):
"""Wrapper of json.dumps that allows specifying the float precision used.
Args:
obj: The object to dump.
float_digits: The number of digits of precision when writing floats out.
**params: Additional parameters to pass to json.dumps.
Returns:
output: JSON string representation of obj.
"""
json_str = json.dumps(obj, **params)
if float_digits > -1:
json_str = FormatFloat(json_str, float_digits)
return json_str
def PrettyParams(**params):
"""Returns parameters for use with Dump and Dumps to output pretty json.
Example usage:
```json_str = json_utils.Dumps(obj, **json_utils.PrettyParams())```
```json_str = json_utils.Dumps(
obj, **json_utils.PrettyParams(allow_nans=False))```
Args:
**params: Additional params to pass to json.dump or json.dumps.
Returns:
params: Parameters that are compatible with json_utils.Dump and
json_utils.Dumps.
"""
params['float_digits'] = 4
params['sort_keys'] = True
params['indent'] = 2
params['separators'] = (',', ': ')
return params
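# Illustrative usage of Dump with PrettyParams (a sketch; the file name and
# dict contents are arbitrary examples, not part of this module's API):
#
#   with open('metrics.json', 'w') as fid:
#     Dump({'loss': 0.123456789}, fid, **PrettyParams())
#
# PrettyParams supplies float_digits=4, so the float above is written out as
# 0.1235 in the resulting JSON.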
| apache-2.0 | 720,803,917,854,253,400 | 31.853659 | 80 | 0.67706 | false |
saukrIppl/seahub | seahub/base/database_storage/database_storage.py | 1 | 9365 | # DatabaseStorage for django.
# 2011 (c) Mike Mueller <[email protected]>
# 2009 (c) GameKeeper Gambling Ltd, Ivanov E.
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.core.files.storage import Storage
from django.core.files import File
from django.db import connection, transaction
import base64
import hashlib
import StringIO
import urlparse
from datetime import datetime
from seahub.utils.timeutils import value_to_db_datetime
class DatabaseStorage(Storage):
"""
Implements the Django Storage API for storing files in the database,
rather than on the filesystem. Uses the Django database layer, so any
database supported by Django should theoretically work.
Usage: Create an instance of DatabaseStorage and pass it as the storage
parameter of your FileField, ImageField, etc.::
image = models.ImageField(
null=True,
blank=True,
upload_to='attachments/',
storage=DatabaseStorage(options=DBS_OPTIONS),
)
Files submitted using this field will be saved into the default Django
database, using the options specified in the constructor. The upload_to
path will be prepended to uploads, so that the file 'bar.png' would be
retrieved later as 'attachments/bar.png' in this example.
Uses the default get_available_name strategy, so duplicate filenames will
be silently renamed to foo_1.jpg, foo_2.jpg, etc.
You are responsible for creating a table in the database with the
following columns:
filename VARCHAR(256) NOT NULL PRIMARY KEY,
data TEXT NOT NULL,
size INTEGER NOT NULL,
The best place to do this is probably in your_app/sql/foo.sql, which will
run during syncdb. The length 256 is up to you, you can also pass a
max_length parameter to FileFields to be consistent with your column here.
On SQL Server, you should probably use nvarchar to support unicode.
Remember, this is not designed for huge objects. It is probably best used
on files under 1MB in size. All files are base64-encoded before being
stored, so they will use 1.33x the storage of the original file.
Here's an example view to serve files stored in the database.
def image_view(request, filename):
# Read file from database
storage = DatabaseStorage(options=DBS_OPTIONS)
image_file = storage.open(filename, 'rb')
if not image_file:
raise Http404
file_content = image_file.read()
# Prepare response
content_type, content_encoding = mimetypes.guess_type(filename)
response = HttpResponse(content=file_content, mimetype=content_type)
response['Content-Disposition'] = 'inline; filename=%s' % filename
if content_encoding:
response['Content-Encoding'] = content_encoding
return response
"""
def __init__(self, options):
"""
Create a DatabaseStorage object with the specified options dictionary.
Required options:
'table': The name of the database table for file storage.
'base_url': The base URL where database files should be found.
This is used to construct URLs for FileFields and
you will need to define a view that handles requests
at this location (example given above).
Allowed options:
'name_column': Name of the filename column (default: 'filename')
'data_column': Name of the data column (default: 'data')
'size_column': Name of the size column (default: 'size')
            'name_md5_column': Name of the filename MD5 column (default: 'filename_md5')
            'mtime_column': Name of the modification time column (default: 'mtime')
"""
required_keys = [
'table',
'base_url',
]
allowed_keys = [
'name_column',
'name_md5_column',
'data_column',
'size_column',
'mtime_column',
]
for key in required_keys:
if key not in options:
raise ImproperlyConfigured(
'DatabaseStorage missing required option: ' + key)
for key in options:
if key not in required_keys and key not in allowed_keys:
raise ImproperlyConfigured(
'Unrecognized DatabaseStorage option: ' + key)
# Note: These fields are used as keys in string substitutions
# throughout this class. If you change a name here, be sure to update
# all the affected format strings.
self.table = options['table']
self.base_url = options['base_url']
self.name_column = options.get('name_column', 'filename')
self.name_md5_column = options.get('name_md5_column', 'filename_md5')
self.data_column = options.get('data_column', 'data')
self.size_column = options.get('size_column', 'size')
self.mtime_column = options.get('mtime_column', 'mtime')
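    # Illustrative options dict (a sketch -- the table name and base URL are
    # assumptions for the example, not values required by this class):
    #
    #   DBS_OPTIONS = {
    #       'table': 'uploaded_files',
    #       'base_url': '/files/',
    #   }
    #   storage = DatabaseStorage(options=DBS_OPTIONS)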
def _open(self, name, mode='rb'):
"""
Open a file stored in the database. name should be the full name of
the file, including the upload_to path that may have been used.
Path separator should always be '/'. mode should always be 'rb'.
Returns a Django File object if found, otherwise None.
"""
assert mode == 'rb', "DatabaseStorage open mode must be 'rb'."
name_md5 = hashlib.md5(name).hexdigest()
query = 'SELECT %(data_column)s FROM %(table)s ' + \
'WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
cursor = connection.cursor()
cursor.execute(query, [name_md5])
row = cursor.fetchone()
if row is None:
return None
inMemFile = StringIO.StringIO(base64.b64decode(row[0]))
inMemFile.name = name
inMemFile.mode = mode
return File(inMemFile)
def _save(self, name, content):
"""
Save the given content as file with the specified name. Backslashes
in the name will be converted to forward '/'.
"""
name = name.replace('\\', '/')
name_md5 = hashlib.md5(name).hexdigest()
binary = content.read()
size = len(binary)
encoded = base64.b64encode(binary)
mtime = value_to_db_datetime(datetime.today())
with transaction.atomic(using='default'):
cursor = connection.cursor()
if self.exists(name):
query = 'UPDATE %(table)s SET %(data_column)s = %%s, ' + \
'%(size_column)s = %%s, %(mtime_column)s = %%s ' + \
'WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
                cursor.execute(query, [encoded, size, mtime, name_md5])
else:
query = 'INSERT INTO %(table)s (%(name_column)s, ' + \
'%(name_md5_column)s, %(data_column)s, %(size_column)s, '+ \
'%(mtime_column)s) VALUES (%%s, %%s, %%s, %%s, %%s)'
query %= self.__dict__
cursor.execute(query, (name, name_md5, encoded, size, mtime))
return name
def exists(self, name):
name_md5 = hashlib.md5(name).hexdigest()
query = 'SELECT COUNT(*) FROM %(table)s WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
cursor = connection.cursor()
cursor.execute(query, [name_md5])
row = cursor.fetchone()
return int(row[0]) > 0
def delete(self, name):
if self.exists(name):
with transaction.atomic(using='default'):
name_md5 = hashlib.md5(name).hexdigest()
query = 'DELETE FROM %(table)s WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
connection.cursor().execute(query, [name_md5])
def path(self, name):
raise NotImplementedError('DatabaseStorage does not support path().')
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
result = urlparse.urljoin(self.base_url, name).replace('\\', '/')
return result
def size(self, name):
"Get the size of the given filename or raise ObjectDoesNotExist."
name_md5 = hashlib.md5(name).hexdigest()
query = 'SELECT %(size_column)s FROM %(table)s ' + \
'WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
cursor = connection.cursor()
cursor.execute(query, [name_md5])
row = cursor.fetchone()
if not row:
raise ObjectDoesNotExist(
"DatabaseStorage file not found: %s" % name)
return int(row[0])
def modified_time(self, name):
"Get the modified time of the given filename or raise ObjectDoesNotExist."
name_md5 = hashlib.md5(name).hexdigest()
query = 'SELECT %(mtime_column)s FROM %(table)s ' + \
'WHERE %(name_md5_column)s = %%s'
query %= self.__dict__
cursor = connection.cursor()
cursor.execute(query, [name_md5])
row = cursor.fetchone()
if not row:
raise ObjectDoesNotExist(
"DatabaseStorage file not found: %s" % name)
return row[0]
| apache-2.0 | -1,862,318,086,356,039,000 | 38.020833 | 82 | 0.595729 | false |
docee/kodistubs | xbmc.py | 2 | 32093 | ## @package xbmc
# Various classes and functions to interact with XBMC.
#
"""
Various classes and functions to interact with Kodi.
"""
import xbmcgui
CAPTURE_FLAG_CONTINUOUS = 1
CAPTURE_FLAG_IMMEDIATELY = 2
CAPTURE_STATE_DONE = 3
CAPTURE_STATE_FAILED = 4
CAPTURE_STATE_WORKING = 0
DRIVE_NOT_READY = 1
ENGLISH_NAME = 2
ISO_639_1 = 0
ISO_639_2 = 1
LOGDEBUG = 0
LOGERROR = 4
LOGFATAL = 6
LOGINFO = 1
LOGNONE = 7
LOGNOTICE = 2
LOGSEVERE = 5
LOGWARNING = 3
PLAYER_CORE_AUTO = 0
PLAYER_CORE_DVDPLAYER = 1
PLAYER_CORE_MPLAYER = 2
PLAYER_CORE_PAPLAYER = 3
PLAYLIST_MUSIC = 0
PLAYLIST_VIDEO = 1
SERVER_AIRPLAYSERVER = 2
SERVER_EVENTSERVER = 6
SERVER_JSONRPCSERVER = 3
SERVER_UPNPRENDERER = 4
SERVER_UPNPSERVER = 5
SERVER_WEBSERVER = 1
SERVER_ZEROCONF = 7
TRAY_CLOSED_MEDIA_PRESENT = 96
TRAY_CLOSED_NO_MEDIA = 64
TRAY_OPEN = 16
__author__ = 'Team Kodi <http://kodi.tv>'
__credits__ = 'Team Kodi'
__date__ = 'Fri May 01 16:22:03 BST 2015'
__platform__ = 'ALL'
__version__ = '2.20.0'
abortRequested = False
class Keyboard(object):
def __init__(self, line='', heading='', hidden=False):
"""
Creates a new Keyboard object with default text heading and hidden input flag if supplied.
line: string - default text entry.
heading: string - keyboard heading.
hidden: boolean - True for hidden text entry.
Example:
kb = xbmc.Keyboard('default', 'heading', True)
kb.setDefault('password') # optional
kb.setHeading('Enter password') # optional
kb.setHiddenInput(True) # optional
kb.doModal()
if (kb.isConfirmed()):
text = kb.getText()
"""
pass
def doModal(self, autoclose=0):
"""Show keyboard and wait for user action.
autoclose: integer - milliseconds to autoclose dialog.
Note:
autoclose = 0 - This disables autoclose
Example:
kb.doModal(30000)
"""
pass
def setDefault(self, line=''):
"""Set the default text entry.
line: string - default text entry.
Example:
kb.setDefault('password')
"""
pass
def setHiddenInput(self, hidden=False):
"""Allows hidden text entry.
hidden: boolean - True for hidden text entry.
Example:
kb.setHiddenInput(True)
"""
pass
def setHeading(self, heading):
"""Set the keyboard heading.
heading: string - keyboard heading.
Example:
kb.setHeading('Enter password')
"""
pass
def getText(self):
"""Returns the user input as a string.
Note:
This will always return the text entry even if you cancel the keyboard.
Use the isConfirmed() method to check if user cancelled the keyboard.
"""
return str
def isConfirmed(self):
"""Returns False if the user cancelled the input.
example:
- if (kb.isConfirmed()):"""
return bool
class Player(object):
def __init__(self, playerCore=None):
"""Creates a new Player with as default the xbmc music playlist.
Args:
playerCore: Use a specified playcore instead of letting xbmc decide the playercore to use.
- xbmc.PLAYER_CORE_AUTO
- xbmc.PLAYER_CORE_DVDPLAYER
- xbmc.PLAYER_CORE_MPLAYER
- xbmc.PLAYER_CORE_PAPLAYER
"""
pass
    def play(self, item=None, listitem=None, windowed=False, startpos=-1):
"""
play([item, listitem, windowed, startpos]) -- Play this item.
item : [opt] string - filename, url or playlist.
listitem : [opt] listitem - used with setInfo() to set different infolabels.
windowed : [opt] bool - true=play video windowed, false=play users preference.(default)
startpos : [opt] int - starting position when playing a playlist. Default = -1
*Note, If item is not given then the Player will try to play the current item
in the current playlist.
You can use the above as keywords for arguments and skip certain optional arguments.
Once you use a keyword, all following arguments require the keyword.
example:
- listitem = xbmcgui.ListItem('Ironman')
- listitem.setInfo('video', {'Title': 'Ironman', 'Genre': 'Science Fiction'})
- xbmc.Player().play(url, listitem, windowed)
- xbmc.Player().play(playlist, listitem, windowed, startpos)
"""
pass
def stop(self):
"""Stop playing."""
pass
def pause(self):
"""Pause playing."""
pass
def playnext(self):
"""Play next item in playlist."""
pass
def playprevious(self):
"""Play previous item in playlist."""
pass
def playselected(self, selected):
"""Play a certain item from the current playlist."""
pass
def onPlayBackStarted(self):
"""Will be called when xbmc starts playing a file."""
pass
def onPlayBackEnded(self):
"""Will be called when xbmc stops playing a file."""
pass
def onPlayBackStopped(self):
"""Will be called when user stops xbmc playing a file."""
def onPlayBackPaused(self):
"""Will be called when user pauses a playing file."""
pass
def onPlayBackResumed(self):
"""Will be called when user resumes a paused file."""
pass
def onPlayBackSeek(self, time, seekOffset):
"""
onPlayBackSeek(time, seekOffset) -- onPlayBackSeek method.
time : integer - time to seek to.
seekOffset : integer - ?.
Will be called when user seeks to a time
"""
pass
def onPlayBackSeekChapter(self, chapter):
"""
onPlayBackSeekChapter(chapter) -- onPlayBackSeekChapter method.
chapter : integer - chapter to seek to.
Will be called when user performs a chapter seek
"""
pass
def onPlayBackSpeedChanged(self, speed):
"""
onPlayBackSpeedChanged(speed) -- onPlayBackSpeedChanged method.
speed : integer - current speed of player.
*Note, negative speed means player is rewinding, 1 is normal playback speed.
Will be called when players speed changes. (eg. user FF/RW)
"""
pass
def onQueueNextItem(self):
"""
onQueueNextItem() -- onQueueNextItem method.
Will be called when player requests next item
"""
pass
def isPlaying(self):
"""Returns True is xbmc is playing a file."""
return bool
def isPlayingAudio(self):
"""Returns True is xbmc is playing an audio file."""
return bool
def isPlayingVideo(self):
"""Returns True if xbmc is playing a video."""
return bool
def getPlayingFile(self):
"""
getPlayingFile() --returns the current playing file as a string.
Note: For LiveTV, returns a pvr:// url which is not translatable to an OS specific file or external url
Throws: Exception, if player is not playing a file.
"""
return str
def getVideoInfoTag(self):
"""Returns the VideoInfoTag of the current playing Movie.
Raises:
Exception: If player is not playing a file or current file is not a movie file.
Note:
This doesn't work yet, it's not tested.
"""
return InfoTagVideo
def getMusicInfoTag(self):
"""Returns the MusicInfoTag of the current playing 'Song'.
Raises:
Exception: If player is not playing a file or current file is not a music file.
"""
return InfoTagMusic
def getTotalTime(self):
"""Returns the total time of the current playing media in seconds.
This is only accurate to the full second.
Raises:
Exception: If player is not playing a file.
"""
return float
def getTime(self):
"""Returns the current time of the current playing media as fractional seconds.
Raises:
Exception: If player is not playing a file.
"""
return float
def seekTime(self, pTime):
"""Seeks the specified amount of time as fractional seconds.
The time specified is relative to the beginning of the currently playing media file.
Raises:
Exception: If player is not playing a file.
"""
pass
def setSubtitles(self, subtitleFile):
"""Set subtitle file and enable subtitles.
subtitleFile: string or unicode - Path to subtitle.
Example:
setSubtitles('/path/to/subtitle/test.srt')
"""
pass
def getSubtitles(self):
"""Get subtitle stream name."""
return str
def disableSubtitles(self):
"""Disable subtitles."""
pass
def getAvailableAudioStreams(self):
"""Get audio stream names."""
return list
def getAvailableSubtitleStreams(self):
"""
getAvailableSubtitleStreams() -- get Subtitle stream names
"""
return list
def setAudioStream(self, iStream):
"""Set audio stream.
iStream: int
"""
pass
def setSubtitleStream(self, iStream):
"""
setSubtitleStream(iStream) -- set Subtitle Stream
iStream : int
example:
- setSubtitleStream(1)
"""
pass
def showSubtitles(self, bVisible):
"""
showSubtitles(bVisible)--enable/disable subtitles
bVisible : boolean - True for visible subtitles.
example:
- xbmc.Player().showSubtitles(True)
"""
pass
class PlayList(object):
def __init__(self, playList):
"""Retrieve a reference from a valid xbmc playlist
playlist: int - can be one of the next values:
0: xbmc.PLAYLIST_MUSIC
1: xbmc.PLAYLIST_VIDEO
Use PlayList[int position] or __getitem__(int position) to get a PlayListItem.
"""
pass
def __getitem__(self, item):
"""x.__getitem__(y) <==> x[y]"""
return xbmcgui.ListItem
def __len__(self):
"""x.__len__() <==> len(x)"""
return int
def add(self, url, listitem=None, index=-1):
"""Adds a new file to the playlist.
url: string or unicode - filename or url to add.
listitem: listitem - used with setInfo() to set different infolabels.
index: integer - position to add playlist item.
Example:
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
video = 'F:\\movies\\Ironman.mov'
listitem = xbmcgui.ListItem('Ironman', thumbnailImage='F:\\movies\\Ironman.tbn')
listitem.setInfo('video', {'Title': 'Ironman', 'Genre': 'Science Fiction'})
playlist.add(url=video, listitem=listitem, index=7)
"""
pass
def load(self, filename):
"""Load a playlist.
        Clear the current playlist and copy items from the file to this Playlist.
        The filename can be a playlist file such as .pls or .m3u.
Returns False if unable to load playlist, True otherwise.
"""
return bool
def remove(self, filename):
"""Remove an item with this filename from the playlist."""
pass
def clear(self):
"""Clear all items in the playlist."""
pass
def shuffle(self):
"""Shuffle the playlist."""
pass
def unshuffle(self):
"""Unshuffle the playlist."""
pass
def size(self):
"""Returns the total number of PlayListItems in this playlist."""
return int
def getposition(self):
"""Returns the position of the current song in this playlist."""
return int
def getPlayListId(self):
"""getPlayListId() --returns an integer."""
return int
class PlayListItem(object):
"""Creates a new PlaylistItem which can be added to a PlayList."""
def getdescription(self):
"""Returns the description of this PlayListItem."""
return str
def getduration(self):
"""Returns the duration of this PlayListItem."""
return long
def getfilename(self):
"""Returns the filename of this PlayListItem."""
return str
class InfoTagMusic(object):
def getURL(self):
"""Returns a string."""
return str
def getTitle(self):
"""Returns a string."""
return str
def getArtist(self):
"""Returns a string."""
return str
def getAlbumArtist(self):
"""Returns a string."""
return str
def getAlbum(self):
"""Returns a string."""
return str
def getGenre(self):
"""Returns a string."""
return str
def getDuration(self):
"""Returns an integer."""
return int
def getTrack(self):
"""Returns an integer."""
return int
def getDisc(self):
"""Returns an integer."""
return int
def getTrackAndDisc(self):
"""Returns an integer."""
return int
def getReleaseDate(self):
"""Returns a string."""
return str
def getListeners(self):
"""Returns an integer."""
return int
def getPlayCount(self):
"""Returns an integer."""
return int
def getLastPlayed(self):
"""Returns a string."""
return str
def getComment(self):
"""Returns a string."""
return str
def getLyrics(self):
"""Returns a string."""
return str
class InfoTagVideo(object):
def getDirector(self):
"""Returns a string."""
return str
def getWritingCredits(self):
"""Returns a string."""
return str
def getGenre(self):
"""Returns a string."""
return str
def getTagLine(self):
"""Returns a string."""
return str
def getPlotOutline(self):
"""Returns a string."""
return str
def getPlot(self):
"""Returns a string."""
return str
def getPictureURL(self):
"""Returns a string."""
return str
def getTitle(self):
"""Returns a string."""
return str
def getOriginalTitle(self):
"""Returns a string."""
return str
def getVotes(self):
"""Returns a string."""
return str
def getCast(self):
"""Returns a string."""
return str
def getFile(self):
"""Returns a string."""
return str
def getPath(self):
"""Returns a string."""
return str
def getIMDBNumber(self):
"""Returns a string."""
return str
def getYear(self):
"""Returns an integer."""
return int
def getPremiered(self):
"""Returns a string."""
return str
def getFirstAired(self):
"""Returns a string."""
return str
def getRating(self):
"""Returns a float."""
return float
def getPlayCount(self):
"""Returns an integer."""
return int
def getLastPlayed(self):
"""Returns a string."""
return str
class Monitor(object):
"""
Monitor class.
Monitor() -- Creates a new Monitor to notify addon about changes.
"""
def onAbortRequested(self):
"""
Deprecated!
"""
pass
def onDatabaseUpdated(self, database):
"""
Deprecated!
"""
pass
def onScreensaverActivated(self):
"""
onScreensaverActivated() -- onScreensaverActivated method.
Will be called when screensaver kicks in
"""
pass
def onScreensaverDeactivated(self):
"""
onScreensaverDeactivated() -- onScreensaverDeactivated method.
Will be called when screensaver goes off
"""
pass
def onSettingsChanged(self):
"""
onSettingsChanged() -- onSettingsChanged method.
Will be called when addon settings are changed
"""
pass
def onDatabaseScanStarted(self, database):
"""
Deprecated!
"""
pass
def onNotification(self, sender, method, data):
"""
onNotification(sender, method, data) -- onNotification method.
sender : str - sender of the notification
method : str - name of the notification
data : str - JSON-encoded data of the notification
Will be called when XBMC receives or sends a notification
"""
pass
def onCleanStarted(self, library):
"""
onCleanStarted(library) -- onCleanStarted method.
library : video/music as string
        Will be called when a library clean has started; 'library' is 'video'
        or 'music', indicating which library is being cleaned
"""
pass
def onCleanFinished(self, library):
"""
onCleanFinished(library)--onCleanFinished method.
library : video/music as string
        Will be called when a library clean has ended; 'library' is 'video'
        or 'music', indicating which library has been cleaned
"""
pass
def onDPMSActivated(self):
"""
onDPMSActivated() --onDPMSActivated method.
Will be called when energysaving/DPMS gets active
"""
pass
def onDPMSDeactivated(self):
"""
onDPMSDeactivated() --onDPMSDeactivated method.
Will be called when energysaving/DPMS is turned off
"""
pass
def onScanFinished(self, library):
"""
onScanFinished(library)--onScanFinished method.
library : video/music as string
        Will be called when a library scan has ended; 'library' is 'video'
        or 'music', indicating which library has been scanned
"""
pass
def onScanStarted(self, library):
"""
onScanStarted(library)--onScanStarted method.
library : video/music as string
        Will be called when a library scan has started; 'library' is 'video'
        or 'music', indicating which library is being scanned
"""
pass
def waitForAbort(self, timeout):
"""
waitForAbort([timeout]) -- Block until abort is requested, or until timeout occurs.
        If an abort request has already been made, return immediately.
        Returns True when abort has been requested, False if a timeout is given and the operation times out.
:param timeout: float - (optional) timeout in seconds. Default: no timeout.
:return: bool
"""
return bool
def abortRequested(self):
"""
Returns True if abort has been requested.
"""
return bool
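# A minimal service-loop sketch using the Monitor methods above
# ('do_periodic_work' is a hypothetical placeholder, not part of this API):
#   monitor = xbmc.Monitor()
#   while not monitor.abortRequested():
#       if monitor.waitForAbort(10):  # sleep up to 10s, waking early on abort
#           break                     # abort requested, so exit the loop
#       do_periodic_work()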
class RenderCapture(object):
def capture(self, width, height, flags=0):
"""
capture(width, height [, flags])--issue capture request.
width : Width capture image should be rendered to
height : Height capture image should should be rendered to
flags : Optional. Flags that control the capture processing.
The value for 'flags' could be or'ed from the following constants:
- xbmc.CAPTURE_FLAG_CONTINUOUS : after a capture is done, issue a new capture request immediately
        - xbmc.CAPTURE_FLAG_IMMEDIATELY : read out immediately when capture() is called; this can cause a busy wait
"""
pass
def getAspectRatio(self):
"""
getAspectRatio() --returns aspect ratio of currently displayed video as a float number.
"""
return float
def getCaptureState(self):
"""
getCaptureState() --returns processing state of capture request.
The returned value could be compared against the following constants:
- xbmc.CAPTURE_STATE_WORKING : Capture request in progress.
        - xbmc.CAPTURE_STATE_DONE : Capture request done. The image could be retrieved with getImage()
- xbmc.CAPTURE_STATE_FAILED : Capture request failed.
"""
return int
def getHeight(self):
"""
getHeight() --returns height of captured image.
"""
return int
def getImage(self):
"""
getImage() --returns captured image as a bytearray.
        The size of the image is getWidth() * getHeight() * 4
"""
return bytearray
def getImageFormat(self):
"""
getImageFormat() --returns format of captured image: 'BGRA' or 'RGBA'.
"""
return str
def getWidth(self):
"""
getWidth() --returns width of captured image.
"""
return int
def waitForCaptureStateChangeEvent(self, msecs=0):
"""
waitForCaptureStateChangeEvent([msecs])--wait for capture state change event.
msecs : Milliseconds to wait. Waits forever if not specified.
The method will return 1 if the Event was triggered. Otherwise it will return 0.
"""
return int
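# A minimal capture sketch using the RenderCapture methods above
# (hypothetical usage; assumes video is currently playing):
#   rc = xbmc.RenderCapture()
#   rc.capture(320, 180)                      # request a 320x180 frame
#   rc.waitForCaptureStateChangeEvent(1000)   # wait up to one second
#   if rc.getCaptureState() == xbmc.CAPTURE_STATE_DONE:
#       pixels = rc.getImage()                # len == 320 * 180 * 4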
#noinspection PyUnusedLocal
def audioResume():
"""
audioResume()--Resume Audio engine.
example: xbmc.audioResume()
"""
pass
def audioSuspend():
"""
audioSuspend()--Suspend Audio engine.
example:
- xbmc.audioSuspend()
"""
pass
def convertLanguage(language, format):
"""
convertLanguage(language, format)--Returns the given language converted to the given format as a string.
    language: string either as name in English, two letter code (ISO 639-1), or three letter code (ISO 639-2/T or ISO 639-2/B)
format: format of the returned language string
xbmc.ISO_639_1: two letter code as defined in ISO 639-1
xbmc.ISO_639_2: three letter code as defined in ISO 639-2/T or ISO 639-2/B
xbmc.ENGLISH_NAME: full language name in English (default)
example:
    - language = xbmc.convertLanguage('English', xbmc.ISO_639_2)
"""
return str
def enableNavSounds(yesNo):
"""
enableNavSounds(yesNo)--Enables/Disables nav sounds
    yesNo : bool - enable (True) or disable (False) nav sounds
example:
- xbmc.enableNavSounds(True)
"""
pass
def executeJSONRPC(jsonrpccommand):
"""
    executeJSONRPC(jsonrpccommand)--Execute a JSON-RPC command.
jsonrpccommand : string - jsonrpc command to execute.
List of commands - http://wiki.xbmc.org/?title=JSON-RPC_API
example:
- response = xbmc.executeJSONRPC('{ "jsonrpc": "2.0", "method": "JSONRPC.Introspect", "id": 1 }')
"""
return str
def executebuiltin(function, wait=False):
"""
executebuiltin(function)--Execute a built in XBMC function.
function : string - builtin function to execute.
List of functions - http://wiki.xbmc.org/?title=List_of_Built_In_Functions
example:
- xbmc.executebuiltin('XBMC.RunXBE(c:\avalaunch.xbe)')
"""
pass
def executescript(script):
"""
executescript(script)--Execute a python script.
script : string - script filename to execute.
example:
- xbmc.executescript('special://home/scripts/update.py')
"""
pass
def getCacheThumbName(path):
"""
getCacheThumbName(path)--Returns a thumb cache filename.
path : string or unicode - path to file
example:
- thumb = xbmc.getCacheThumbName('f:\videos\movie.avi')
"""
return str
def getCleanMovieTitle(path, usefoldername=False):
"""
getCleanMovieTitle(path[, usefoldername])--Returns a clean movie title and year string if available.
path : string or unicode - String to clean
    usefoldername : [opt] bool - use folder names (defaults to false)
example:
- title, year = xbmc.getCleanMovieTitle('/path/to/moviefolder/test.avi', True)
"""
return tuple
def getCondVisibility(condition):
"""
getCondVisibility(condition)--Returns True (1) or False (0) as a bool.
condition : string - condition to check.
    List of Conditions - http://wiki.xbmc.org/?title=List_of_Boolean_Conditions
*Note, You can combine two (or more) of the above settings by using "+" as an AND operator,
"|" as an OR operator, "!" as a NOT operator, and "[" and "]" to bracket expressions.
example:
- visible = xbmc.getCondVisibility('[Control.IsVisible(41) + !Control.IsVisible(12)]')
"""
return bool
def getDVDState():
"""
getDVDState()--Returns the dvd state as an integer.
return values are:
- 1 : xbmc.DRIVE_NOT_READY
- 16 : xbmc.TRAY_OPEN
- 64 : xbmc.TRAY_CLOSED_NO_MEDIA
- 96 : xbmc.TRAY_CLOSED_MEDIA_PRESENT
example:
- dvdstate = xbmc.getDVDState()
"""
return long
def getFreeMem():
"""
getFreeMem()--Returns the amount of free memory in MB as an integer.
example:
- freemem = xbmc.getFreeMem()
"""
return long
def getGlobalIdleTime():
"""
getGlobalIdleTime()--Returns the elapsed idle time in seconds as an integer.
example:
- t = xbmc.getGlobalIdleTime()
"""
return long
def getIPAddress():
"""
getIPAddress()--Returns the current ip address as a string.
example:
- ip = xbmc.getIPAddress()
"""
return str
def getInfoImage(infotag):
"""
getInfoImage(infotag)--Returns a filename including path to the InfoImage's thumbnail as a string.
infotag : string - infotag for value you want returned.
    List of InfoTags - http://wiki.xbmc.org/?title=InfoLabels
example:
- filename = xbmc.getInfoImage('Weather.Conditions')
"""
return str
def getInfoLabel(infotag):
"""
getInfoLabel(infotag)--Returns an InfoLabel as a string.
infotag : string - infoTag for value you want returned.
    List of InfoTags - http://wiki.xbmc.org/?title=InfoLabels
example:
- label = xbmc.getInfoLabel('Weather.Conditions')
"""
return str
def getLanguage(format=ENGLISH_NAME, region=False):
"""
getLanguage([format], [region])--Returns the active language as a string.
format: [opt] format of the returned language string
- xbmc.ISO_639_1: two letter code as defined in ISO 639-1
- xbmc.ISO_639_2: three letter code as defined in ISO 639-2/T or ISO 639-2/B
- xbmc.ENGLISH_NAME: full language name in English (default)
region: [opt] append the region delimited by "-" of the language (setting) to the returned language string
example:
- language = xbmc.getLanguage(xbmc.ENGLISH_NAME)
"""
return str
def getLocalizedString(id):
"""
getLocalizedString(id)--Returns a localized 'unicode string'.
id : integer - id# for string you want to localize.
*Note, See strings.po in language folders for which id
you need for a string.
example:
- locstr = xbmc.getLocalizedString(6)
"""
return unicode
def getRegion(id):
"""
getRegion(id)--Returns your regions setting as a string for the specified id.
id : string - id of setting to return
    *Note, choices are (dateshort, datelong, time, meridiem, tempunit, speedunit). You can use the above as keywords for arguments.
example:
- date_long_format = xbmc.getRegion('datelong')
"""
return str
def getSkinDir():
"""
getSkinDir()--Returns the active skin directory as a string.
*Note, This is not the full path like 'special://home/addons/MediaCenter', but only 'MediaCenter'.
example:
- skindir = xbmc.getSkinDir()
"""
return str
def getSupportedMedia(mediaType):
"""
getSupportedMedia(media)--Returns the supported file types for the specific media as a string.
media : string - media type
    *Note, media type can be (video, music, picture). The return value is a pipe-separated string of filetypes (eg. '.mov|.avi').
You can use the above as keywords for arguments.
example:
- mTypes = xbmc.getSupportedMedia('video')
"""
return str
def log(msg, level=LOGNOTICE):
"""
log(msg[, level])--Write a string to XBMC's log file and the debug window.
msg : string - text to output.
    level : [opt] integer - log level to output at. (default=LOGNOTICE)
*Note, You can use the above as keywords for arguments and skip certain optional arguments.
Once you use a keyword, all following arguments require the keyword.
Text is written to the log for the following conditions.
    XBMC loglevel == -1 (NONE, nothing at all is logged)
    XBMC loglevel == 0 (NORMAL, shows LOGNOTICE, LOGERROR, LOGSEVERE and LOGFATAL)
    XBMC loglevel == 1 (DEBUG, shows all)
See pydocs for valid values for level.
example:
    - xbmc.log(msg='This is a test string.', level=xbmc.LOGDEBUG)
"""
pass
def makeLegalFilename(filename, fatX=True):
"""
makeLegalFilename(filename[, fatX])--Returns a legal filename or path as a string.
filename : string or unicode - filename/path to make legal
fatX : [opt] bool - True=Xbox file system(Default)
*Note, If fatX is true you should pass a full path. If fatX is false only pass the basename of the path.
You can use the above as keywords for arguments and skip certain optional arguments.
Once you use a keyword, all following arguments require the keyword.
example:
    - filename = xbmc.makeLegalFilename('F:\Trailers\Ice Age: The Meltdown.avi')
"""
return str
def playSFX(filename, useCached=True):
"""
playSFX(filename,[useCached])--Plays a wav file by filename
filename : string - filename of the wav file to play.
useCached : [opt] bool - False = Dump any previously cached wav associated with filename
example:
- xbmc.playSFX('special://xbmc/scripts/dingdong.wav')
- xbmc.playSFX('special://xbmc/scripts/dingdong.wav',False)
"""
pass
def stopSFX():
"""
stopSFX() -- Stops wav file
example:
- xbmc.stopSFX()
"""
def restart():
"""
    restart()--Restart the htpc.
    example:
- xbmc.restart()
"""
pass
def shutdown():
"""
    shutdown()--Shutdown the htpc.
example:
- xbmc.shutdown()
"""
pass
def skinHasImage(image):
"""
skinHasImage(image)--Returns True if the image file exists in the skin.
image : string - image filename
*Note, If the media resides in a subfolder include it.
    (eg. home-myfiles\home-myfiles2.png). You can use the above as keywords for arguments.
example:
- exists = xbmc.skinHasImage('ButtonFocusedTexture.png')
"""
return bool
def sleep(timemillis):
"""
sleep(time)--Sleeps for 'time' msec.
time : integer - number of msec to sleep.
    *Note, This is useful if you have, for example, a Player class that is waiting
for onPlayBackEnded() calls.
Throws: PyExc_TypeError, if time is not an integer.
example:
- xbmc.sleep(2000) # sleeps for 2 seconds
"""
pass
def startServer(iTyp, bStart, bWait=False):
"""
startServer(typ, bStart, bWait)--start or stop a server.
typ : integer - use SERVER_* constants
bStart : bool - start (True) or stop (False) a server
bWait : [opt] bool - wait on stop before returning (not supported by all servers)
returnValue : bool - True or False
example:
- xbmc.startServer(xbmc.SERVER_AIRPLAYSERVER, False)
"""
pass
def translatePath(path):
"""
translatePath(path)--Returns the translated path.
path : string or unicode - Path to format
*Note, Only useful if you are coding for both Linux and Windows.
e.g. Converts 'special://masterprofile/script_data' -> '/home/user/XBMC/UserData/script_data' on Linux.
example:
- fpath = xbmc.translatePath('special://masterprofile/script_data')
"""
return unicode
def validatePath(path):
"""
validatePath(path)--Returns the validated path.
path : string or unicode - Path to format
*Note, Only useful if you are coding for both Linux and Windows for fixing slash problems.
e.g. Corrects 'Z://something' -> 'Z:'
example:
- fpath = xbmc.validatePath(somepath)
"""
return unicode
| mit | -5,947,889,436,594,857,000 | 25.545079 | 129 | 0.615119 | false |
vIiRuS/Lagerregal | mail/tests.py | 1 | 2753 | import unittest
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from django.contrib.contenttypes.models import ContentType
from model_mommy import mommy
from mail.models import MailTemplate, MailTemplateRecipient, MailHistory
from users.models import Lageruser
class TestMailTemplate(TestCase):
def setUp(self):
self.client = Client()
Lageruser.objects.create_superuser("test", "[email protected]", "test")
self.client.login(username="test", password="test")
def test_template_creation(self):
template = mommy.make(MailTemplate)
self.assertEqual(str(template), template.name)
self.assertEqual(template.get_absolute_url(), reverse('mail-detail', kwargs={'pk': template.pk}))
self.assertEqual(template.get_edit_url(), reverse('mail-edit', kwargs={'pk': template.pk}))
def test_list_view(self):
response = self.client.get('/mails/')
self.assertEqual(response.status_code, 200)
def test_create_view(self):
response = self.client.get('/mails/add')
self.assertEqual(response.status_code, 200)
def test_detail_view(self):
template = mommy.make(MailTemplate)
response = self.client.get('/mails/view/%i' % template.pk)
self.assertEqual(response.status_code, 200)
def test_update_view(self):
template = mommy.make(MailTemplate)
response = self.client.get('/mails/edit/%i' % template.pk)
self.assertEqual(response.status_code, 200)
def test_delete_view(self):
template = mommy.make(MailTemplate)
response = self.client.get('/mails/delete/%i' % template.pk)
self.assertEqual(response.status_code, 200)
class TestMailTemplateRecipient(TestCase):
def setUp(self):
self.client = Client()
Lageruser.objects.create_superuser("test", "[email protected]", "test")
self.client.login(username="test", password="test")
@unittest.skip("first figuring out strange behaviour of content_object")
def test_template_creation(self):
con = mommy.make(ContentType)
rec = mommy.make(MailTemplateRecipient, content_type=con)
        self.assertEqual(str(rec), rec.content_type.name + ": " + str(rec.content_object))
class TestMailHistory(TestCase):
def setUp(self):
self.client = Client()
Lageruser.objects.create_superuser("test", "[email protected]", "test")
self.client.login(username="test", password="test")
@unittest.skip("find out why url does not exist")
def test_mail_history_creation(self):
hist = mommy.make(MailHistory)
self.assertEqual(hist.get_absolute_url(), reverse('mailhistory-detail', kwargs={'pk': hist.pk}))
| bsd-3-clause | 2,042,978,328,537,801,700 | 37.236111 | 105 | 0.682528 | false |
elopio/snapcraft | tests/integration/plugins/test_rust_plugin.py | 1 | 5455 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Marius Gripsgard ([email protected])
# Copyright (C) 2017-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import fixtures
import testscenarios
import yaml
from testtools.matchers import Equals, FileExists, MatchesRegex, Not
from tests import integration
from tests.matchers import HasArchitecture
class RustPluginBaseTestCase(integration.TestCase):
def run_snapcraft(self, command, project_dir=None, debug=True):
try:
failed = True
super().run_snapcraft(command, project_dir, debug)
failed = False
except subprocess.CalledProcessError:
if self.deb_arch == 'arm64':
# https://github.com/rust-lang/rustup.sh/issues/82
self.expectFailure(
'The rustup script does not support arm64.',
self.assertFalse, failed)
else:
raise
class RustPluginTestCase(RustPluginBaseTestCase):
def test_stage_rust_plugin(self):
self.run_snapcraft('stage', 'rust-hello')
binary_output = self.get_output_ignoring_non_zero_exit(
os.path.join(self.stage_dir, 'bin', 'rust-hello'))
self.assertThat(binary_output, Equals('There is rust on snaps!\n'))
def test_stage_rust_with_revision(self):
self.run_snapcraft('stage', 'rust-with-revision')
binary_output = self.get_output_ignoring_non_zero_exit(
os.path.join(self.stage_dir, 'bin', 'rust-with-revision'))
self.assertIn('Rust revision: 1.12.0', binary_output)
def test_stage_rust_plugin_with_conditional_feature(self):
self.run_snapcraft('stage', 'rust-with-conditional')
binary_output = self.get_output_ignoring_non_zero_exit(
os.path.join(self.stage_dir, 'bin', 'simple-rust'))
self.assertThat(binary_output, Equals('Conditional features work!\n'))
def test_stage_rust_with_source_subdir(self):
self.run_snapcraft('stage', 'rust-subdir')
binary_output = self.get_output_ignoring_non_zero_exit(
os.path.join(self.stage_dir, 'bin', 'rust-subdir'))
self.assertThat(
binary_output,
Equals('Rust in a subdirectory works\n'))
# Test for bug https://bugs.launchpad.net/snapcraft/+bug/1654764
self.assertThat('Cargo.lock', Not(FileExists()))
def test_stage_rust_with_source_and_source_subdir(self):
self.copy_project_to_cwd('rust-subdir')
with open('snapcraft.yaml') as snapcraft_yaml_file:
snapcraft_yaml = yaml.load(snapcraft_yaml_file)
snapcraft_yaml['parts']['rust-subdir']['source'] = '.'
snapcraft_yaml['parts']['rust-subdir']['source-subdir'] = 'subdir'
with open('snapcraft.yaml', 'w') as snapcraft_yaml_file:
yaml.dump(snapcraft_yaml, snapcraft_yaml_file)
self.run_snapcraft('pull')
self.assertThat(
os.path.join('parts', 'rust-subdir', 'src', 'subdir',
'Cargo.lock'), FileExists())
def test_cross_compiling(self):
if self.deb_arch != 'amd64':
self.skipTest('The test only handles amd64 to arm64')
self.run_snapcraft(['build', '--target-arch=arm64'],
'rust-hello')
binary = os.path.join(self.parts_dir, 'rust-hello', 'install', 'bin',
'rust-hello')
self.assertThat(binary, HasArchitecture('aarch64'))
class RustPluginConfinementTestCase(testscenarios.WithScenarios,
RustPluginBaseTestCase):
scenarios = (
('classic', dict(confinement='classic',
startswith='/snap/')),
('strict', dict(confinement='strict',
startswith='/lib')),
)
def _set_confinement(self, snapcraft_yaml_file):
with open(snapcraft_yaml_file) as f:
snapcraft_yaml = yaml.load(f)
snapcraft_yaml['confinement'] = self.confinement
with open(snapcraft_yaml_file, 'w') as f:
yaml.dump(snapcraft_yaml, f)
def test_prime(self):
if os.environ.get('ADT_TEST') and self.deb_arch == 'armhf':
self.skipTest("The autopkgtest armhf runners can't install snaps")
self.useFixture(fixtures.EnvironmentVariable(
'SNAPCRAFT_SETUP_CORE', '1'))
self.copy_project_to_cwd('rust-hello')
self._set_confinement('snapcraft.yaml')
self.run_snapcraft('prime')
bin_path = os.path.join('prime', 'bin', 'rust-hello')
interpreter = subprocess.check_output([
self.patchelf_command, '--print-interpreter', bin_path]).decode()
expected_interpreter = r'^{}.*'.format(self.startswith)
self.assertThat(interpreter, MatchesRegex(expected_interpreter))
| gpl-3.0 | -8,654,411,118,230,419,000 | 38.244604 | 78 | 0.631714 | false |
rghe/ansible | lib/ansible/modules/storage/netapp/na_ontap_lun.py | 6 | 13348 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: na_ontap_lun
short_description: Manage NetApp ONTAP luns
extends_documentation_fragment:
- netapp.na_ontap
version_added: '2.6'
author: NetApp Ansible Team ([email protected])
description:
- Create, destroy, or resize LUNs on NetApp ONTAP.
options:
state:
description:
- Whether the specified lun should exist or not.
choices: ['present', 'absent']
default: present
name:
description:
- The name of the lun to manage.
required: true
flexvol_name:
description:
- The name of the FlexVol the lun should exist on.
required: true
size:
description:
- The size of the lun in C(size_unit).
- Required when C(state=present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
force_resize:
description:
    - Forcibly reduce the size. This is required when reducing the size of the LUN
      and acts as a safeguard against shrinking it accidentally.
type: bool
default: false
force_remove:
description:
- If "true", override checks that prevent a LUN from being destroyed if it is online and mapped.
- If "false", destroying an online and mapped LUN will fail.
type: bool
default: false
force_remove_fenced:
description:
- If "true", override checks that prevent a LUN from being destroyed while it is fenced.
- If "false", attempting to destroy a fenced LUN will fail.
- The default if not specified is "false". This field is available in Data ONTAP 8.2 and later.
type: bool
default: false
vserver:
required: true
description:
- The name of the vserver to use.
ostype:
description:
- The os type for the LUN.
default: 'image'
space_reserve:
description:
- This can be set to "false" which will create a LUN without any space being reserved.
type: bool
default: True
space_allocation:
description:
    - This enables support for the SCSI Thin Provisioning features. If the host and file system do
      not support this, do not enable it.
type: bool
default: False
version_added: '2.7'
'''
EXAMPLES = """
- name: Create LUN
na_ontap_lun:
state: present
name: ansibleLUN
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: mb
ostype: linux
space_reserve: True
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Resize Lun
na_ontap_lun:
state: present
name: ansibleLUN
force_resize: True
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: gb
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppOntapLUN(object):
def __init__(self):
self._size_unit_map = dict(
bytes=1,
b=1,
kb=1024,
mb=1024 ** 2,
gb=1024 ** 3,
tb=1024 ** 4,
pb=1024 ** 5,
eb=1024 ** 6,
zb=1024 ** 7,
yb=1024 ** 8
)
self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=False, choices=['present', 'absent'], default='present'),
name=dict(required=True, type='str'),
size=dict(type='int'),
size_unit=dict(default='gb',
choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
'pb', 'eb', 'zb', 'yb'], type='str'),
force_resize=dict(default=False, type='bool'),
force_remove=dict(default=False, type='bool'),
force_remove_fenced=dict(default=False, type='bool'),
flexvol_name=dict(required=True, type='str'),
vserver=dict(required=True, type='str'),
ostype=dict(required=False, type='str', default='image'),
space_reserve=dict(required=False, type='bool', default=True),
space_allocation=dict(required=False, type='bool', default=False),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['size'])
],
supports_check_mode=True
)
parameters = self.module.params
# set up state variables
self.state = parameters['state']
self.name = parameters['name']
self.size_unit = parameters['size_unit']
if parameters['size'] is not None:
self.size = parameters['size'] * self._size_unit_map[self.size_unit]
else:
self.size = None
self.force_resize = parameters['force_resize']
self.force_remove = parameters['force_remove']
self.force_remove_fenced = parameters['force_remove_fenced']
self.flexvol_name = parameters['flexvol_name']
self.vserver = parameters['vserver']
self.ostype = parameters['ostype']
self.space_reserve = parameters['space_reserve']
self.space_allocation = parameters['space_allocation']
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=self.vserver)
def get_lun(self):
"""
Return details about the LUN
:return: Details about the lun
:rtype: dict
"""
luns = []
tag = None
while True:
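            # Page through lun-get-iter results: ONTAP hands back a
            # 'next-tag' cursor with each batch, which we resend until it
            # comes back empty.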
lun_info = netapp_utils.zapi.NaElement('lun-get-iter')
if tag:
lun_info.add_new_child('tag', tag, True)
query_details = netapp_utils.zapi.NaElement('lun-info')
query_details.add_new_child('vserver', self.vserver)
query_details.add_new_child('volume', self.flexvol_name)
query = netapp_utils.zapi.NaElement('query')
query.add_child_elem(query_details)
lun_info.add_child_elem(query)
result = self.server.invoke_successfully(lun_info, True)
if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1:
attr_list = result.get_child_by_name('attributes-list')
luns.extend(attr_list.get_children())
tag = result.get_child_content('next-tag')
if tag is None:
break
# The LUNs have been extracted.
# Find the specified lun and extract details.
return_value = None
for lun in luns:
path = lun.get_child_content('path')
_rest, _splitter, found_name = path.rpartition('/')
if found_name == self.name:
size = lun.get_child_content('size')
# Find out if the lun is attached
attached_to = None
lun_id = None
if lun.get_child_content('mapped') == 'true':
lun_map_list = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-map-list-info', **{'path': path})
result = self.server.invoke_successfully(
lun_map_list, enable_tunneling=True)
igroups = result.get_child_by_name('initiator-groups')
if igroups:
for igroup_info in igroups.get_children():
igroup = igroup_info.get_child_content(
'initiator-group-name')
attached_to = igroup
lun_id = igroup_info.get_child_content('lun-id')
return_value = {
'name': found_name,
'size': size,
'attached_to': attached_to,
'lun_id': lun_id
}
else:
continue
return return_value
def create_lun(self):
"""
Create LUN with requested name and size
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_create = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-create-by-size', **{'path': path,
'size': str(self.size),
'ostype': self.ostype,
'space-reservation-enabled': str(self.space_reserve),
'space-allocation-enabled': str(self.space_allocation)})
try:
self.server.invoke_successfully(lun_create, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg="Error provisioning lun %s of size %s: %s" % (self.name, self.size, to_native(e)),
exception=traceback.format_exc())
def delete_lun(self):
"""
Delete requested LUN
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-destroy', **{'path': path,
'force': str(self.force_remove),
'destroy-fenced-lun':
str(self.force_remove_fenced)})
try:
self.server.invoke_successfully(lun_delete, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
self.module.fail_json(msg="Error deleting lun %s: %s" % (path, to_native(e)),
exception=traceback.format_exc())
def resize_lun(self):
"""
Resize requested LUN.
:return: True if LUN was actually re-sized, false otherwise.
:rtype: bool
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_resize = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-resize', **{'path': path,
'size': str(self.size),
'force': str(self.force_resize)})
try:
self.server.invoke_successfully(lun_resize, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as e:
if to_native(e.code) == "9042":
# Error 9042 denotes the new LUN size being the same as the
# old LUN size. This happens when there's barely any difference
# in the two sizes. For example, from 8388608 bytes to
# 8194304 bytes. This should go away if/when the default size
# requested/reported to/from the controller is changed to a
# larger unit (MB/GB/TB).
return False
else:
self.module.fail_json(msg="Error resizing lun %s: %s" % (path, to_native(e)),
exception=traceback.format_exc())
return True
def apply(self):
property_changed = False
size_changed = False
lun_exists = False
netapp_utils.ems_log_event("na_ontap_lun", self.server)
lun_detail = self.get_lun()
if lun_detail:
lun_exists = True
current_size = lun_detail['size']
if self.state == 'absent':
property_changed = True
elif self.state == 'present':
if not int(current_size) == self.size:
size_changed = True
property_changed = True
else:
if self.state == 'present':
property_changed = True
if property_changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
if not lun_exists:
self.create_lun()
else:
if size_changed:
# Ensure that size was actually changed. Please
# read notes in 'resize_lun' function for details.
size_changed = self.resize_lun()
if not size_changed:
property_changed = False
elif self.state == 'absent':
self.delete_lun()
changed = property_changed or size_changed
# TODO: include other details about the lun (size, etc.)
self.module.exit_json(changed=changed)
def main():
v = NetAppOntapLUN()
v.apply()
if __name__ == '__main__':
main()
| gpl-3.0 | -1,568,279,743,045,698,000 | 31.876847 | 120 | 0.548247 | false |
laserson/hdfs | test/test_util.py | 1 | 2041 | #!/usr/bin/env python
# encoding: utf-8
"""Test Hdfs client interactions with HDFS."""
from hdfs.util import *
from nose.tools import eq_, raises
import os
class TestConfig(object):
def test_rcpath(self):
rcpath = os.getenv('HDFSCLI_RCPATH')
try:
with temppath() as tpath:
os.environ['HDFSCLI_RCPATH'] = tpath
with open(tpath, 'w') as writer:
writer.write('[foo]\nbar=hello')
eq_(Config().parser.get('foo', 'bar'), 'hello')
finally:
if rcpath:
        os.environ['HDFSCLI_RCPATH'] = rcpath
else:
        os.environ.pop('HDFSCLI_RCPATH', None)  # clears os.environ as well as the C environment
def test_get_alias(self):
with temppath() as tpath:
with open(tpath, 'w') as writer:
writer.write('[foo_alias]\nurl=1\nauth=k\nroot=2\n')
config = Config(tpath)
eq_(
config.get_alias('foo'),
{'url': '1', 'auth': 'k', 'root': '2'}
)
def test_get_alias_defaults(self):
with temppath() as tpath:
with open(tpath, 'w') as writer:
writer.write('[foo_alias]\nurl=1\n')
config = Config(tpath)
eq_(
config.get_alias('foo'),
{'url': '1'},
)
@raises(HdfsError)
def test_missing_alias(self):
with temppath() as tpath:
with open(tpath, 'w') as writer:
writer.write('[foo_alias]\nurl=1\n')
Config(tpath).get_alias('bar')
def test_parse_boolean(self):
eq_(Config.parse_boolean(True), True)
eq_(Config.parse_boolean(False), False)
eq_(Config.parse_boolean(''), False)
eq_(Config.parse_boolean('False'), False)
eq_(Config.parse_boolean('true'), True)
eq_(Config.parse_boolean('yes'), True)
eq_(Config.parse_boolean(None), False)
class TestHuman(object):
def test_hsize(self):
eq_(hsize(0), ' 0 B')
eq_(hsize(1023), '1023 B')
eq_(hsize(1024), ' 1kB')
def test_htime(self):
eq_(htime(0), ' 0.0s')
eq_(htime(50), '50.0s')
eq_(htime(60), ' 1.0m')
eq_(htime(90), ' 1.5m')
eq_(htime(3600), ' 1.0h')
eq_(htime(3600 * 24 * 7 * 4 * 12 * 24), '24.0Y')
| mit | -8,508,430,482,940,297,000 | 25.506494 | 60 | 0.571779 | false |
fwpz/WeiPython | wechat/wechatUtil.py | 2 | 3806 | # -*- coding:utf-8 -*-
"""
# Author: Pegasus Wang ([email protected], http://ningning.today)
# Created Time : Wed Feb 18 18:18:10 2015
# File Name: wechatUtil.py
# Description:
# :copyright: (c) 2015 by Pegasus Wang.
# :license: MIT, see LICENSE for more details.
"""
import hashlib
from lxml import etree
def checkSignature(request):
"""check signature.
    :param request: the GET verification request sent by the WeChat server
    :return: True if the signature check succeeds, otherwise False
"""
signature = request.GET.get(u'signature', None)
timestamp = request.GET.get(u'timestamp', None)
nonce = request.GET.get(u'nonce', None)
token = u'pegasuswang' # your wechat token
tmplist = [token, timestamp, nonce]
tmplist.sort()
tmpstr = '%s%s%s' % tuple(tmplist)
tmpstr = hashlib.sha1(tmpstr).hexdigest()
if tmpstr == signature:
return True
else:
return False
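# Signature sketch: WeChat sorts [token, timestamp, nonce] lexicographically,
# concatenates them and takes the SHA1 hex digest. For example, with the
# hypothetical values token='pegasuswang', timestamp='1423456789', nonce='42':
#   hashlib.sha1('1423456789' + '42' + 'pegasuswang').hexdigest()
# must equal the 'signature' query parameter sent by the WeChat server.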
class MessageUtil(object):
"""MessageUtil has some methods to process message."""
# request message types
REQ_MESSAGE_TYPE_TEXT = u'text'
REQ_MESSAGE_TYPE_IMAGE = u'image'
REQ_MESSAGE_TYPE_VOICE = u'voice'
REQ_MESSAGE_TYPE_VIDEO = u'video'
REQ_MESSAGE_TYPE_LOCATION = u'location'
REQ_MESSAGE_TYPE_LINK = u'link'
REQ_MESSAGE_TYPE_EVENT = u'event'
# event types
EVENT_TYPE_SUBSCRIBE = u'subscribe'
EVENT_TYPE_UNSUBSCRIBE = u'unsubscribe'
EVENT_TYPE_SCAN = u'scan'
EVENT_TYPE_LOCATION = u'LOCATION'
EVENT_TYPE_CLICK = u'CLICK'
# reply message types
RESP_MESSAGE_TYPE_TEXT = u'text'
RESP_MESSAGE_TYPE_IMAGE = u'image'
RESP_MESSAGE_TYPE_VOICE = u'voice'
RESP_MESSAGE_TYPE_VIDEO = u'video'
RESP_MESSAGE_TYPE_MUSIC = u'music'
RESP_MESSAGE_TYPE_NEWS = u'news'
# message types
MESSAGETYPE = [u'Image', u'Voice', u'Video', u'Music', u'Articles']
@staticmethod
def parseXml(request):
"""parse request post xml message.
:param request: post request
:return: dict of xml message
"""
raw_xml = request.body.decode(u'UTF-8')
xmlstr = etree.fromstring(raw_xml)
dict_xml = {}
for child in xmlstr:
            dict_xml[child.tag] = child.text.encode(u'UTF-8')  # element text arrives as unicode; store UTF-8 bytes
return dict_xml
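    # Example: a WeChat text message such as
    #   <xml><MsgType><![CDATA[text]]></MsgType>
    #        <Content><![CDATA[hello]]></Content>
    #        <CreateTime>1423456789</CreateTime></xml>
    # parses to {'MsgType': 'text', 'Content': 'hello',
    #            'CreateTime': '1423456789'} (values as UTF-8 byte strings).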
@staticmethod
def class2xml(obj):
"""convert reply message class to xml.
:param obj: reply message class' object
:return: xml of the object
"""
root = etree.Element(u'xml')
for key, value in vars(obj).items():
if key in MessageUtil.MESSAGETYPE:
tmproot = etree.SubElement(root, key)
                if key == u'Articles':  # news replies wrap each article in an <item> element
                    for eachArticle in value:
                        item = etree.SubElement(tmproot, u'item')
                        for tmpkey, tmpvalue in vars(eachArticle).items():
                            tmpkey_ele = etree.SubElement(item, tmpkey)
                            tmpkey_ele.text = etree.CDATA(unicode(tmpvalue))
else:
for tmpkey, tmpvalue in vars(obj.__getattribute__(key)).items():
tmpkey_ele = etree.SubElement(tmproot, tmpkey)
if u'time' in tmpkey.lower() or u'count' in tmpkey.lower():
tmpkey_ele.text = unicode(tmpvalue)
else: # CDATA tag for str
tmpkey_ele.text = etree.CDATA(unicode(tmpvalue))
else:
if u'time' in key.lower() or u'count' in key.lower():
etree.SubElement(root, key).text = unicode(value)
else:
etree.SubElement(root, key).text = etree.CDATA(unicode(value))
return etree.tostring(root, pretty_print=True, xml_declaration=False, encoding=u'utf-8')
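    # Example (hypothetical reply object): attributes CreateTime=1423456789
    # and Content=u'hi' serialize to
    #   <xml>
    #     <CreateTime>1423456789</CreateTime>
    #     <Content><![CDATA[hi]]></Content>
    #   </xml>
    # (time/count fields are plain text, everything else is CDATA-wrapped).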
| mit | -5,027,645,883,042,683,000 | 32.385965 | 96 | 0.588019 | false |
citrix-openstack-build/glance | glance/db/sqlalchemy/api.py | 1 | 39768 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010-2011 OpenStack LLC.
# Copyright 2012 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Defines interface for DB access
"""
import logging
import time
from oslo.config import cfg
import sqlalchemy
import sqlalchemy.orm as sa_orm
import sqlalchemy.sql as sa_sql
from glance.common import exception
from glance.db.sqlalchemy import migration
from glance.db.sqlalchemy import models
import glance.openstack.common.log as os_logging
from glance.openstack.common import timeutils
_ENGINE = None
_MAKER = None
_MAX_RETRIES = None
_RETRY_INTERVAL = None
BASE = models.BASE
sa_logger = None
LOG = os_logging.getLogger(__name__)
STATUSES = ['active', 'saving', 'queued', 'killed', 'pending_delete',
'deleted']
sql_connection_opt = cfg.StrOpt('sql_connection',
default='sqlite:///glance.sqlite',
secret=True,
metavar='CONNECTION',
help=_('A valid SQLAlchemy connection '
'string for the registry database. '
'Default: %(default)s'))
db_opts = [
cfg.IntOpt('sql_idle_timeout', default=3600,
help=_('Period in seconds after which SQLAlchemy should '
'reestablish its connection to the database.')),
cfg.IntOpt('sql_max_retries', default=60,
help=_('The number of times to retry a connection to the SQL'
'server.')),
cfg.IntOpt('sql_retry_interval', default=1,
help=_('The amount of time to wait (in seconds) before '
'attempting to retry the SQL connection.')),
cfg.BoolOpt('db_auto_create', default=False,
help=_('A boolean that determines if the database will be '
'automatically created.')),
cfg.BoolOpt('sqlalchemy_debug', default=False,
help=_('Enable debug logging in sqlalchemy which prints '
'every query and result'))
]
CONF = cfg.CONF
CONF.register_opt(sql_connection_opt)
CONF.register_opts(db_opts)
CONF.import_opt('debug', 'glance.openstack.common.log')
def add_cli_options():
"""Allows passing sql_connection as a CLI argument."""
# NOTE(flaper87): Find a better place / way for this.
CONF.unregister_opt(sql_connection_opt)
CONF.register_cli_opt(sql_connection_opt)
def _ping_listener(dbapi_conn, connection_rec, connection_proxy):
"""
Ensures that MySQL connections checked out of the
pool are alive.
Borrowed from:
http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
"""
try:
dbapi_conn.cursor().execute('select 1')
except dbapi_conn.OperationalError as ex:
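        # These MySQL client error codes all indicate a dead connection
        # (e.g. 2006 "server has gone away", 2013 "lost connection during
        # query"), so raising DisconnectionError makes the pool reconnect.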
if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
msg = 'Got mysql server has gone away: %s' % ex
LOG.warn(msg)
raise sqlalchemy.exc.DisconnectionError(msg)
else:
raise
def setup_db_env():
"""
    Set up the global configuration for the database.
"""
global sa_logger, _IDLE_TIMEOUT, _MAX_RETRIES, _RETRY_INTERVAL, _CONNECTION
_IDLE_TIMEOUT = CONF.sql_idle_timeout
_MAX_RETRIES = CONF.sql_max_retries
_RETRY_INTERVAL = CONF.sql_retry_interval
_CONNECTION = CONF.sql_connection
sa_logger = logging.getLogger('sqlalchemy.engine')
if CONF.sqlalchemy_debug:
sa_logger.setLevel(logging.DEBUG)
def clear_db_env():
"""
Unset global configuration variables for database.
"""
global _ENGINE, _MAKER, _MAX_RETRIES, _RETRY_INTERVAL, _CONNECTION
_ENGINE = None
_MAKER = None
_MAX_RETRIES = None
_RETRY_INTERVAL = None
def _check_mutate_authorization(context, image_ref):
if not is_image_mutable(context, image_ref):
LOG.info(_("Attempted to modify image user did not own."))
msg = _("You do not own this image")
if image_ref.is_public:
exc_class = exception.ForbiddenPublicImage
else:
exc_class = exception.Forbidden
raise exc_class(msg)
def _get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session"""
global _MAKER
if not _MAKER:
get_engine()
_get_maker(autocommit, expire_on_commit)
assert(_MAKER)
session = _MAKER()
return session
def get_engine():
"""Return a SQLAlchemy engine."""
"""May assign _ENGINE if not already assigned"""
global _ENGINE, sa_logger, _CONNECTION, _IDLE_TIMEOUT, _MAX_RETRIES,\
_RETRY_INTERVAL
if not _ENGINE:
tries = _MAX_RETRIES
retry_interval = _RETRY_INTERVAL
connection_dict = sqlalchemy.engine.url.make_url(_CONNECTION)
engine_args = {
'pool_recycle': _IDLE_TIMEOUT,
'echo': False,
'convert_unicode': True}
try:
_ENGINE = sqlalchemy.create_engine(_CONNECTION, **engine_args)
if 'mysql' in connection_dict.drivername:
sqlalchemy.event.listen(_ENGINE, 'checkout', _ping_listener)
_ENGINE.connect = _wrap_db_error(_ENGINE.connect)
_ENGINE.connect()
except Exception as err:
msg = _("Error configuring registry database with supplied "
"sql_connection. Got error: %s") % err
LOG.error(msg)
raise
sa_logger = logging.getLogger('sqlalchemy.engine')
if CONF.sqlalchemy_debug:
sa_logger.setLevel(logging.DEBUG)
if CONF.db_auto_create:
LOG.info(_('auto-creating glance registry DB'))
models.register_models(_ENGINE)
try:
migration.version_control()
except exception.DatabaseMigrationError:
# only arises when the DB exists and is under version control
pass
else:
LOG.info(_('not auto-creating glance registry DB'))
return _ENGINE
def _get_maker(autocommit=True, expire_on_commit=False):
"""Return a SQLAlchemy sessionmaker."""
"""May assign __MAKER if not already assigned"""
global _MAKER, _ENGINE
assert _ENGINE
if not _MAKER:
_MAKER = sa_orm.sessionmaker(bind=_ENGINE,
autocommit=autocommit,
expire_on_commit=expire_on_commit)
return _MAKER
def _is_db_connection_error(args):
"""Return True if error in connecting to db."""
# NOTE(adam_g): This is currently MySQL specific and needs to be extended
# to support Postgres and others.
conn_err_codes = ('2002', '2003', '2006')
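    # 2002/2003: cannot reach the server (socket / host), 2006: server
    # has gone away -- all symptoms of a connection-level failure.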
for err_code in conn_err_codes:
if args.find(err_code) != -1:
return True
return False
def _wrap_db_error(f):
"""Retry DB connection. Copied from nova and modified."""
def _wrap(*args, **kwargs):
try:
return f(*args, **kwargs)
except sqlalchemy.exc.OperationalError as e:
if not _is_db_connection_error(e.args[0]):
raise
remaining_attempts = _MAX_RETRIES
while True:
LOG.warning(_('SQL connection failed. %d attempts left.'),
remaining_attempts)
remaining_attempts -= 1
time.sleep(_RETRY_INTERVAL)
try:
return f(*args, **kwargs)
except sqlalchemy.exc.OperationalError as e:
if (remaining_attempts == 0 or
not _is_db_connection_error(e.args[0])):
raise
except sqlalchemy.exc.DBAPIError:
raise
except sqlalchemy.exc.DBAPIError:
raise
_wrap.func_name = f.func_name
return _wrap
def image_create(context, values):
"""Create an image from the values dictionary."""
return _image_update(context, values, None, False)
def image_update(context, image_id, values, purge_props=False):
"""
Set the given properties on an image and update it.
:raises NotFound if image does not exist.
"""
return _image_update(context, values, image_id, purge_props)
def image_destroy(context, image_id):
"""Destroy the image or raise if it does not exist."""
session = _get_session()
with session.begin():
image_ref = _image_get(context, image_id, session=session)
# Perform authorization check
_check_mutate_authorization(context, image_ref)
image_ref.delete(session=session)
delete_time = image_ref.deleted_at
_image_locations_delete_all(context, image_ref.id, delete_time,
session)
_image_property_delete_all(context, image_id, delete_time, session)
_image_member_delete_all(context, image_id, delete_time, session)
_image_tag_delete_all(context, image_id, delete_time, session)
return _normalize_locations(image_ref)
def _normalize_locations(image):
undeleted_locations = filter(lambda x: not x.deleted, image['locations'])
image['locations'] = [{'url': loc['value'],
'metadata': loc['meta_data']}
for loc in undeleted_locations]
return image
def image_get(context, image_id, session=None, force_show_deleted=False):
image = _image_get(context, image_id, session=session,
force_show_deleted=force_show_deleted)
image = _normalize_locations(image.to_dict())
return image
def _image_get(context, image_id, session=None, force_show_deleted=False):
"""Get an image or raise if it does not exist."""
session = session or _get_session()
try:
query = session.query(models.Image)\
.options(sa_orm.joinedload(models.Image.properties))\
.options(sa_orm.joinedload(models.Image.locations))\
.filter_by(id=image_id)
# filter out deleted images if context disallows it
if not force_show_deleted and not _can_show_deleted(context):
query = query.filter_by(deleted=False)
image = query.one()
except sa_orm.exc.NoResultFound:
msg = (_("No image found with ID %s") % image_id)
LOG.debug(msg)
raise exception.NotFound(msg)
# Make sure they can look at it
if not is_image_visible(context, image):
msg = (_("Forbidding request, image %s not visible") % image_id)
LOG.debug(msg)
raise exception.Forbidden(msg)
return image
def is_image_mutable(context, image):
"""Return True if the image is mutable in this context."""
# Is admin == image mutable
if context.is_admin:
return True
# No owner == image not mutable
if image['owner'] is None or context.owner is None:
return False
# Image only mutable by its owner
return image['owner'] == context.owner
def is_image_sharable(context, image, **kwargs):
"""Return True if the image can be shared to others in this context."""
# Is admin == image sharable
if context.is_admin:
return True
# Only allow sharing if we have an owner
if context.owner is None:
return False
# If we own the image, we can share it
if context.owner == image['owner']:
return True
# Let's get the membership association
if 'membership' in kwargs:
membership = kwargs['membership']
if membership is None:
# Not shared with us anyway
return False
else:
members = image_member_find(context,
image_id=image['id'],
member=context.owner)
if members:
member = members[0]
else:
# Not shared with us anyway
return False
# It's the can_share attribute we're now interested in
return member['can_share']
def is_image_visible(context, image, status=None):
"""Return True if the image is visible in this context."""
# Is admin == image visible
if context.is_admin:
return True
# No owner == image visible
if image['owner'] is None:
return True
# Image is_public == image visible
if image['is_public']:
return True
# Perform tests based on whether we have an owner
if context.owner is not None:
if context.owner == image['owner']:
return True
# Figure out if this image is shared with that tenant
members = image_member_find(context,
image_id=image['id'],
member=context.owner,
status=status)
if members:
return True
# Private image
return False
def _paginate_query(query, model, limit, sort_keys, marker=None,
sort_dir=None, sort_dirs=None):
"""Returns a query with sorting / pagination criteria added.
Pagination works by requiring a unique sort_key, specified by sort_keys.
(If sort_keys is not unique, then we risk looping through values.)
We use the last row in the previous page as the 'marker' for pagination.
So we must return values that follow the passed marker in the order.
With a single-valued sort_key, this would be easy: sort_key > X.
With a compound-values sort_key, (k1, k2, k3) we must do this to repeat
the lexicographical ordering:
(k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3)
We also have to cope with different sort_directions.
Typically, the id of the last row is used as the client-facing pagination
marker, then the actual marker object must be fetched from the db and
passed in to us as marker.
:param query: the query object to which we should add paging/sorting
:param model: the ORM model class
:param limit: maximum number of items to return
:param sort_keys: array of attributes by which results should be sorted
:param marker: the last item of the previous page; we returns the next
results after this value.
:param sort_dir: direction in which results should be sorted (asc, desc)
:param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys
:rtype: sqlalchemy.orm.query.Query
:return: The query with sorting/pagination added.
"""
if 'id' not in sort_keys:
# TODO(justinsb): If this ever gives a false-positive, check
# the actual primary key, rather than assuming its id
LOG.warn(_('Id not in sort_keys; is sort_keys unique?'))
assert(not (sort_dir and sort_dirs))
# Default the sort direction to ascending
if sort_dirs is None and sort_dir is None:
sort_dir = 'asc'
# Ensure a per-column sort direction
if sort_dirs is None:
sort_dirs = [sort_dir for _sort_key in sort_keys]
assert(len(sort_dirs) == len(sort_keys))
# Add sorting
for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
sort_dir_func = {
'asc': sqlalchemy.asc,
'desc': sqlalchemy.desc,
}[current_sort_dir]
try:
sort_key_attr = getattr(model, current_sort_key)
except AttributeError:
raise exception.InvalidSortKey()
query = query.order_by(sort_dir_func(sort_key_attr))
default = '' # Default to an empty string if NULL
# Add pagination
if marker is not None:
marker_values = []
for sort_key in sort_keys:
v = getattr(marker, sort_key)
if v is None:
v = default
marker_values.append(v)
# Build up an array of sort criteria as in the docstring
criteria_list = []
for i in xrange(0, len(sort_keys)):
crit_attrs = []
for j in xrange(0, i):
model_attr = getattr(model, sort_keys[j])
attr = sa_sql.expression.case([(model_attr != None,
model_attr), ],
else_=default)
crit_attrs.append((attr == marker_values[j]))
model_attr = getattr(model, sort_keys[i])
attr = sa_sql.expression.case([(model_attr != None,
model_attr), ],
else_=default)
if sort_dirs[i] == 'desc':
crit_attrs.append((attr < marker_values[i]))
elif sort_dirs[i] == 'asc':
crit_attrs.append((attr > marker_values[i]))
else:
raise ValueError(_("Unknown sort direction, "
"must be 'desc' or 'asc'"))
criteria = sa_sql.and_(*crit_attrs)
criteria_list.append(criteria)
f = sa_sql.or_(*criteria_list)
query = query.filter(f)
if limit is not None:
query = query.limit(limit)
return query
def image_get_all(context, filters=None, marker=None, limit=None,
sort_key='created_at', sort_dir='desc',
member_status='accepted', is_public=None,
admin_as_user=False):
"""
Get all images that match zero or more filters.
:param filters: dict of filter keys and values. If a 'properties'
key is present, it is treated as a dict of key/value
filters on the image properties attribute
:param marker: image id after which to start page
:param limit: maximum number of images to return
:param sort_key: image attribute by which results should be sorted
:param sort_dir: direction in which results should be sorted (asc, desc)
:param member_status: only return shared images that have this membership
status
:param is_public: If true, return only public images. If false, return
only private and shared images.
:param admin_as_user: For backwards compatibility. If true, then return to
an admin the equivalent set of images which it would see
if it were a regular user
"""
filters = filters or {}
session = _get_session()
query_image = session.query(models.Image)
query_member = session.query(models.Image).join(models.Image.members)
    if (not context.is_admin) or admin_as_user:
visibility_filters = [models.Image.is_public == True]
member_filters = [models.ImageMember.deleted == False]
if context.owner is not None:
if member_status == 'all':
visibility_filters.extend([
models.Image.owner == context.owner])
member_filters.extend([
models.ImageMember.member == context.owner])
else:
visibility_filters.extend([
models.Image.owner == context.owner])
member_filters.extend([
models.ImageMember.member == context.owner,
models.ImageMember.status == member_status])
query_image = query_image.filter(sa_sql.or_(*visibility_filters))
query_member = query_member.filter(sa_sql.and_(*member_filters))
query = query_image.union(query_member)
if 'visibility' in filters:
visibility = filters.pop('visibility')
if visibility == 'public':
query = query.filter(models.Image.is_public == True)
elif visibility == 'private':
query = query.filter(models.Image.is_public == False)
            if context.owner is not None and ((not context.is_admin)
                                              or admin_as_user):
query = query.filter(
models.Image.owner == context.owner)
else:
query_member = query_member.filter(
models.ImageMember.member == context.owner,
models.ImageMember.deleted == False)
query = query_member
if is_public is not None:
query = query.filter(models.Image.is_public == is_public)
if 'is_public' in filters:
spec = models.Image.properties.any(name='is_public',
value=filters.pop('is_public'),
deleted=False)
query = query.filter(spec)
showing_deleted = False
if 'checksum' in filters:
checksum = filters.pop('checksum')
query = query.filter(models.Image.checksum == checksum)
if 'changes-since' in filters:
# normalize timestamp to UTC, as sqlalchemy doesn't appear to
# respect timezone offsets
changes_since = timeutils.normalize_time(filters.pop('changes-since'))
query = query.filter(models.Image.updated_at > changes_since)
showing_deleted = True
if 'deleted' in filters:
deleted_filter = filters.pop('deleted')
query = query.filter_by(deleted=deleted_filter)
showing_deleted = deleted_filter
# TODO(bcwaldon): handle this logic in registry server
if not deleted_filter:
query = query.filter(models.Image.status != 'killed')
for (k, v) in filters.pop('properties', {}).items():
query = query.filter(models.Image.properties.any(name=k,
value=v,
deleted=False))
if 'tags' in filters:
tags = filters.pop('tags')
for tag in tags:
query = query.filter(models.Image.tags.any(value=tag,
deleted=False))
for (k, v) in filters.items():
if v is not None:
key = k
if k.endswith('_min') or k.endswith('_max'):
key = key[0:-4]
try:
v = int(v)
except ValueError:
msg = _("Unable to filter on a range "
"with a non-numeric value.")
raise exception.InvalidFilterRangeValue(msg)
if k.endswith('_min'):
query = query.filter(getattr(models.Image, key) >= v)
elif k.endswith('_max'):
query = query.filter(getattr(models.Image, key) <= v)
elif hasattr(models.Image, key):
query = query.filter(getattr(models.Image, key) == v)
else:
query = query.filter(models.Image.properties.any(name=key,
value=v))
marker_image = None
if marker is not None:
marker_image = _image_get(context, marker,
force_show_deleted=showing_deleted)
sort_keys = ['created_at', 'id']
    if sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
query = _paginate_query(query, models.Image, limit,
sort_keys,
marker=marker_image,
sort_dir=sort_dir)
query = query.options(sa_orm.joinedload(models.Image.properties))\
.options(sa_orm.joinedload(models.Image.locations))
return [_normalize_locations(image.to_dict()) for image in query.all()]
def _drop_protected_attrs(model_class, values):
"""
Removed protected attributes from values dictionary using the models
__protected_attributes__ field.
"""
for attr in model_class.__protected_attributes__:
if attr in values:
del values[attr]
def _image_get_disk_usage_by_owner(owner, session, image_id=None):
query = session.query(models.Image)
query = query.filter(models.Image.owner == owner)
if image_id is not None:
query = query.filter(models.Image.id != image_id)
query = query.filter(models.Image.size > 0)
images = query.all()
total = sum([i.size * len(i.locations) for i in images])
return total
def _validate_image(values):
"""
Validates the incoming data and raises a Invalid exception
if anything is out of order.
:param values: Mapping of image metadata to check
"""
status = values.get('status', None)
if not status:
msg = "Image status is required."
raise exception.Invalid(msg)
if status not in STATUSES:
msg = "Invalid image status '%s' for image." % status
raise exception.Invalid(msg)
return values
def _update_values(image_ref, values):
for k in values:
if getattr(image_ref, k) != values[k]:
setattr(image_ref, k, values[k])
def _image_update(context, values, image_id, purge_props=False):
"""
Used internally by image_create and image_update
:param context: Request context
:param values: A dict of attributes to set
:param image_id: If None, create the image, otherwise, find and update it
"""
    #NOTE(jbresnah) values is altered in this function, so a copy is needed
values = values.copy()
session = _get_session()
with session.begin():
# Remove the properties passed in the values mapping. We
# handle properties separately from base image attributes,
# and leaving properties in the values mapping will cause
# a SQLAlchemy model error because SQLAlchemy expects the
# properties attribute of an Image model to be a list and
# not a dict.
properties = values.pop('properties', {})
location_data = values.pop('locations', None)
if image_id:
image_ref = _image_get(context, image_id, session=session)
# Perform authorization check
_check_mutate_authorization(context, image_ref)
else:
if values.get('size') is not None:
values['size'] = int(values['size'])
if 'min_ram' in values:
values['min_ram'] = int(values['min_ram'] or 0)
if 'min_disk' in values:
values['min_disk'] = int(values['min_disk'] or 0)
values['is_public'] = bool(values.get('is_public', False))
values['protected'] = bool(values.get('protected', False))
image_ref = models.Image()
# Need to canonicalize ownership
if 'owner' in values and not values['owner']:
values['owner'] = None
if image_id:
# Don't drop created_at if we're passing it in...
_drop_protected_attrs(models.Image, values)
#NOTE(iccha-sethi): updated_at must be explicitly set in case
            # only the ImageProperty table was modified
values['updated_at'] = timeutils.utcnow()
image_ref.update(values)
# Validate the attributes before we go any further. From my
# investigation, the @validates decorator does not validate
# on new records, only on existing records, which is, well,
# idiotic.
values = _validate_image(image_ref.to_dict())
_update_values(image_ref, values)
try:
image_ref.save(session=session)
except sqlalchemy.exc.IntegrityError:
raise exception.Duplicate("Image ID %s already exists!"
% values['id'])
_set_properties_for_image(context, image_ref, properties, purge_props,
session)
if location_data is not None:
_image_locations_set(image_ref.id, location_data, session)
return image_get(context, image_ref.id)
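# Minimal usage sketch (assumed, mirroring the docstring above): image_id=None
# creates a new image, a real id updates it; 'properties' and 'locations'
# travel in the same values mapping but are persisted separately.
#
#   created = _image_update(context,
#                           {'status': 'queued', 'min_ram': 0,
#                            'properties': {'os': 'linux'}},
#                           image_id=None)
#   _image_update(context, {'status': 'active'},
#                 image_id=created['id'], purge_props=False)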
def _image_locations_set(image_id, locations, session):
location_refs = session.query(models.ImageLocation)\
.filter_by(image_id=image_id)\
.filter_by(deleted=False)\
.all()
for location_ref in location_refs:
location_ref.delete(session=session)
for location in locations:
location_ref = models.ImageLocation(image_id=image_id,
value=location['url'],
meta_data=location['metadata'])
location_ref.save()
def _image_locations_delete_all(context, image_id, delete_time=None,
session=None):
"""Delete all image locations for given image"""
locs_updated_count = _image_child_entry_delete_all(models.ImageLocation,
image_id,
delete_time,
session)
return locs_updated_count
def _set_properties_for_image(context, image_ref, properties,
purge_props=False, session=None):
"""
Create or update a set of image_properties for a given image
:param context: Request context
:param image_ref: An Image object
:param properties: A dict of properties to set
:param session: A SQLAlchemy session to use (if present)
"""
orig_properties = {}
for prop_ref in image_ref.properties:
orig_properties[prop_ref.name] = prop_ref
for name, value in properties.iteritems():
prop_values = {'image_id': image_ref.id,
'name': name,
'value': value}
if name in orig_properties:
prop_ref = orig_properties[name]
_image_property_update(context, prop_ref, prop_values,
session=session)
else:
image_property_create(context, prop_values, session=session)
if purge_props:
for key in orig_properties.keys():
if key not in properties:
prop_ref = orig_properties[key]
image_property_delete(context, prop_ref.name,
image_ref.id, session=session)
def _image_child_entry_delete_all(child_model_cls, image_id, delete_time=None,
session=None):
"""Deletes all the child entries for the given image id.
Deletes all the child entries of the given child entry ORM model class
using the parent image's id.
The child entry ORM model class can be one of the following:
model.ImageLocation, model.ImageProperty, model.ImageMember and
model.ImageTag.
:param child_model_cls: the ORM model class.
:param image_id: id of the image whose child entries are to be deleted.
:param delete_time: datetime of deletion to be set.
If None, uses current datetime.
:param session: A SQLAlchemy session to use (if present)
:rtype: int
:return: The number of child entries got soft-deleted.
"""
session = session or _get_session()
query = session.query(child_model_cls) \
.filter_by(image_id=image_id) \
.filter_by(deleted=False)
delete_time = delete_time or timeutils.utcnow()
count = query.update({"deleted": True, "deleted_at": delete_time})
return count
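# Delegation sketch (illustrative): the various *_delete_all helpers in this
# module all funnel through _image_child_entry_delete_all, e.g.
#
#   n_tags = _image_child_entry_delete_all(models.ImageTag, image_id)
#   n_members = _image_child_entry_delete_all(models.ImageMember, image_id,
#                                             delete_time=timeutils.utcnow())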
def image_property_create(context, values, session=None):
"""Create an ImageProperty object"""
prop_ref = models.ImageProperty()
prop = _image_property_update(context, prop_ref, values, session=session)
return prop.to_dict()
def _image_property_update(context, prop_ref, values, session=None):
"""
Used internally by image_property_create and image_property_update
"""
_drop_protected_attrs(models.ImageProperty, values)
values["deleted"] = False
prop_ref.update(values)
prop_ref.save(session=session)
return prop_ref
def image_property_delete(context, prop_ref, image_ref, session=None):
"""
Used internally by image_property_create and image_property_update
"""
session = session or _get_session()
prop = session.query(models.ImageProperty).filter_by(image_id=image_ref,
name=prop_ref).one()
prop.delete(session=session)
return prop
def _image_property_delete_all(context, image_id, delete_time=None,
session=None):
"""Delete all image properties for given image"""
props_updated_count = _image_child_entry_delete_all(models.ImageProperty,
image_id,
delete_time,
session)
return props_updated_count
def image_member_create(context, values, session=None):
"""Create an ImageMember object"""
memb_ref = models.ImageMember()
_image_member_update(context, memb_ref, values, session=session)
return _image_member_format(memb_ref)
def _image_member_format(member_ref):
"""Format a member ref for consumption outside of this module"""
return {
'id': member_ref['id'],
'image_id': member_ref['image_id'],
'member': member_ref['member'],
'can_share': member_ref['can_share'],
'status': member_ref['status'],
'created_at': member_ref['created_at'],
'updated_at': member_ref['updated_at']
}
def image_member_update(context, memb_id, values):
"""Update an ImageMember object"""
session = _get_session()
memb_ref = _image_member_get(context, memb_id, session)
_image_member_update(context, memb_ref, values, session)
return _image_member_format(memb_ref)
def _image_member_update(context, memb_ref, values, session=None):
"""Apply supplied dictionary of values to a Member object."""
_drop_protected_attrs(models.ImageMember, values)
values["deleted"] = False
values.setdefault('can_share', False)
memb_ref.update(values)
memb_ref.save(session=session)
return memb_ref
def image_member_delete(context, memb_id, session=None):
"""Delete an ImageMember object"""
session = session or _get_session()
member_ref = _image_member_get(context, memb_id, session)
_image_member_delete(context, member_ref, session)
def _image_member_delete(context, memb_ref, session):
memb_ref.delete(session=session)
def _image_member_delete_all(context, image_id, delete_time=None,
session=None):
"""Delete all image members for given image"""
members_updated_count = _image_child_entry_delete_all(models.ImageMember,
image_id,
delete_time,
session)
return members_updated_count
def _image_member_get(context, memb_id, session):
"""Fetch an ImageMember entity by id"""
query = session.query(models.ImageMember)
query = query.filter_by(id=memb_id)
return query.one()
def image_member_find(context, image_id=None, member=None, status=None):
"""Find all members that meet the given criteria
:param image_id: identifier of image entity
:param member: tenant to which membership has been granted
"""
session = _get_session()
members = _image_member_find(context, session, image_id, member, status)
return [_image_member_format(m) for m in members]
def _image_member_find(context, session, image_id=None,
member=None, status=None):
query = session.query(models.ImageMember)
query = query.filter_by(deleted=False)
if not context.is_admin:
query = query.join(models.Image)
filters = [
models.Image.owner == context.owner,
models.ImageMember.member == context.owner,
]
query = query.filter(sa_sql.or_(*filters))
if image_id is not None:
query = query.filter(models.ImageMember.image_id == image_id)
if member is not None:
query = query.filter(models.ImageMember.member == member)
if status is not None:
query = query.filter(models.ImageMember.status == status)
return query.all()
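# Visibility sketch (a reading of the or_() filter above): a non-admin caller
# only sees memberships on images they own or memberships granted to them,
# while an admin context skips the join entirely.
#
#   _image_member_find(admin_ctx, session, image_id=img_id)  # all members
#   _image_member_find(user_ctx, session, image_id=img_id)   # own/granted only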
# pylint: disable-msg=C0111
def _can_show_deleted(context):
"""
Calculates whether to include deleted objects based on context.
Currently just looks for a flag called deleted in the context dict.
"""
if hasattr(context, 'show_deleted'):
return context.show_deleted
if not hasattr(context, 'get'):
return False
return context.get('deleted', False)
def image_tag_set_all(context, image_id, tags):
session = _get_session()
existing_tags = set(image_tag_get_all(context, image_id, session))
tags = set(tags)
tags_to_create = tags - existing_tags
#NOTE(bcwaldon): we call 'reversed' here to ensure the ImageTag.id fields
# will be populated in the order required to reflect the correct ordering
# on a subsequent call to image_tag_get_all
for tag in reversed(list(tags_to_create)):
image_tag_create(context, image_id, tag, session)
tags_to_delete = existing_tags - tags
for tag in tags_to_delete:
image_tag_delete(context, image_id, tag, session)
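# Worked example of the set arithmetic above (illustrative values):
#   existing_tags = {'a', 'b'}, tags = {'b', 'c'}
#   -> tags_to_create = {'c'}   (created in reversed order, see NOTE above)
#   -> tags_to_delete = {'a'}   (soft-deleted via image_tag_delete)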
def image_tag_create(context, image_id, value, session=None):
"""Create an image tag."""
session = session or _get_session()
tag_ref = models.ImageTag(image_id=image_id, value=value)
tag_ref.save(session=session)
return tag_ref['value']
def image_tag_delete(context, image_id, value, session=None):
"""Delete an image tag."""
session = session or _get_session()
query = session.query(models.ImageTag)\
.filter_by(image_id=image_id)\
.filter_by(value=value)\
.filter_by(deleted=False)
try:
tag_ref = query.one()
except sa_orm.exc.NoResultFound:
raise exception.NotFound()
tag_ref.delete(session=session)
def _image_tag_delete_all(context, image_id, delete_time=None, session=None):
"""Delete all image tags for given image"""
tags_updated_count = _image_child_entry_delete_all(models.ImageTag,
image_id,
delete_time,
session)
return tags_updated_count
def image_tag_get_all(context, image_id, session=None):
"""Get a list of tags for a specific image."""
session = session or _get_session()
tags = session.query(models.ImageTag)\
.filter_by(image_id=image_id)\
.filter_by(deleted=False)\
.order_by(sqlalchemy.asc(models.ImageTag.created_at))\
.all()
return [tag['value'] for tag in tags]
def user_get_storage_usage(context, owner_id, image_id=None, session=None):
session = session or _get_session()
total_size = _image_get_disk_usage_by_owner(
owner_id, session, image_id=image_id)
return total_size
| apache-2.0 | 8,173,759,760,428,653,000 | 34.76259 | 79 | 0.592134 | false |
dennybaa/st2contrib | packs/nagios/etc/st2service_handler.py | 8 | 5614 | #!/usr/bin/env python
import httplib
try:
import simplejson as json
except ImportError:
import json
import os
import sys
from urlparse import urljoin
try:
import requests
except ImportError:
raise ImportError('Missing dependency requests. Do ``pip install requests``.')
try:
import yaml
except ImportError:
raise ImportError('Missing dependency pyyaml. Do ``pip install pyyaml``.')
# ST2 configuration
ST2_CONFIG_FILE = './config.yaml'
ST2_API_BASE_URL = 'http://localhost:9101/v1'
ST2_AUTH_BASE_URL = 'http://localhost:9100'
ST2_USERNAME = None
ST2_PASSWORD = None
ST2_AUTH_TOKEN = None
ST2_AUTH_PATH = 'tokens'
ST2_WEBHOOKS_PATH = 'webhooks/st2/'
ST2_TRIGGERS_PATH = 'triggertypes/'
ST2_TRIGGERTYPE_PACK = 'nagios'
ST2_TRIGGERTYPE_NAME = 'service-state-change'
ST2_TRIGGERTYPE_REF = '.'.join([ST2_TRIGGERTYPE_PACK, ST2_TRIGGERTYPE_NAME])
STATE_MESSAGE = {
'OK': 'All is well on the Western front.',
'WARNING': 'We gots a warning yo!',
'UNKNOWN': 'It be unknown...',
'CRITICAL': 'Critical!'
}
REGISTERED_WITH_ST2 = False
OK_CODES = [httplib.OK, httplib.CREATED, httplib.ACCEPTED, httplib.CONFLICT]
def _create_trigger_type():
try:
url = _get_st2_triggers_url()
payload = {
'name': ST2_TRIGGERTYPE_NAME,
'pack': ST2_TRIGGERTYPE_PACK,
'description': 'Trigger type for nagios event handler.'
}
# sys.stdout.write('POST: %s: Body: %s\n' % (url, payload))
headers = {}
headers['Content-Type'] = 'application/json; charset=utf-8'
if ST2_AUTH_TOKEN:
headers['X-Auth-Token'] = ST2_AUTH_TOKEN
post_resp = requests.post(url, data=json.dumps(payload), headers=headers)
except:
        sys.stderr.write('Unable to register trigger type with st2.\n')
raise
else:
status = post_resp.status_code
if status not in OK_CODES:
sys.stderr.write('Failed to register trigger type with st2. HTTP_CODE: %d\n' %
status)
            raise Exception('Failed to register trigger type with st2.')
else:
sys.stdout.write('Registered trigger type with st2.\n')
def _get_auth_url():
return urljoin(ST2_AUTH_BASE_URL, ST2_AUTH_PATH)
def _get_auth_token():
global ST2_AUTH_TOKEN
auth_url = _get_auth_url()
try:
resp = requests.post(auth_url, json.dumps({'ttl': 5 * 60}),
auth=(ST2_USERNAME, ST2_PASSWORD))
except:
        sys.stderr.write('Cannot get auth token from st2. Will try unauthed.\n')
else:
ST2_AUTH_TOKEN = resp.json()['token']
def _register_with_st2():
global REGISTERED_WITH_ST2
try:
url = urljoin(_get_st2_triggers_url(), ST2_TRIGGERTYPE_REF)
# sys.stdout.write('GET: %s\n' % url)
if not ST2_AUTH_TOKEN:
_get_auth_token()
if ST2_AUTH_TOKEN:
get_resp = requests.get(url, headers={'X-Auth-Token': ST2_AUTH_TOKEN})
else:
get_resp = requests.get(url)
if get_resp.status_code != httplib.OK:
_create_trigger_type()
else:
body = json.loads(get_resp.text)
if len(body) == 0:
_create_trigger_type()
except:
raise
else:
REGISTERED_WITH_ST2 = True
def _get_st2_triggers_url():
url = urljoin(ST2_API_BASE_URL, ST2_TRIGGERS_PATH)
return url
def _get_st2_webhooks_url():
url = urljoin(ST2_API_BASE_URL, ST2_WEBHOOKS_PATH)
return url
def _post_event_to_st2(url, body):
headers = {}
headers['X-ST2-Integration'] = 'nagios.'
headers['Content-Type'] = 'application/json; charset=utf-8'
if ST2_AUTH_TOKEN:
headers['X-Auth-Token'] = ST2_AUTH_TOKEN
try:
# sys.stdout.write('POST: url: %s, body: %s\n' % (url, body))
r = requests.post(url, data=json.dumps(body), headers=headers)
except:
        sys.stderr.write('Cannot connect to st2 endpoint.\n')
else:
status = r.status_code
if status not in OK_CODES:
sys.stderr.write('Failed posting nagios event to st2. HTTP_CODE: %d\n' % status)
else:
sys.stdout.write('Sent nagios event to st2. HTTP_CODE: %d\n' % status)
def _get_payload(host, service, event_id, state, state_type, attempt):
payload = {}
payload['host'] = host
payload['service'] = service
payload['event_id'] = event_id
payload['state'] = state
payload['state_type'] = state_type
payload['attempt'] = attempt
payload['msg'] = STATE_MESSAGE.get(state, 'Undefined state.')
return payload
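# Example payload produced by _get_payload (field values are illustrative):
#   {'host': 'db01', 'service': 'mysql', 'event_id': '42',
#    'state': 'CRITICAL', 'state_type': 'HARD', 'attempt': '3',
#    'msg': 'Critical!'}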
def main(args):
event_id = args[1]
service = args[2]
state = args[3]
state_type = args[4]
attempt = args[5]
host = args[6]
payload = _get_payload(host, service, event_id, state, state_type, attempt)
body = {}
body['trigger'] = ST2_TRIGGERTYPE_REF
body['payload'] = payload
_post_event_to_st2(_get_st2_webhooks_url(), body)
if __name__ == '__main__':
try:
if not os.path.exists(ST2_CONFIG_FILE):
sys.stderr.write('Configuration file not found. Exiting.\n')
sys.exit(1)
with open(ST2_CONFIG_FILE) as f:
config = yaml.safe_load(f)
ST2_USERNAME = config['st2_username']
ST2_PASSWORD = config['st2_password']
ST2_API_BASE_URL = config['st2_api_base_url']
ST2_AUTH_BASE_URL = config['st2_auth_base_url']
if not REGISTERED_WITH_ST2:
_register_with_st2()
except:
sys.stderr.write('Failed registering with st2. Won\'t post event.\n')
else:
main(sys.argv)
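# Illustrative nagios wiring (macro names are assumptions matching the
# argument order expected by main() above):
#   command_line $USER1$/st2service_handler.py $SERVICEEVENTID$ $SERVICEDESC$
#       $SERVICESTATE$ $SERVICESTATETYPE$ $SERVICEATTEMPT$ $HOSTNAME$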
| apache-2.0 | 4,769,322,794,573,068,000 | 27.938144 | 92 | 0.602066 | false |
Transkribus/TranskribusDU | TranskribusDU/tasks/TablePrototypes/DU_ABPTableRG.py | 1 | 29902 | # -*- coding: utf-8 -*-
"""
DU task for ABP Table: doing jointly row BIESO and horizontal grid lines
Copyright Naver Labs Europe(C) 2018 JL Meunier
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
import math
from lxml import etree
import numpy as np
from sklearn.pipeline import Pipeline, FeatureUnion
try: #to ease the use without proper Python installation
import TranskribusDU_version
except ImportError:
sys.path.append( os.path.dirname(os.path.dirname( os.path.abspath(sys.argv[0]) )) )
import TranskribusDU_version
from common.trace import traceln
from tasks import _checkFindColDir, _exit
from tasks.DU_CRF_Task import DU_CRF_Task
from crf.Edge import Edge, SamePageEdge
from crf.Graph_MultiPageXml import Graph_MultiPageXml
from crf.NodeType_PageXml import NodeType_PageXml_type_woText
#from crf.FeatureDefinition_PageXml_std_noText import FeatureDefinition_PageXml_StandardOnes_noText
from crf.FeatureDefinition import FeatureDefinition
from crf.Transformer import Transformer, TransformerListByType
from crf.Transformer import EmptySafe_QuantileTransformer as QuantileTransformer
from crf.Transformer_PageXml import NodeTransformerXYWH_v2, NodeTransformerNeighbors, Node1HotFeatures
from crf.Transformer_PageXml import Edge1HotFeatures, EdgeBooleanFeatures_v2, EdgeNumericalSelector
from crf.PageNumberSimpleSequenciality import PageNumberSimpleSequenciality
from tasks.DU_ABPTableGrid import GridAnnotator
class GraphGrid(Graph_MultiPageXml):
"""
We specialize the class of graph because the computation of edges is quite specific
"""
# Grid stuff
#Dynamically add a grid
iGridStep_H = 33 #odd number is better
iGridStep_V = 33 #odd number is better
# Some grid line will be O or I simply because they are too short.
fMinPageCoverage = 0.5 # minimum proportion of the page crossed by a grid line
# we want to ignore col- and row- spans
iGridVisibility = 2 # a grid line sees N neighbours below
iBlockVisibility = 1 # a block sees N neighbouring grid lines
_lClassicNodeType = None
@classmethod
def setClassicNodeTypeList(cls, lNodeType):
"""
determine which type of node goes thru the classical way for determining
the edges (vertical or horizontal overlap, with occlusion, etc.)
"""
cls._lClassicNodeType = lNodeType
def parseDocFile(self, sFilename, iVerbose=0):
"""
Load that document as a CRF Graph.
Also set the self.doc variable!
Return a CRF Graph object
"""
self.doc = etree.parse(sFilename)
self.lNode, self.lEdge = list(), list()
self.lNodeBlock = [] # text node
self.lNodeGridLine = [] # grid line node
root = self.doc.getroot()
doer = GridAnnotator(self.iGridStep_H, self.iGridStep_V)
#map the groundtruth table separators, if any, to our grid
ltlHlV = doer.get_grid_GT_index_from_DOM(root, self.fMinPageCoverage)
#create DOM node reflecting the grid
# we add GridSeparator elements. Groundtruth ones have type="1"
#doer.add_grid_to_DOM(root, ltlHlV)
lClassicType = [nt for nt in self.getNodeTypeList() if nt in self._lClassicNodeType]
lSpecialType = [nt for nt in self.getNodeTypeList() if nt not in self._lClassicNodeType]
for pnum, page, domNdPage in self._iter_Page_DocNode(self.doc):
#now that we have the page, let's create the node for each type!
lClassicPageNode = [nd for nodeType in lClassicType for nd in nodeType._iter_GraphNode(self.doc, domNdPage, page) ]
lSpecialPageNode = [nd for nodeType in lSpecialType for nd in nodeType._iter_GraphNode(self.doc, domNdPage, page) ]
self.lNode.extend(lClassicPageNode) # e.g. the TextLine objects
self.lNodeBlock.extend(lClassicPageNode)
self.lNode.extend(lSpecialPageNode) # e.g. the grid lines!
self.lNodeGridLine.extend(lSpecialPageNode)
#no previous page to consider (for cross-page links...) => None
lClassicPageEdge = Edge.computeEdges(None, lClassicPageNode)
self.lEdge.extend(lClassicPageEdge)
# Now, compute edges between special and classic objects...
lSpecialPageEdge = self.computeSpecialEdges(lClassicPageNode,
lSpecialPageNode)
self.lEdge.extend(lSpecialPageEdge)
#if iVerbose>=2: traceln("\tPage %5d %6d nodes %7d edges"%(pnum, len(lPageNode), len(lPageEdge)))
if iVerbose>=2:
traceln("\tPage %5d"%(pnum))
traceln("\t block: %6d nodes %7d edges (to block)" %(pnum, len(lClassicPageNode), len(lClassicPageEdge)))
traceln("\t line: %6d nodes %7d edges (from block)"%(pnum, len(lSpecialPageNode), len(lSpecialPageEdge)))
if iVerbose: traceln("\t\t (%d nodes, %d edges)"%(len(self.lNode), len(self.lEdge)) )
return self
@classmethod
def computeSpecialEdges(cls, lClassicPageNode, lSpecialPageNode):
"""
return a list of edges
"""
raise Exception("Specialize this method")
class Edge_BL(Edge):
"""Edge block-to-Line"""
pass
class Edge_LL(Edge):
"""Edge line-to-Line"""
pass
class GraphGrid_H(GraphGrid):
"""
Only horizontal grid lines
"""
def getNodeListByType(self, iTyp):
if iTyp == 0:
return self.lNodeBlock
else:
return self.lNodeGridLine
def getEdgeListByType(self, typA, typB):
if typA == 0:
if typB == 0:
return (e for e in self.lEdge if isinstance(e, SamePageEdge))
else:
return (e for e in self.lEdge if isinstance(e, Edge_BL))
else:
if typB == 0:
return []
else:
return (e for e in self.lEdge if isinstance(e, Edge_LL))
@classmethod
def computeSpecialEdges(cls, lClassicPageNode, lSpecialPageNode):
"""
Compute:
- edges between each block and the grid line above/across/below the block
- edges between grid lines
return a list of edges
"""
# indexing the grid lines
dGridLineByIndex = {GridAnnotator.snapToGridIndex(nd.y1, cls.iGridStep_V):nd for nd in lSpecialPageNode}
for nd in lSpecialPageNode:
#print(nd, dGridLineByIndex[GridAnnotator.snapToGridIndex(nd.y1, cls.iGridStep_V)])
assert dGridLineByIndex[GridAnnotator.snapToGridIndex(nd.y1, cls.iGridStep_V)] == nd, "internal error inconsistent grid"
# block to grid line edges
lEdge = []
fLenNorm = float(cls.iGridStep_V * cls.iBlockVisibility)
imin, imax = 100, -1
assert lClassicPageNode, "ERROR: empty page!!??"
for ndBlock in lClassicPageNode:
### print("---- ", ndBlock)
# i1 = GridAnnotator.snapToGridIndex(nd.x1, cls.iGridStep_V)
# i2 = GridAnnotator.snapToGridIndex(nd.x2, cls.iGridStep_V)
i1 = int(math.floor(ndBlock.y1 / float(cls.iGridStep_V)))
i2 = int(math.ceil (ndBlock.y2 / float(cls.iGridStep_V)))
assert i2 >= i1
yBlkAvg = (ndBlock.y1 + ndBlock.y2)/2.0
#Also make visible the iBlockVisibility-1 previous grid lines, if any
for i in range(max(0, i1 - cls.iBlockVisibility + 1), i1+1):
edge = Edge_BL(ndBlock, dGridLineByIndex[i])
edge.len = (yBlkAvg - i * cls.iGridStep_V) / fLenNorm
edge._gridtype = -1
lEdge.append(edge)
imin = min(i, imin)
### print(ndBlock.y1, i, edge.len)
for i in range(max(0, i1+1), max(0, i2)):
ndLine = dGridLineByIndex[i]
edge = Edge_BL(ndBlock, ndLine)
edge.len = (yBlkAvg - i * cls.iGridStep_V) / fLenNorm
edge._gridtype = 0 # grid line is crossing the block
assert ndBlock.y1 < i*cls.iGridStep_V
assert i*cls.iGridStep_V < ndBlock.y2
### print(ndBlock.y1, ndBlock.y2, i, edge.len)
lEdge.append(edge)
imax = max(imax, i)
for i in range(max(0, i2), i2 + cls.iBlockVisibility):
try:
edge = Edge_BL(ndBlock, dGridLineByIndex[i])
except KeyError:
break # out of the grid
edge.len = (yBlkAvg - i * cls.iGridStep_V) / fLenNorm
edge._gridtype = +1
lEdge.append(edge)
imax = max(imax, i)
### print(ndBlock.y2, i, edge.len)
#now filter those edges
lEdge = cls._filterBadEdge(lEdge, imin, imax, dGridLineByIndex)
if False:
print("--- After filtering: %d edges", len(lEdge))
lSortedEdge = sorted(lEdge, key=lambda x: x.A.domid)
for edge in lSortedEdge:
print( "domid=%s y1=%s y2=%s"%(edge.A.domid, edge.A.y1, edge.A.y2)
+ " %s %s "%(["↑", "-", "↓"][1+edge._gridtype],
edge.B.y1 / cls.iGridStep_V))
# grid line to grid line edges
n = len(dGridLineByIndex)
for i in range(n):
A = dGridLineByIndex[i]
for j in range(i+1, min(n, i+cls.iGridVisibility+1)):
edge = Edge_LL(A, dGridLineByIndex[j])
edge.len = (j - i)
lEdge.append(edge)
return lEdge
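    # Worked example for the block-to-line loops above (illustrative values,
    # iGridStep_V=33, iBlockVisibility=1): a block with y1=50, y2=100 gives
    # i1=floor(50/33)=1 and i2=ceil(100/33)=4, so it is linked to grid line 1
    # (above, _gridtype=-1), lines 2-3 (crossing, _gridtype=0) and line 4
    # (below, _gridtype=+1), before _filterBadEdge prunes occluded edges.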
@classmethod
def _filterBadEdge(cls, lEdge, imin, imax, dGridLineByIndex, fRatio=0.25):
"""
We get
- a list of block2Line edges
- the [imin, imax] interval of involved grid line index
- the dGridLineByIndex dictionary
But some block should not be connected to a line due to obstruction by
another blocks.
We filter out those edges...
return a sub-list of lEdge
"""
lKeepEdge = []
def _xoverlapSrcSrc(edge, lEdge):
"""
does the source node of edge overlap with the source node of any
edge of the list?
"""
A = edge.A
for _edge in lEdge:
if A.significantXOverlap(_edge.A, fRatio): return True
return False
def _yoverlapSrcSrc(edge, lEdge):
"""
does the source node of edge overlap with the source node of any
edge of the list?
"""
A = edge.A
for _edge in lEdge:
if A.significantYOverlap(_edge.A, fRatio): return True
return False
#take each line in turn
for i in range(imin, imax+1):
ndLine = dGridLineByIndex[i]
#--- process downward edges
lDownwardEdge = [edge for edge in lEdge \
if edge._gridtype == +1 and edge.B == ndLine]
if lDownwardEdge:
                #sort edges by source block, from closest to the line to farthest
lDownwardEdge.sort(key=lambda o: o.A.y2 - ndLine.y1, reverse=True)
lKeepDownwardEdge = [lDownwardEdge.pop(0)]
#now keep all edges whose source does not overlap vertically with
# the source of an edge that is kept
for edge in lDownwardEdge:
if not _xoverlapSrcSrc(edge, lKeepDownwardEdge):
lKeepDownwardEdge.append(edge)
lKeepEdge.extend(lKeepDownwardEdge)
#--- keep all crossing edges
lCrossingEdge = [edge for edge in lEdge \
if edge._gridtype == 0 and edge.B == ndLine]
lKeepEdge.extend(lCrossingEdge)
            #--- process upward edges
lUpwardEdge = [edge for edge in lEdge \
if edge._gridtype == -1 and edge.B == ndLine]
if lUpwardEdge:
                #sort edges by source block, from closest to the line to farthest
lUpwardEdge.sort(key=lambda o: ndLine.y2 - o.A.y1, reverse=True)
lKeepUpwardEdge = [lUpwardEdge.pop(0)]
#now keep all edges whose source does not overlap vertically with
# the source of an edge that is kept
for edge in lUpwardEdge:
if not _xoverlapSrcSrc(edge, lKeepUpwardEdge):
lKeepUpwardEdge.append(edge)
lKeepEdge.extend(lKeepUpwardEdge)
return lKeepEdge
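    # Occlusion sketch (illustrative): with blocks A above B and a grid line L
    # below both, the downward pass keeps B->L first (closest source), then
    # drops A->L whenever A overlaps B horizontally by more than fRatio, so a
    # block never "sees" a line through another block. Crossing edges are
    # always kept.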
#------------------------------------------------------------------------------------------------------
class GridLine_NodeTransformer(Transformer):
"""
features of a grid line:
- horizontal or vertical.
"""
def transform(self, lNode):
        # two 1-hot columns: horizontal (x-extent dominant) or vertical grid line
        a = np.zeros( ( len(lNode), 2 ) , dtype=np.float64)
for i, blk in enumerate(lNode):
if abs(blk.x2 - blk.x1) > abs(blk.y1 - blk.y2):
a[i,0] = 1.0
else:
a[i,1] = 1.0
return a
class Block2GridLine_EdgeTransformer(Transformer):
"""
features of a block to grid line edge:
- below, crossing, above
"""
    def transform(self, lEdge):
        # 3 position classes (above / crossing / below) x (1-hot, length, length^2)
        a = np.zeros( ( len(lEdge), 3 + 3 + 3) , dtype=np.float64)
        for i, edge in enumerate(lEdge):
            z = 1 + edge._gridtype # _gridtype is -1 or 0 or 1
            a[i, z] = 1.0
            a[i, 3 + z] = edge.len # normalised on [0, 1] edge length
            a[i, 6 + z] = edge.len * edge.len
        return a
class GridLine2GridLine_EdgeTransformer(Transformer):
"""
    features of a grid line to grid line edge:
    - distance between the lines, in grid steps (1-hot encoded)
"""
    def transform(self, lEdge):
        # one 1-hot column per possible line-to-line distance, in grid steps
        a = np.zeros( ( len(lEdge), GraphGrid_H.iGridVisibility ) , dtype=np.float64)
        for i, edge in enumerate(lEdge):
            a[i, edge.len - 1] = 1.0 # edge length (number of steps)
        return a
class My_FeatureDefinition(FeatureDefinition):
"""
Multitype version:
so the node_transformer actually is a list of node_transformer of length n_class
the edge_transformer actually is a list of node_transformer of length n_class^2
We also inherit from FeatureDefinition_T !!!
"""
n_QUANTILES = 16
def __init__(self, **kwargs):
"""
set _node_transformer, _edge_transformer, tdifNodeTextVectorizer
"""
FeatureDefinition.__init__(self)
nbTypes = self._getTypeNumber(kwargs)
block_transformer = FeatureUnion( [ #CAREFUL IF YOU CHANGE THIS - see cleanTransformers method!!!!
("xywh", Pipeline([
('selector', NodeTransformerXYWH_v2()),
#v1 ('xywh', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('xywh', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
, ("neighbors", Pipeline([
('selector', NodeTransformerNeighbors()),
#v1 ('neighbors', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('neighbors', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
, ("1hot", Pipeline([
('1hot', Node1HotFeatures()) #does the 1-hot encoding directly
])
)
])
grid_line_transformer = GridLine_NodeTransformer()
self._node_transformer = TransformerListByType([block_transformer, grid_line_transformer])
edge_BB_transformer = FeatureUnion( [ #CAREFUL IF YOU CHANGE THIS - see cleanTransformers method!!!!
("1hot", Pipeline([
('1hot', Edge1HotFeatures(PageNumberSimpleSequenciality()))
])
)
, ("boolean", Pipeline([
('boolean', EdgeBooleanFeatures_v2())
])
)
, ("numerical", Pipeline([
('selector', EdgeNumericalSelector()),
#v1 ('numerical', StandardScaler(copy=False, with_mean=True, with_std=True)) #use in-place scaling
('numerical', QuantileTransformer(n_quantiles=self.n_QUANTILES, copy=False)) #use in-place scaling
])
)
] )
edge_BL_transformer = Block2GridLine_EdgeTransformer()
edge_LL_transformer = GridLine2GridLine_EdgeTransformer()
self._edge_transformer = TransformerListByType([edge_BB_transformer,
edge_BL_transformer,
edge_BL_transformer, # useless but required
edge_LL_transformer
])
self.tfidfNodeTextVectorizer = None #tdifNodeTextVectorizer
def fitTranformers(self, lGraph,lY=None):
"""
Fit the transformers using the graphs, but TYPE BY TYPE !!!
return True
"""
self._node_transformer[0].fit([nd for g in lGraph for nd in g.getNodeListByType(0)])
self._node_transformer[1].fit([nd for g in lGraph for nd in g.getNodeListByType(1)])
self._edge_transformer[0].fit([e for g in lGraph for e in g.getEdgeListByType(0, 0)])
self._edge_transformer[1].fit([e for g in lGraph for e in g.getEdgeListByType(0, 1)])
#self._edge_transformer[2].fit([e for g in lGraph for e in g.getEdgeListByType(1, 0)])
#self._edge_transformer[3].fit([e for g in lGraph for e in g.getEdgeListByType(1, 1)])
return True
class DU_ABPTableRG(DU_CRF_Task):
"""
We will do a CRF model for a DU task
, with the below labels
"""
sXmlFilenamePattern = "*.mpxml"
iGridVisibility = None
iBlockVisibility = None
#=== CONFIGURATION ====================================================================
@classmethod
def getConfiguredGraphClass(cls):
"""
In this class method, we must return a configured graph class
"""
# Textline labels
# Begin Inside End Single Other
lLabels_BIESO = ['B', 'I', 'E', 'S', 'O']
# Grid lines:
# Border Ignore Separator Outside
lLabels_BISO_Grid = ['B', 'I', 'S', 'O']
#DEFINING THE CLASS OF GRAPH WE USE
DU_GRAPH = GraphGrid_H
if cls.iGridVisibility is None:
traceln(" - grid2grid visibility is %d" % DU_GRAPH.iGridVisibility)
else:
traceln(" - set grid2grid visibility to %d" % cls.iGridVisibility)
DU_GRAPH.iGridVisibility = cls.iGridVisibility
if cls.iBlockVisibility is None:
traceln(" - block2grid visibility is %d" % DU_GRAPH.iBlockVisibility)
else:
traceln(" - set block2grid visibility to %d" % cls.iBlockVisibility)
DU_GRAPH.iBlockVisibility = cls.iBlockVisibility
# ROW
ntR = NodeType_PageXml_type_woText("row"
, lLabels_BIESO
, None
, False
, BBoxDeltaFun=lambda v: max(v * 0.066, min(5, v/3)) #we reduce overlap in this way
)
ntR.setLabelAttribute("DU_row")
ntR.setXpathExpr( (".//pc:TextLine" #how to find the nodes
, "./pc:TextEquiv") #how to get their text
)
DU_GRAPH.addNodeType(ntR)
# HEADER
ntGH = NodeType_PageXml_type_woText("gh"
, lLabels_BISO_Grid
, None
, False
, None # equiv. to: BBoxDeltaFun=lambda _: 0
)
ntGH.setLabelAttribute("type")
ntGH.setXpathExpr( ('.//pc:GridSeparator[@orient="0"]' #how to find the nodes
, "./pc:TextEquiv") #how to get their text
)
DU_GRAPH.addNodeType(ntGH)
DU_GRAPH.setClassicNodeTypeList( [ntR ])
return DU_GRAPH
def __init__(self, sModelName, sModelDir,
iGridVisibility = None,
iBlockVisibility = None,
sComment=None,
C=None, tol=None, njobs=None, max_iter=None,
inference_cache=None):
DU_ABPTableRG.iGridVisibility = iGridVisibility
DU_ABPTableRG.iBlockVisibility = iBlockVisibility
DU_CRF_Task.__init__(self
, sModelName, sModelDir
, dFeatureConfig = {'row_row':{}, 'row_gh':{},
'gh_row':{}, 'gh_gh':{},
'gh':{}, 'row':{}}
, dLearnerConfig = {
'C' : .1 if C is None else C
, 'njobs' : 4 if njobs is None else njobs
, 'inference_cache' : 50 if inference_cache is None else inference_cache
#, 'tol' : .1
, 'tol' : .05 if tol is None else tol
, 'save_every' : 50 #save every 50 iterations,for warm start
, 'max_iter' : 10 if max_iter is None else max_iter
}
, sComment=sComment
#,cFeatureDefinition=FeatureDefinition_PageXml_StandardOnes_noText
,cFeatureDefinition=My_FeatureDefinition
)
# if options.bBaseline:
# self.bsln_mdl = self.addBaseline_LogisticRegression() #use a LR model trained by GridSearch as baseline
#=== END OF CONFIGURATION =============================================================
# def predict(self, lsColDir):
# """
# Return the list of produced files
# """
# self.sXmlFilenamePattern = "*.mpxml"
# return DU_CRF_Task.predict(self, lsColDir)
#
# def runForExternalMLMethod(self, lsColDir, storeX, applyY, bRevertEdges=False):
# """
# Return the list of produced files
# """
# self.sXmlFilenamePattern = "*.mpxml"
# return DU_CRF_Task.runForExternalMLMethod(self, lsColDir, storeX, applyY, bRevertEdges)
# ----------------------------------------------------------------------------
def main(sModelDir, sModelName, options):
doer = DU_ABPTableRG(sModelName, sModelDir,
iGridVisibility = options.iGridVisibility,
iBlockVisibility = options.iBlockVisibility,
C = options.crf_C,
tol = options.crf_tol,
njobs = options.crf_njobs,
max_iter = options.max_iter,
inference_cache = options.crf_inference_cache)
if options.rm:
doer.rm()
return
lTrn, lTst, lRun, lFold = [_checkFindColDir(lsDir, bAbsolute=False) for lsDir in [options.lTrn, options.lTst, options.lRun, options.lFold]]
# if options.bAnnotate:
# doer.annotateDocument(lTrn)
# traceln('annotation done')
# sys.exit(0)
traceln("- classes: ", doer.getGraphClass().getLabelNameList())
    ## use a_mpxml files
#doer.sXmlFilenamePattern = doer.sLabeledXmlFilenamePattern
if options.iFoldInitNum or options.iFoldRunNum or options.bFoldFinish:
if options.iFoldInitNum:
"""
initialization of a cross-validation
"""
splitter, ts_trn, lFilename_trn = doer._nfold_Init(lFold, options.iFoldInitNum, test_size=0.25, random_state=None, bStoreOnDisk=True)
elif options.iFoldRunNum:
"""
Run one fold
"""
oReport = doer._nfold_RunFoldFromDisk(options.iFoldRunNum, options.warm, options.pkl)
traceln(oReport)
elif options.bFoldFinish:
tstReport = doer._nfold_Finish()
traceln(tstReport)
else:
assert False, "Internal error"
#no more processing!!
exit(0)
#-------------------
if lFold:
loTstRpt = doer.nfold_Eval(lFold, 3, .25, None, options.pkl)
import graph.GraphModel
sReportPickleFilename = os.path.join(sModelDir, sModelName + "__report.txt")
traceln("Results are in %s"%sReportPickleFilename)
graph.GraphModel.GraphModel.gzip_cPickle_dump(sReportPickleFilename, loTstRpt)
elif lTrn:
doer.train_save_test(lTrn, lTst, options.warm, options.pkl)
try: traceln("Baseline best estimator: %s"%doer.bsln_mdl.best_params_) #for GridSearch
except: pass
traceln(" --- CRF Model ---")
traceln(doer.getModel().getModelInfo())
elif lTst:
doer.load()
tstReport = doer.test(lTst)
traceln(tstReport)
if options.bDetailedReport:
traceln(tstReport.getDetailledReport())
sReportPickleFilename = os.path.join(sModelDir, sModelName + "__detailled_report.txt")
graph.GraphModel.GraphModel.gzip_cPickle_dump(sReportPickleFilename, tstReport)
if lRun:
if options.storeX or options.applyY:
try: doer.load()
except: pass #we only need the transformer
lsOutputFilename = doer.runForExternalMLMethod(lRun, options.storeX, options.applyY, options.bRevertEdges)
else:
doer.load()
lsOutputFilename = doer.predict(lRun)
traceln("Done, see in:\n %s"%lsOutputFilename)
# ----------------------------------------------------------------------------
if __name__ == "__main__":
version = "v.01"
usage, description, parser = DU_CRF_Task.getBasicTrnTstRunOptionParser(sys.argv[0], version)
# parser.add_option("--annotate", dest='bAnnotate', action="store_true",default=False, help="Annotate the textlines with BIES labels")
#FOR GCN
parser.add_option("--revertEdges", dest='bRevertEdges', action="store_true", help="Revert the direction of the edges")
parser.add_option("--detail", dest='bDetailedReport', action="store_true", default=False,help="Display detailled reporting (score per document)")
parser.add_option("--baseline", dest='bBaseline', action="store_true", default=False, help="report baseline method")
parser.add_option("--line_see_line", dest='iGridVisibility', action="store", type=int, default=2, help="seeline2line: how many next grid lines does one line see?")
parser.add_option("--block_see_line", dest='iBlockVisibility', action="store", type=int, default=2, help="seeblock2line: how many next grid lines does one block see?")
# ---
#parse the command line
(options, args) = parser.parse_args()
# ---
try:
sModelDir, sModelName = args
except Exception as e:
traceln("Specify a model folder and a model name!")
_exit(usage, 1, e)
    main(sModelDir, sModelName, options)
| bsd-3-clause | -9,006,351,465,621,018,000 | 42.269175 | 173 | 0.530638 | false |
klausman/scion | python/sibra_server/main.py | 1 | 10677 | # Copyright 2016 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`main` --- SIBRA service daemon
=====================================
"""
# Stdlib
import logging
import threading
import time
from queue import Queue
# SCION
from lib.crypto.asymcrypto import get_sig_key
from lib.packet.ext_util import find_ext_hdr
from lib.packet.path_mgmt.seg_recs import PathRecordsReg
from lib.path_db import DBResult, PathSegmentDB
from lib.sibra.ext.steady import SibraExtSteady
from lib.sibra.state.state import SibraState
from lib.sibra.util import BWSnapshot
from lib.thread import thread_safety_net
from lib.types import (
ExtensionClass,
PathMgmtType as PMT,
PathSegmentType as PST,
PayloadClass,
ServiceType,
)
from lib.util import (
SCIONTime,
hex_str,
sleep_interval,
)
from lib.zk.id import ZkID
from lib.zk.zk import Zookeeper
from scion_elem.scion_elem import SCIONElement
from sibra_server.steady import (
SteadyPath,
SteadyPathErrorNoReservation,
)
from sibra_server.util import find_last_ifid
# How long to wait for path propagation before setting up steady paths over
# core links
STARTUP_WAIT = 30
class SibraServerBase(SCIONElement):
"""
Base class for the SIBRA service, which is responsible for managing steady
paths on all interfaces in the local AS.
"""
SERVICE_TYPE = ServiceType.SIBRA
PST_TYPE = None
def __init__(self, server_id, conf_dir, prom_export=None):
"""
:param str server_id: server identifier.
:param str conf_dir: configuration directory.
:param str prom_export: prometheus export address.
"""
super().__init__(server_id, conf_dir, prom_export=prom_export)
self.sendq = Queue()
self.signing_key = get_sig_key(self.conf_dir)
self.segments = PathSegmentDB(max_res_no=1)
# Maps of {ISD-AS: {steady path id: steady path}} for all incoming
# (srcs) and outgoing (dests) steady paths:
self.srcs = {}
self.dests = {}
# Map of SibraState objects by interface ID
self.link_states = {}
# Map of link types by interface ID
self.link_types = {}
self.lock = threading.Lock()
self.CTRL_PLD_CLASS_MAP = {
PayloadClass.PATH: {PMT.REG: self.handle_path_reg},
PayloadClass.SIBRA: {PayloadClass.SIBRA: self.handle_sibra_pkt},
}
self._find_links()
zkid = ZkID.from_values(self.addr.isd_as, self.id,
[(self.addr.host, self._port)]).pack()
self.zk = Zookeeper(self.addr.isd_as, self.SERVICE_TYPE, zkid,
self.topology.zookeepers)
self.zk.retry("Joining party", self.zk.party_setup)
def _find_links(self):
for br in self.topology.border_routers:
for ifid, intf in br.interfaces.items():
self.link_states[ifid] = SibraState(
intf.bandwidth, self.addr.isd_as)
self.link_types[ifid] = intf.link_type
def run(self):
threading.Thread(
target=thread_safety_net, args=(self.worker,),
name="SB.worker", daemon=True).start()
threading.Thread(
target=thread_safety_net, args=(self.sender,),
name="SB.sender", daemon=True).start()
super().run()
def worker(self):
# Cycle time should be << SIBRA_TICK, as it determines how often
# reservations are potentially renewed, and the expiration of old
# reservation blocks.
worker_cycle = 1.0
start = SCIONTime.get_time()
while self.run_flag.is_set():
sleep_interval(start, worker_cycle, "SB.worker cycle")
start = SCIONTime.get_time()
with self.lock:
self.manage_steady_paths()
def sender(self):
"""
Handle sending packets on behalf of Link/SteadyPath objects through the
local socket.
"""
while self.run_flag.is_set():
spkt = self.sendq.get()
dst, port = self.get_first_hop(spkt)
if not dst:
logging.error("Unable to determine first hop for packet:\n%s",
spkt)
continue
spkt.addrs.src.host = self.addr.host
logging.debug("Dst: %s Port: %s\n%s", dst, port, spkt)
self.send(spkt, dst, port)
def handle_path_reg(self, cpld, meta):
"""
Handle path registration packets from the local beacon service. First
determine which interface the segments use, then pass the segment to the
appropriate Link.
"""
pmgt = cpld.union
payload = pmgt.union
assert isinstance(payload, PathRecordsReg), type(payload)
meta.close()
name = PST.to_str(self.PST_TYPE)
with self.lock:
for type_, pcb in payload.iter_pcbs():
if type_ == self.PST_TYPE:
self._add_segment(pcb, name)
def _add_segment(self, pcb, name):
res = self.segments.update(pcb)
if res == DBResult.ENTRY_ADDED:
logging.info("%s Segment added: %s", name, pcb.short_desc())
elif res == DBResult.ENTRY_UPDATED:
logging.debug("%s Segment updated: %s", name, pcb.short_desc())
isd_as = pcb.first_ia()
if isd_as not in self.dests:
logging.debug("Found new destination ISD-AS: %s", isd_as)
self.dests[isd_as] = {}
for steady in self.dests[isd_as].values():
steady.update_seg(pcb)
def handle_sibra_pkt(self, pkt):
"""
Handle SIBRA packets. First determine which interface they came from,
then pass them to the appropriate Link.
"""
ext = find_ext_hdr(pkt, ExtensionClass.HOP_BY_HOP,
SibraExtSteady.EXT_TYPE)
if not ext:
logging.error("Packet contains no SIBRA extension header")
return
if not ext.steady:
logging.error("Received non-steady SIBRA packet:\n%s", pkt)
return
if not ext.req_block:
logging.error("Received non-request SIBRA packet:\n%s", pkt)
return
with self.lock:
if ext.fwd:
self._process_req(pkt, ext)
else:
self._process_reply(pkt, ext)
def _process_req(self, pkt, ext):
"""Process a steady path request."""
path_id = ext.path_ids[0]
self.srcs.setdefault(ext.src_ia, {})
if ext.setup and path_id in self.srcs[ext.src_ia]:
logging.error("Setup request for existing path id: %s\n%s",
hex_str(path_id), pkt)
return
elif not ext.setup and path_id not in self.srcs[ext.src_ia]:
logging.error("Renewal request for non-existant path id: %s\n%s",
hex_str(path_id), pkt)
return
ifid = find_last_ifid(pkt, ext)
if ifid not in self.link_states:
logging.error("Packet came from unknown interface '%s':\n%s",
ifid, pkt)
return
if not ext.accepted:
# Request was already rejected, so just send the packet back.
pkt.reverse()
self.sendq.put(pkt)
return
state = self.link_states[ifid]
req_info = ext.req_block.info
bwsnap = req_info.bw.to_snap()
bwhint = state.add_steady(path_id, req_info.index, bwsnap,
req_info.exp_tick, True, ext.setup)
if bwhint is not None:
# This shouldn't happen - if the local BR accepted the reservation,
# then there should be enough bandwidth available for it. This means
# our state is out of sync.
logging.critical("Requested: %s Available bandwidth: %s\n%s",
bwsnap, bwhint, pkt)
return
self.srcs[ext.src_ia][path_id] = None
# All is good, return the packet to the requestor.
pkt.reverse()
self.sendq.put(pkt)
def _process_reply(self, pkt, ext):
"""Process a reply to a steady path request."""
path_id = ext.path_ids[0]
dest = pkt.addrs.src.isd_as
steady = self.dests[dest].get(path_id, None)
if not steady:
logging.error("Unknown path ID: %s:\n%s",
hex_str(path_id), pkt)
return
steady.process_reply(pkt, ext)
def manage_steady_paths(self):
"""Create or renew steady paths to all destinations."""
now = time.time()
for isd_as, steadies in self.dests.items():
if not steadies and (now - self._startup >= STARTUP_WAIT):
self._steady_add(isd_as)
continue
for id_, steady in list(steadies.items()):
try:
steady.renew()
except SteadyPathErrorNoReservation:
del steadies[id_]
def _steady_add(self, isd_as):
seg = self._pick_seg(isd_as)
if not seg:
del self.dests[isd_as]
return
ifid = seg.last_hof().ingress_if
link_state = self.link_states[ifid]
link_type = self.link_types[ifid]
# FIXME(kormat): un-hardcode these bandwidths
bwsnap = BWSnapshot(500 * 1024, 500 * 1024)
steady = SteadyPath(self.addr, self._port, self.sendq, self.signing_key,
link_type, link_state, seg, bwsnap)
self.dests[isd_as][steady.id] = steady
logging.debug("Setting up steady path %s -> %s over %s",
self.addr.isd_as, isd_as, seg.short_desc())
steady.setup()
def _pick_seg(self, isd_as):
"""Select the segment to use for a steady path."""
# FIXME(kormat): this needs actual logic
# For now, we use the shortest path
segs = self.segments(first_ia=isd_as)
if segs:
return segs[0]
if not self._quiet_startup():
logging.warning("No segments to %s", isd_as)
| apache-2.0 | 2,730,924,266,326,352,400 | 36.996441 | 80 | 0.583778 | false |
lcdb/lcdblib | tests/test_stats.py | 1 | 2338 | from textwrap import dedent
import numpy as np
from lcdblib.stats import fisher
table = [12, 5, 29, 2]
def fix(s):
# Remove common leading whitespace and blank lines at top or bottom
ss = [i.rstrip() for i in s.splitlines(False)]
if len(ss[0]) == 0:
ss = ss[1:]
if len(ss[-1]) == 0:
ss = ss[:-1]
return dedent("\n".join(ss))
def test_2x2():
s = fisher.print_2x2_table(
table,
row_labels=['Selected', 'Not selected'],
col_labels=['Having the property', 'Not having the property']
)
assert s == fix("""
============ =================== ======================= =====
Having the property Not having the property total
============ =================== ======================= =====
Selected 12 5 17
Not selected 29 2 31
total 41 7 48
============ =================== ======================= =====
""")
def test_table_from_bool():
ind1 = np.ones(sum(table)) == 0
ind2 = np.ones(sum(table)) == 0
ind1[:table[0]] = True
ind2[:table[0]] = True
ind1[table[0]:table[0] + table[1]] = True
ind2[table[0] + table[1]:table[0] + table[1] + table[2]] = True
assert fisher.table_from_bool(ind1, ind2) == table
def test_fisher_tables():
s = fisher.fisher_tables(table, row_names=['has property', 'does not'],
col_names=['selected', 'not'], title='testing')
assert s == fix("""
testing
-------
============ ======== === =====
selected not total
============ ======== === =====
has property 12 5 17
does not 29 2 31
total 41 7 48
============ ======== === =====
============ ======== ==== =====
selected not total
============ ======== ==== =====
has property 0.71 0.29 1.00
does not 0.94 0.06 1.00
============ ======== ==== =====
============ ======== ====
selected not
============ ======== ====
has property 0.29 0.71
does not 0.71 0.29
total 1.00 1.00
============ ======== ====
odds ratio: 0.165517
2-sided pval: 0.0802686
""")
| mit | -9,026,949,973,392,746,000 | 29.763158 | 75 | 0.39136 | false |
legacysurvey/pipeline | bin/dr4/dr4-bootes-status.py | 2 | 1187 | from __future__ import print_function
import argparse
import os
import numpy as np
from astrometry.util.fits import fits_table, merge_tables
parser = argparse.ArgumentParser(description='Generate a legacypipe-compatible CCDs file from a set of reduced imaging.')
parser.add_argument('--camera', type=str, default='90prime')
parser.add_argument('--get_bricks_notdone', action='store_true', default=False)
args = parser.parse_args()
if args.get_bricks_notdone:
b=fits_table(os.path.join(os.environ['LEGACY_SURVEY_DIR'],'survey-bricks-%s.fits.gz' % args.camera))
don=np.loadtxt('bricks_done_%s.tmp' % args.camera,dtype=str)
fout= 'bricks_notdone_%s.tmp' % args.camera
if os.path.exists(fout):
os.remove(fout)
# Bricks not finished
with open(fout,'w') as fil:
for brick in list( set(b.brickname).difference( set(don) ) ):
fil.write('%s\n' % brick)
print('Wrote %s' % fout)
# All Bricks
fout= 'bricks_all_%s.tmp' % args.camera
if os.path.exists(fout):
exit()
with open(fout,'w') as fil:
for brick in b.brickname:
fil.write('%s\n' % brick)
print('Wrote %s' % fout)
| gpl-2.0 | -3,081,659,299,685,204,500 | 32.914286 | 122 | 0.649537 | false |
stgraber/snapcraft | snapcraft/tests/test_tour.py | 11 | 3414 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from tempfile import TemporaryDirectory, NamedTemporaryFile
from unittest import mock
import snapcraft.main
from snapcraft.tests import TestCase
class TestExamplesCmd(TestCase):
def test_call_scaffold_with_parameter(self):
sys.argv = ['/usr/bin/snapcraft', 'tour', '/tmp/foo']
with mock.patch('snapcraft.main._scaffold_examples') as mock_cmd:
snapcraft.main.main()
mock_cmd.assert_called_once_with("/tmp/foo")
def test_call_scaffold_without_parameter(self):
sys.argv = ['/usr/bin/snapcraft', 'tour']
with mock.patch('snapcraft.main._scaffold_examples') as mock_cmd:
snapcraft.main.main()
mock_cmd.assert_called_once_with(
snapcraft.main._SNAPCRAFT_TOUR_DIR)
class TestScaffoldExample(TestCase):
@mock.patch('snapcraft.main.get_tourdir')
def test_copy_example_unexisting_path(self, mock_get_tourdir):
mock_get_tourdir.return_value = \
os.path.join(os.path.dirname(__file__), '..', '..', 'tour')
dest_path = os.path.join(self.path, 'foo')
snapcraft.main._scaffold_examples(dest_path)
self.assertTrue(os.path.isdir(dest_path), "dest path exists")
@mock.patch('snapcraft.main.get_tourdir')
def test_copy_example_existing_path(self, mock_get_tourdir):
mock_get_tourdir.return_value = \
os.path.join(os.path.dirname(__file__), '..', '..', 'tour')
# we create a path which isn't cwd
with TemporaryDirectory() as temp_dir:
snapcraft.main._scaffold_examples(temp_dir)
# we install in a subdirectory
dest_path = os.path.join(temp_dir, "snapcraft-tour")
self.assertTrue(os.path.isdir(dest_path), "dest path exists: {}")
@mock.patch('snapcraft.main.get_tourdir')
def test_copy_example_existing_default_path(self, mock_get_tourdir):
mock_get_tourdir.return_value = \
os.path.join(os.path.dirname(__file__), '..', '..', 'tour')
# we create the default dir name in cwd
default_dir = snapcraft.main._SNAPCRAFT_TOUR_DIR
os.makedirs(default_dir)
self.assertRaises(FileExistsError,
snapcraft.main._scaffold_examples,
default_dir)
@mock.patch('snapcraft.main.get_tourdir')
def test_copy_example_on_existing_file(self, mock_get_tourdir):
mock_get_tourdir.return_value = \
os.path.join(os.path.dirname(__file__), '..', '..', 'tour')
with NamedTemporaryFile() as temp_file:
self.assertRaises(NotADirectoryError,
snapcraft.main._scaffold_examples,
temp_file.name)
| gpl-3.0 | -5,176,485,900,614,102,000 | 37.795455 | 77 | 0.64382 | false |
wangmingjob/OnlineJudge | contest/migrations/0001_initial.py | 6 | 3312 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('group', '0004_merge'),
]
operations = [
migrations.CreateModel(
name='Contest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(unique=True, max_length=40)),
('description', models.TextField()),
('mode', models.IntegerField()),
('show_rank', models.BooleanField()),
('show_user_submission', models.BooleanField()),
('password', models.CharField(max_length=30, null=True, blank=True)),
('contest_type', models.IntegerField()),
('start_time', models.DateTimeField()),
('end_time', models.DateTimeField()),
('create_time', models.DateTimeField(auto_now_add=True)),
('last_updated_time', models.DateTimeField(auto_now=True)),
('created_by', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
('groups', models.ManyToManyField(to='group.Group')),
],
options={
'db_table': 'contest',
},
),
migrations.CreateModel(
name='ContestProblem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50)),
('description', models.TextField()),
('input_description', models.CharField(max_length=10000)),
('output_description', models.CharField(max_length=10000)),
('samples', models.TextField(blank=True)),
('test_case_id', models.CharField(max_length=40)),
('hint', models.TextField(null=True, blank=True)),
('create_time', models.DateTimeField(auto_now_add=True)),
('time_limit', models.IntegerField()),
('memory_limit', models.IntegerField()),
('visible', models.BooleanField(default=True)),
('total_submit_number', models.IntegerField(default=0)),
('total_accepted_number', models.IntegerField(default=0)),
('sort_index', models.CharField(max_length=30)),
('contest', models.ForeignKey(to='contest.Contest')),
('created_by', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'contest_problem',
},
),
migrations.CreateModel(
name='ContestProblemTestCase',
fields=[
('id', models.CharField(max_length=40, serialize=False, primary_key=True, db_index=True)),
('score', models.IntegerField()),
('problem', models.ForeignKey(to='contest.ContestProblem')),
],
options={
'db_table': 'contest_problem_test_case',
},
),
]
| mit | 4,021,260,478,137,278,000 | 43.756757 | 114 | 0.537742 | false |
erykoff/redmapper | redmapper/depth_fitting.py | 1 | 11700 | """Classes and routines for simple fits to galaxy catalog depth.
"""
import fitsio
import numpy as np
import esutil
import scipy.optimize
class DepthFunction(object):
"""
Class to implement function for fitting depth.
"""
def __init__(self,mag,magErr,zp,nSig):
"""
Instantiate DepthFunction class.
Parameters
----------
mag: `np.array`
Float array of magnitudes
magErr: `np.array`
Float array of magnitude errors
zp: `float`
Reference zeropoint
nSig: `float`
Number of sigma to compute depth limit
"""
self.const = 2.5/np.log(10.0)
self.mag = mag
self.magErr = magErr
self.zp = zp
self.nSig = nSig
self.max_p1 = 1e10
def __call__(self, x):
"""
Compute total cost function for f(x)
Parameters
----------
x: `np.array`, length 2
Float array of fit parameters
Returns
-------
t: `float`
Total cost function at parameters x
"""
if ((x[1] < 0.0) or
(x[1] > self.max_p1)):
return 1e10
f1lim = 10.**((x[0] - self.zp)/(-2.5))
fsky1 = ((f1lim**2. * x[1])/(self.nSig**2.) - f1lim)
if (fsky1 < 0.0):
return 1e10
tflux = x[1]*10.**((self.mag - self.zp)/(-2.5))
err = np.sqrt(fsky1*x[1] + tflux)
# apply the constant here, not to the magErr, which was dumb
t=np.sum(np.abs(self.const*(err/tflux) - self.magErr))
if not np.isfinite(t):
t=1e10
return t
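    # Model sketch (restating the cost above; notation is an assumption): with
    # effective exposure time t = x[1] and limiting flux
    # f1lim = 10**((x[0] - zp) / -2.5), the predicted flux error of an object
    # of magnitude m is err(m) = sqrt(fsky1*t + t*10**((m - zp) / -2.5)), and
    # the cost is the L1 misfit of const*err/tflux against the observed magErr.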
# FIXME: want to be able to reuse code from utilities!
def applyErrorModel(pars, magIn, noNoise=False, lnscat=None):
"""
Apply error model to set of magnitudes
Parameters
----------
pars: `np.ndarray`
Error parameter structure
magIn: `np.array`
Float array with input magnitudes
noNoise: `bool`, optional
Do not apply noise? Default is False
lnscat: `float`, optional
Additional log-scatter. Default is None.
Returns
-------
mag: `np.array`
Float array of magnitudes
magErr: `np.array`
Float array of magnitude errors
"""
tFlux = pars['EXPTIME'][0]*10.**((magIn - pars['ZP'][0])/(-2.5))
noise = np.sqrt(pars['FSKY1'][0]*pars['EXPTIME'][0] + tFlux)
if lnscat is not None:
noise = np.exp(np.log(noise) + lnscat * np.random.normal(size=noise.size))
if (noNoise):
flux = tFlux
else:
        flux = tFlux + noise*np.random.normal(size=magIn.size)
# Straight magnitudes
mag = pars['ZP'][0] - 2.5*np.log10(flux/pars['EXPTIME'][0])
magErr = (2.5/np.log(10.)) * (noise/flux)
return mag, magErr
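# Usage sketch (only the fields accessed above are required; the values are
# illustrative):
#   pars = np.zeros(1, dtype=[('EXPTIME', 'f4'), ('ZP', 'f4'), ('FSKY1', 'f4')])
#   pars['EXPTIME'], pars['ZP'], pars['FSKY1'] = 100.0, 22.5, 50.0
#   mag, magErr = applyErrorModel(pars, np.array([20.0, 21.5]))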
def calcErrorModel(_mag, _magErr, nSig=10.0, doPlot=False, nTrial=100, calcErr=False,
useBoot=False, snCut=5.0, zp=22.5, oldIDL=False):
"""
    Calculate the error model for a given list of magnitudes and errors.
Parameters
----------
_mag: `np.array`
Float array of input magnitudes
_magErr: `np.array`
Float array of input magnitude errors
nSig: `float`, optional
Number of sigma to compute maglim. Default is 10.0
doPlot: `bool`, optional
Plot results. Default is False.
nTrial: `int`, optional
Number of trials for bootstrap errors. Default is 100.
calcErr: `bool`, optional
Calculate parameter errors? Default is False.
useBoot: `bool`, optional
Use bootstrap error estimation? Default is False.
snCut: `float`, optional
Minimum signal/noise to use in the fit. Default is 5.0
zp: `float`, optional
Default reference zeropoint. Default is 22.5.
oldIDL: `bool`, optional
Use older (worse) IDL compatibility mode. Default is False.
Returns
-------
pars: `np.ndarray`
Error model parameters
val: `int`
        0 for success, 1 on failure.
fig: `matplotlib.Figure`, if doPlot is True
ax: `matplotlib.Axis`, if doPlot is True
"""
const = 2.5/np.log(10.)
# first need to filter out really bad ones
ok,=np.where((np.isfinite(_mag)) &
(np.isfinite(_magErr)) &
(_magErr > 0.0))
mag=_mag[ok]
magErr=_magErr[ok]
if oldIDL:
# old IDL version...
st=np.argsort(mag)
gd,=np.where(mag < mag[st[np.int32(0.98*mag.size)]])
else:
# new python
st=np.argsort(mag)
gd,=np.where((mag < mag[st[np.int32(0.98*mag.size)]]) &
(magErr < const / snCut))
if (gd.size == 0):
if (doPlot):
return (-1,1,None,None)
else:
return (-1,1)
# extra const here?
dFunc = DepthFunction(mag[gd], magErr[gd], zp, nSig)
# get the reference limiting mag
test,=np.where((magErr[gd] > const/nSig) &
(magErr[gd] < 1.1*const/nSig))
if (test.size >= 3):
limmagStart = np.median(mag[gd[test]])
else:
# I don't like this alternative
st=np.argsort(mag[gd])
limmagStart = mag[gd[st[np.int32(0.95*st.size)]]]
# figure out where to start the effective exposure time
# go from 1.0 to 10000, logarithmically...
# note that this range (from IDL code) works for zp=22.5.
# For other zps, need to rescale here
expRange=np.array([1.0,10000.])/(10.**((zp - 22.5)/2.5))
nSteps=20
binSize=(np.log(expRange[1])-np.log(expRange[0]))/(nSteps-1)
expTimes=np.exp(np.arange(nSteps)*binSize)*expRange[0]
tTest=np.zeros(nSteps)
for i,expTime in enumerate(expTimes):
# call a function...
dFunc.max_p1 = expTime*2.
tTest[i] = dFunc([limmagStart, expTime])
ind = np.argmin(tTest)
p0=np.array([limmagStart, expTimes[ind]])
# try single fit
dFunc.max_p1 = 10.0*p0[1]
ret = scipy.optimize.fmin(dFunc, p0,disp=False, full_output=True,retall=False)
# check for convergence here...
if (ret[-1] > 0):
# could not converge
if (doPlot):
return (-1,1,None,None)
else:
return (-1,1)
p = ret[0]
pars=np.zeros(1,dtype=[('EXPTIME','f4'),
('ZP','f4'),
('LIMMAG','f4'),
('NSIG','f4'),
('FLUX1_LIM','f4'),
('FSKY1','f4'),
('LIMMAG_ERR','f4'),
('EXPTIME_ERR','f4'),
('FRAC_OUT','f4')])
pars['EXPTIME'] = p[1]
pars['ZP'] = dFunc.zp
pars['LIMMAG'] = p[0]
pars['NSIG'] = dFunc.nSig
pars['FLUX1_LIM'] = 10.**((p[0] - dFunc.zp)/(-2.5))
pars['FSKY1'] = (pars['FLUX1_LIM'][0]**2.*p[1])/(dFunc.nSig**2.) - pars['FLUX1_LIM'][0]
# compute frac_out, the fraction of outliers
testMag, testMagErr = applyErrorModel(pars, dFunc.mag, noNoise=True)
out,=np.where(np.abs(testMagErr - dFunc.magErr) > 0.005)
pars['FRAC_OUT'] = np.float64(out.size)/np.float64(gd.size)
if (calcErr):
limMags=np.zeros(nTrial,dtype=np.float32)
expTimes=np.zeros_like(limMags)
p0=p.copy()
for i in range(nTrial):
r=np.int32(np.random.random(gd.size)*gd.size)
dFunc.mag = mag[gd[r]]
dFunc.magErr = magErr[gd[r]]
ret = scipy.optimize.fmin(dFunc, p0, disp=False, full_output=True,retall=False)
            if (ret[4] > 0):
p = p0
else:
p = ret[0]
limMags[i] = p[0]
expTimes[i] = p[1]
# use IQD for errors
st=np.argsort(limMags)
pars['LIMMAG_ERR'] = (limMags[st[np.int32(0.75*nTrial)]] - limMags[st[np.int32(0.25*nTrial)]])/1.35
st=np.argsort(expTimes)
pars['EXPTIME_ERR'] = (expTimes[st[np.int32(0.75*nTrial)]] - expTimes[st[np.int32(0.25*nTrial)]])/1.35
if (useBoot):
pars['LIMMAG'] = np.median(limMags)
pars['EXPTIME'] = np.median(expTimes)
    if (doPlot):
        # matplotlib is only needed when plotting; import it lazily here.
        import matplotlib.pyplot as plt
        fig=plt.figure(1)
fig.clf()
ax=fig.add_subplot(111)
st=np.argsort(testMag)
if (not calcErr):
ax.plot(testMag[st], testMagErr[st], 'k-')
else:
testPars = pars.copy()
alphaColor = np.zeros(4)
alphaColor[0:3] = 0.5
alphaColor[3] = 0.5
for i in range(nTrial):
testPars['LIMMAG'] = limMags[i]
testPars['EXPTIME'] = expTimes[i]
testPars['FLUX1_LIM'] = 10.**((limMags[i] - dFunc.zp)/(-2.5))
testPars['FSKY1'] = (testPars['FLUX1_LIM'][0]**2.*expTimes[i])/(dFunc.nSig**2.) - testPars['FLUX1_LIM'][0]
mTest, mErrTest = applyErrorModel(testPars, testMag[st], noNoise=True)
ax.plot(mTest, mErrTest, '-',color=alphaColor)
ax.plot(testMag[st],testMagErr[st],'c--')
ax.plot(mag[gd], magErr[gd],'r.')
xlim = ax.get_xlim()
ylim = ax.get_ylim()
ax.plot([pars['LIMMAG'][0],pars['LIMMAG'][0]],[0,1],'k--')
ax.plot([0,100],[1.086/nSig,1.086/nSig],'k--')
ax.set_xlim(xlim)
ax.set_ylim(ylim)
return (pars,0,fig,ax)
else:
# success
return (pars,0)
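# End-to-end sketch: fit the error model on a synthetic catalog and read off
# the 10-sigma limiting magnitude. All numbers here are illustrative only.
def _example_calc_error_model():
    rng = np.random.RandomState(0)
    mag = rng.uniform(18.0, 23.0, size=2000)
    magErr = 0.01 + 0.1 * 10.**(0.4 * (mag - 23.0))
    pars, fail = calcErrorModel(mag, magErr, nSig=10.0)
    if fail:
        raise RuntimeError('error-model fit did not converge')
    return pars['LIMMAG'][0], pars['EXPTIME'][0]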
class DepthLim(object):
"""
Class to compute depth limits from data, if external map is not available.
This class is used to compute depth in realtime from data.
"""
def __init__(self, mag, mag_err, max_gals=100000):
"""
Instantiate DepthLim object.
Upon initialization this will compute default global parameters that
will be used if a fit cannot be performed.
Parameters
----------
mag: `np.array`
Float array of magnitudes from a large region of sky
mag_err: `np.array`
Float array of magnitude errors from a large region of sky
max_gals: `int`
Maximum number of galaxies to sample to get global default fit.
Raises
------
RuntimeError:
If a global fit cannot be performed.
"""
# This gets a global fit, to use as a fallback
if mag.size < max_gals:
use = np.arange(mag.size)
else:
use = np.random.choice(np.arange(mag.size), size=max_gals, replace=False)
self.initpars, fail = calcErrorModel(mag[use], mag_err[use], calcErr=False)
if fail:
raise RuntimeError("Complete failure on getting limiting mag fit")
def calc_maskdepth(self, maskgals, mag, mag_err):
"""
Calculate mask depth empirically for a set of galaxies.
This will modify maskgals.limmag, maskgals.exptime, maskgals.zp,
maskgals.nsig to the fit values (or global if fit cannot be performed).
Parameters
----------
maskgals: `redmapper.Catalog`
maskgals catalog
mag: `np.array`
Float array of local magnitudes
mag_err: `np.array`
Float array of local magnitude errors
"""
limpars, fail = calcErrorModel(mag, mag_err, calcErr=False)
if fail:
maskgals.limmag[:] = self.initpars['LIMMAG']
maskgals.exptime[:] = self.initpars['EXPTIME']
maskgals.zp[0] = self.initpars['ZP']
maskgals.nsig[0] = self.initpars['NSIG']
else:
maskgals.limmag[:] = limpars['LIMMAG']
maskgals.exptime[:] = limpars['EXPTIME']
maskgals.zp[0] = limpars['ZP']
maskgals.nsig[0] = limpars['NSIG']
return
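# Usage sketch for DepthLim. Real callers pass a redmapper maskgals catalog;
# the tiny stand-in class here only mimics the attributes that are written to.
class _FakeMaskgals(object):
    def __init__(self, n):
        self.limmag = np.zeros(n)
        self.exptime = np.zeros(n)
        self.zp = np.zeros(1)
        self.nsig = np.zeros(1)
def _example_depthlim(mag, mag_err):
    depth = DepthLim(mag, mag_err)
    maskgals = _FakeMaskgals(10)
    depth.calc_maskdepth(maskgals, mag, mag_err)
    return maskgals.limmag[0], maskgals.nsig[0]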
| apache-2.0 | -485,585,583,846,440,260 | 28.620253 | 122 | 0.544872 | false |
jabesq/home-assistant | homeassistant/components/litejet/__init__.py | 7 | 1402 | """Support for the LiteJet lighting system."""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import discovery
from homeassistant.const import CONF_PORT
_LOGGER = logging.getLogger(__name__)
CONF_EXCLUDE_NAMES = 'exclude_names'
CONF_INCLUDE_SWITCHES = 'include_switches'
DOMAIN = 'litejet'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_PORT): cv.string,
vol.Optional(CONF_EXCLUDE_NAMES): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_INCLUDE_SWITCHES, default=False): cv.boolean,
})
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up the LiteJet component."""
from pylitejet import LiteJet
url = config[DOMAIN].get(CONF_PORT)
hass.data['litejet_system'] = LiteJet(url)
hass.data['litejet_config'] = config[DOMAIN]
discovery.load_platform(hass, 'light', DOMAIN, {}, config)
if config[DOMAIN].get(CONF_INCLUDE_SWITCHES):
discovery.load_platform(hass, 'switch', DOMAIN, {}, config)
discovery.load_platform(hass, 'scene', DOMAIN, {}, config)
return True
def is_ignored(hass, name):
"""Determine if a load, switch, or scene should be ignored."""
for prefix in hass.data['litejet_config'].get(CONF_EXCLUDE_NAMES, []):
if name.startswith(prefix):
return True
return False
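# Illustrative only: validating a sample configuration dict against
# CONFIG_SCHEMA. The serial port path is a placeholder.
def _example_validate_config():
    sample = {
        DOMAIN: {
            CONF_PORT: '/dev/ttyUSB0',
            CONF_EXCLUDE_NAMES: ['Bedroom'],
            CONF_INCLUDE_SWITCHES: True,
        }
    }
    return CONFIG_SCHEMA(sample)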
| apache-2.0 | 5,228,681,563,367,983,000 | 28.208333 | 79 | 0.687589 | false |
newmediamedicine/indivo_server_1_0 | codingsystems/data/snomed.py | 1 | 1142 | """
SNOMED loading
Ben Adida
2010-08-25
"""
from django.utils import simplejson
from loadutils import create_codingsystem
import os.path
import csv
from codingsystems import models
def load(stream, codingsystem, delimiter='|'):
"""
load data from a file input stream.
"""
csv_reader = csv.reader(stream, delimiter = delimiter)
for row in csv_reader:
try:
snomed_cid, snomed_fsn, snomed_concept_status, umls_cui, occurrence, usage, first_in_subset, is_retired_from_subset, last_in_subset, replaced_by_snomed_cid = row
except ValueError:
continue
models.CodedValue.objects.create(system = codingsystem,
code = snomed_cid, umls_code = umls_cui,
physician_value = snomed_fsn, consumer_value = snomed_fsn)
def create_and_load_from(filepath):
if not os.path.isfile(filepath):
print "Can't load SNOMED, the file does not exist at %s" % filepath
return
codingsystem = create_codingsystem('snomed', 'SNOMED concept codes with UMLS')
load(open(filepath, "r"), codingsystem)
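# Example (illustrative): feeding load() an in-memory stream containing one
# pipe-delimited row. The codes below are placeholders, not vetted
# SNOMED/UMLS identifiers.
def example_load_from_string(codingsystem):
    from StringIO import StringIO
    row = '|'.join(['138875005', 'SNOMED CT Concept', '0', 'C0000001',
                    '1', '1', '0', '0', '0', ''])
    load(StringIO(row + '\n'), codingsystem)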
| gpl-3.0 | 1,867,449,296,014,082,000 | 28.282051 | 173 | 0.64711 | false |
shaun-h/PyDoc | Managers/UserContributedManager.py | 1 | 21125 | import requests
import re
import json
import ast
import os
import ui
import threading
import tarfile
import math
import time
import plistlib
import console
import shutil
import sqlite3
import base64
import clipboard
import os
import Image
import io
import copy
import yaml
from Managers import DBManager, TypeManager
from Utilities import LogThread
from distutils.version import LooseVersion
class UserContributed (object):
def __init__(self):
self.__version = ''
self.__globalversion = ''
self.__name = ''
self.__aliases = []
self.__icon = None
self.__id = ''
self.__path = None
self.__status = ''
self.__stats = ''
self.__archive = ''
self.__authorName = ''
self.__onlineid = ''
self.__imageData = ''
self.__hasVersions = False
self.__specificVersions = []
@property
def version(self):
return self.__version
@version.setter
def version(self, version):
self.__version = version
@property
def globalversion(self):
return self.__globalversion
@globalversion.setter
def globalversion(self, globalversion):
self.__globalversion = globalversion
@property
def name(self):
return self.__name
@name.setter
def name(self, name):
self.__name = name
@property
def aliases(self):
return self.__aliases
@aliases.setter
def aliases(self, aliases):
self.__aliases = aliases
@property
def image(self):
return self.__icon
@image.setter
def image(self, icon):
self.__icon = icon
@property
def id(self):
return self.__id
@id.setter
def id(self, id):
self.__id = id
@property
def onlineid(self):
return self.__onlineid
@onlineid.setter
def onlineid(self, id):
self.__onlineid = id
@property
def path(self):
return self.__path
@path.setter
def path(self, path):
self.__path = path
@property
def status(self):
return self.__status
@status.setter
def status(self, status):
self.__status = status
@property
def stats(self):
return self.__stats
@stats.setter
def stats(self, stats):
self.__stats = stats
@property
def archive(self):
return self.__archive
@archive.setter
def archive(self, archive):
self.__archive = archive
@property
def authorName(self):
return self.__authorName
@authorName.setter
def authorName(self, an):
self.__authorName = an
@property
def imageData(self):
return self.__imageData
@imageData.setter
def imageData(self, data):
self.__imageData = data
@property
def hasVersions(self):
return self.__hasVersions
@hasVersions.setter
def hasVersions(self, data):
self.__hasVersions = data
@property
def specificVersions(self):
return self.__specificVersions
@specificVersions.setter
def specificVersions(self, data):
self.__specificVersions = data
class UserContributedManager (object):
def __init__(self, serverManager, iconPath, typeIconPath):
self.typeManager = TypeManager.TypeManager(typeIconPath)
self.serverManager = serverManager
self.iconPath = iconPath
self.typeIconPath = typeIconPath
self.localServer = None
self.jsonServerLocation = 'zzz/user_contributed/build/index.json'
self.downloadServerLocation = 'zzz/user_contributed/build/%@/%$'
self.plistPath = 'Contents/Info.plist'
self.indexPath = 'Contents/Resources/docSet.dsidx'
self.userContributedFolder = 'Docsets/UserContributions'
self.headers = {'User-Agent': 'PyDoc-Pythonista'}
self.usercontributed = None
self.downloading = []
self.updateAvailable = []
self.workThreads = []
self.downloadThreads = []
self.uiUpdateThreads = []
self.lastDocsetGroup = None
self.__createUserContributedFolder()
self.createInitialSearchIndexAllDocsets()
def getAvailableUserContributed(self):
usercontributed = self.__getOnlineUserContributed()
for d in self.__getDownloadedUserContributed():
for c in usercontributed:
if c.name == d.name and c.version == d.version:
c.status = 'installed'
c.path = d.path
c.id = d.id
for d in self.updateAvailable:
for c in usercontributed:
if c.name == d.name:
c.status = "Update Available"
for d in self.__getDownloadingUserContributed():
for c in usercontributed:
if c.name == d.name and c.version == d.version:
c.status = d.status
c.version = d.version
c.hasVersions = d.hasVersions
try:
c.stats = d.stats
except KeyError:
c.stats = 'downloading'
return usercontributed
def __getOnlineUserContributed(self):
if self.usercontributed == None:
self.usercontributed = self.__getUserContributed()
return self.usercontributed
def __getDownloadedUserContributed(self):
dbManager = DBManager.DBManager()
t = dbManager.InstalledDocsetsByType('usercontributed')
ds = []
for d in t:
aa = UserContributed()
aa.name = d[1]
aa.id = d[0]
aa.path = os.path.join(os.path.abspath('.'),d[2])
imgData = str(d[4])
if not imgData == '':
imgdata = base64.standard_b64decode(imgData)
aa.image = ui.Image.from_data(imgdata)
else:
aa.image = self.__getIconWithName('Other')
od = yaml.load(d[6])
if type(od) is type({}):
aa.authorName = od['author']
aa.hasVersions = od['hasVersions']
else:
aa.authorName = od
aa.version = d[5]
ds.append(aa)
return ds
def __getDownloadingUserContributed(self):
return self.downloading
def getDownloadedUserContributed(self):
return self.__getDownloadedUserContributed()
def __getUserContributed(self):
server = self.serverManager.getDownloadServer(self.localServer)
url = server.url
if not url[-1] == '/':
url = url + '/'
url = url + self.jsonServerLocation
data = requests.get(url).text
data = ast.literal_eval(data)
usercontributed = []
defaultIcon = self.__getIconWithName('Other')
for k,d in data['docsets'].items():
u = UserContributed()
u.name = d['name']
if 'aliases' in d.keys():
u.aliases = d['aliases']
u.version = d['version']
u.archive = d['archive']
u.authorName = d['author']['name']
u.hasVersions = 'specific_versions' in d.keys()
if u.hasVersions:
u.specificVersions = d['specific_versions']
if 'icon' in d.keys():
imgdata = base64.standard_b64decode(d['icon'])
u.image = ui.Image.from_data(imgdata)
u.imageData = d['icon']
else:
u.image = defaultIcon
u.onlineid = k
u.status = 'online'
usercontributed.append(u)
return sorted(usercontributed, key=lambda x: x.name.lower())
def checkDocsetsForUpdates(self, docsets):
console.show_activity('Checking for updates...')
self.usercontributed = None
online = self.__getOnlineUserContributed()
for d in docsets:
if not d.hasVersions and d.status == 'installed':
console.show_activity('Checking ' + d.name + ' for update...')
for f in online:
if f.name == d.name:
if LooseVersion(str(d.version).replace('/','')) < LooseVersion(str(f.version).replace('/','')):
d.status = 'Update Available'
d.version = f.version
self.updateAvailable.append(d)
def getOnlineVersions(self, doc= None):
d = None
if doc == None:
d = self.lastDocsetGroup
else:
self.lastDocsetGroup = doc
d = doc
data = [d]
downloaded = self.getDownloadedUserContributed()
endCheck = []
for dad in downloaded:
if dad.name == d.name:
endCheck.append(dad)
toRemoveOrig = [i for i in endCheck if i.name==d.name and i.version == d.version]
for rt in toRemoveOrig:
endCheck.remove(rt)
for version in d.specificVersions:
if not '_comment' in version.keys():
da = copy.copy(d)
da.specificVersions = []
da.status = 'online'
da.version = version['version']
da.archive = version['archive'].replace('\\','')
da.path = None
for down in downloaded:
if da.name == down.name and da.version == down.version:
da.status = 'installed'
da.path = down.path
da.id = down.id
toRemoveFromEndCheck = [i for i in endCheck if i.name==da.name and i.version == da.version]
for rt in toRemoveFromEndCheck:
endCheck.remove(rt)
add = True
for toCheck in data:
if toCheck.name == da.name and toCheck.version == da.version:
add = False
if add:
data.append(da)
for e in endCheck:
e.status = 'installed'
data.append(e)
return sorted(data, key=lambda x: x.version, reverse=True)
def __getLocalIcon(self, path):
imgPath = os.path.join(os.path.abspath('.'),path,'icon.png')
if not os.path.exists(imgPath):
imgPath = os.path.join(os.path.abspath('.'), self.iconPath, 'Other.png')
return ui.Image.named(imgPath)
def __getIconWithName(self, name):
imgPath = os.path.join(os.path.abspath('.'), self.iconPath, name+'.png')
if not os.path.exists(imgPath):
imgPath = os.path.join(os.path.abspath('.'), self.iconPath, 'Other.png')
return ui.Image.named(imgPath)
def __createUserContributedFolder(self):
if not os.path.exists(self.userContributedFolder):
os.mkdir(self.userContributedFolder)
def downloadUserContributed(self, usercontributed, action, refresh_main_view):
if not usercontributed in self.downloading:
removeSoon = []
for d in self.updateAvailable:
if d.name == usercontributed.name:
removeSoon.append(d)
for d in removeSoon:
self.updateAvailable.remove(d)
usercontributed.status = 'downloading'
self.downloading.append(usercontributed)
action()
workThread = LogThread.LogThread(target=self.__determineUrlAndDownload, args=(usercontributed,action,refresh_main_view,))
self.workThreads.append(workThread)
workThread.start()
def __determineUrlAndDownload(self, usercontributed, action, refresh_main_view):
usercontributed.stats = 'getting download link'
action()
downloadLink = self.__getDownloadLink(usercontributed.onlineid, usercontributed.archive)
downloadThread = LogThread.LogThread(target=self.downloadFile, args=(downloadLink,usercontributed,refresh_main_view,))
self.downloadThreads.append(downloadThread)
downloadThread.start()
updateThread = LogThread.LogThread(target=self.updateUi, args=(action,downloadThread,))
self.uiUpdateThreads.append(updateThread)
updateThread.start()
def updateUi(self, action, t):
while t.is_alive():
action()
time.sleep(0.5)
action()
def __getDownloadLink(self, id, archive):
server = self.serverManager.getDownloadServer(self.localServer)
url = server.url
if not url[-1] == '/':
url = url + '/'
url = url + self.downloadServerLocation
url = url.replace('%@', id)
url = url.replace('%$', archive)
return url
def downloadFile(self, url, usercontributed, refresh_main_view):
local_filename = self.__downloadFile(url, usercontributed)
#self.__downloadFile(url+'.tarix', cheatsheet)
usercontributed.status = 'waiting for install'
self.installUserContributed(local_filename, usercontributed, refresh_main_view)
def __downloadFile(self, url, usercontributed):
local_filename = self.userContributedFolder+'/'+str(usercontributed.version).replace('/','_')+url.split('/')[-1]
r = requests.get(url, headers = self.headers, stream=True)
ret = None
if r.status_code == 200:
ret = local_filename
total_length = r.headers.get('content-length')
dl = 0
last = 0
if os.path.exists(local_filename):
os.remove(local_filename)
with open(local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
dl += len(chunk)
f.write(chunk)
if not total_length == None:
done = 100 * dl / int(total_length)
usercontributed.stats = str(round(done,2)) + '% ' + str(self.convertSize(dl)) + ' / '+ str(self.convertSize(float(total_length)))
else:
usercontributed.stats = str(self.convertSize(dl))
r.close()
return ret
def installUserContributed(self, filename, usercontributed, refresh_main_view):
extract_location = os.path.join(self.userContributedFolder, '_'+usercontributed.name.replace('/','_'), '_'+usercontributed.version.replace('/','_'))
usercontributed.status = 'Preparing to install: This might take a while.'
tar = tarfile.open(filename, 'r:gz')
n = [name for name in tar.getnames() if '/' not in name][0]
m = os.path.join(extract_location, n)
tar.extractall(path=extract_location, members = self.track_progress(tar, usercontributed, len(tar.getmembers())))
tar.close()
encodedImg = usercontributed.imageData
dbManager = DBManager.DBManager()
otherAtt = {}
otherAtt['author'] = usercontributed.authorName
otherAtt['hasVersions'] = usercontributed.hasVersions
dbManager.DocsetInstalled(usercontributed.name, m, 'usercontributed', str(encodedImg), usercontributed.version, str(otherAtt))
os.remove(filename)
if usercontributed in self.downloading:
self.downloading.remove(usercontributed)
self.indexUserContributed(usercontributed, refresh_main_view, m)
def track_progress(self, members, usercontributed, totalFiles):
i = 0
for member in members:
i = i + 1
done = 100 * i / totalFiles
usercontributed.status = 'installing: ' + str(round(done,2)) + '% ' + str(i) + ' / '+ str(totalFiles)
yield member
def indexUserContributed(self, usercontributed, refresh_main_view, path):
usercontributed.status = 'indexing'
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT count(*) FROM sqlite_master WHERE type = \'table\' AND name = \'searchIndex\''
c = conn.execute(sql)
data = c.fetchone()
if int(data[0]) == 0:
sql = 'CREATE TABLE searchIndex(rowid INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT)'
c = conn.execute(sql)
conn.commit()
sql = 'SELECT f.ZPATH, m.ZANCHOR, t.ZTOKENNAME, ty.ZTYPENAME, t.rowid FROM ZTOKEN t, ZTOKENTYPE ty, ZFILEPATH f, ZTOKENMETAINFORMATION m WHERE ty.Z_PK = t.ZTOKENTYPE AND f.Z_PK = m.ZFILE AND m.ZTOKEN = t.Z_PK ORDER BY t.ZTOKENNAME'
c = conn.execute(sql)
data = c.fetchall()
for t in data:
conn.execute("insert into searchIndex values (?, ?, ?, ?)", (t[4], t[2], self.typeManager.getTypeForName(t[3]).name, t[0] ))
conn.commit()
else:
sql = 'SELECT rowid, type FROM searchIndex'
c = conn.execute(sql)
data = c.fetchall()
for t in data:
newType = self.typeManager.getTypeForName(t[1])
if not newType == None and not newType.name == t[1]:
conn.execute("UPDATE searchIndex SET type=(?) WHERE rowid = (?)", (newType.name, t[0] ))
conn.commit()
indexSql = 'CREATE INDEX ix_searchIndex_name ON searchIndex(name)'
conn.execute(indexSql)
conn.close()
self.postProcess(usercontributed, refresh_main_view)
def createInitialSearchIndexAllDocsets(self):
docsets = self.getDownloadedUserContributed()
for d in docsets:
indexPath = os.path.join(d.path, self.indexPath)
			conn = sqlite3.connect(indexPath)
indexSql = 'CREATE INDEX IF NOT EXISTS ix_searchIndex_name ON searchIndex(name)'
conn.execute(indexSql)
conn.close()
def postProcess(self, usercontributed, refresh_main_view):
usercontributed.status = 'installed'
refresh_main_view()
def convertSize(self, size):
if (size == 0):
return '0B'
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
i = int(math.floor(math.log(size,1024)))
p = math.pow(1024,i)
s = round(size/p,2)
return '%s %s' % (s,size_name[i])
def deleteUserContributed(self, usercontributed, post_action, confirm = True):
but = 1
if confirm:
but = console.alert('Are you sure?', 'Would you like to delete the docset, ' + usercontributed.name, 'Ok')
if but == 1:
dbmanager = DBManager.DBManager()
dbmanager.DocsetRemoved(usercontributed.id)
shutil.rmtree(usercontributed.path)
usercontributed.status = 'online'
usercontributed.path = None
if not post_action == None:
post_action()
def getTypesForUserContributed(self, usercontributed):
types = []
path = usercontributed.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type FROM searchIndex GROUP BY type ORDER BY type COLLATE NOCASE'
c = conn.execute(sql)
data = c.fetchall()
conn.close()
for t in data:
types.append(self.typeManager.getTypeForName(t[0]))
return types
def getIndexesbyTypeAndNameForUserContributed(self, usercontributed, typeName, name):
indexes = []
path = usercontributed.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type, name, path FROM searchIndex WHERE type = (?) AND name LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (typeName, name,))
data = c.fetchall()
conn.close()
dTypes ={}
type = None
for t in data:
if t[0] in dTypes.keys():
type= dTypes[t[0]]
else:
type = self.typeManager.getTypeForName(t[0])
dTypes[t[0]] = type
indexes.append({'type':type, 'name':t[1],'path':t[2]})
return indexes
def getIndexesbyNameForUserContributed(self, usercontributed, name):
indexes = []
path = usercontributed.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type, name, path FROM searchIndex WHERE name LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (name,))
data = c.fetchall()
conn.close()
dTypes ={}
type = None
for t in data:
if t[0] in dTypes.keys():
type= dTypes[t[0]]
else:
type = self.typeManager.getTypeForName(t[0])
dTypes[t[0]] = type
indexes.append({'type':type, 'name':t[1],'path':t[2]})
return indexes
def getIndexesbyTypeForUserContributed(self, usercontributed, type):
indexes = []
path = usercontributed.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type, name, path FROM searchIndex WHERE type = (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (type.name,))
data = c.fetchall()
conn.close()
dTypes ={}
type = None
for t in data:
if t[0] in dTypes.keys():
type= dTypes[t[0]]
else:
type = self.typeManager.getTypeForName(t[0])
dTypes[t[0]] = type
indexes.append({'type':type, 'name':t[1],'path':t[2]})
return indexes
def getIndexesForUserContributed(self, usercontributed):
indexes = []
path = usercontributed.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type, name, path FROM searchIndex ORDER BY name COLLATE NOCASE'
c = conn.execute(sql)
data = c.fetchall()
conn.close()
dTypes ={}
type = None
for t in data:
if t[0] in dTypes.keys():
type= dTypes[t[0]]
else:
type = self.typeManager.getTypeForName(t[0])
dTypes[t[0]] = type
			# __getTypeIconWithName was never defined; fall back to the generic icon lookup.
			indexes.append({'type':type, 'image':self.__getIconWithName(t[0]), 'name':t[1],'path':t[2]})
		return indexes
def getIndexesbyNameForAllUserContributed(self, name):
if name == None or name == '':
return {}
else:
docsets = self.getDownloadedUserContributed()
indexes = {}
for d in docsets:
ind = self.getIndexesbyNameForDocsetSearch(d, name)
for k in ind:
if not k in indexes.keys():
indexes[k] = []
indexes[k].extend(ind[k])
return indexes
def getIndexesbyNameForDocsetSearch(self, docset, name):
if name == None or name == '':
return []
else:
ind = {}
path = docset.path
indexPath = os.path.join(path, self.indexPath)
conn = sqlite3.connect(indexPath)
sql = 'SELECT type, name, path FROM searchIndex WHERE name LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (name, ))
data = {'first' : c.fetchall()}
sql = 'SELECT type, name, path FROM searchIndex WHERE name LIKE (?) AND name NOT LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (name.replace(' ','%'), name, ))
data['second'] = c.fetchall()
sql = 'SELECT type, name, path FROM searchIndex WHERE name LIKE (?) AND name NOT LIKE (?) AND name NOT LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, (name.replace(' ','%')+'%', name.replace(' ','%'), name, ))
data['third'] = c.fetchall()
sql = 'SELECT type, name, path FROM searchIndex WHERE name LIKE (?) AND name NOT LIKE (?) AND name NOT LIKE (?) AND name NOT LIKE (?) ORDER BY name COLLATE NOCASE'
c = conn.execute(sql, ('%'+name.replace(' ','%')+'%',name.replace(' ','%')+'%',name.replace(' ','%'), name, ))
data['fourth'] = c.fetchall()
conn.close()
dTypes = {}
for k in data:
ind[k] = []
for t in data[k]:
url = 'file://' + os.path.join(path, 'Contents/Resources/Documents', t[2])
url = url.replace(' ', '%20')
type = None
if t[0] in dTypes.keys():
type= dTypes[t[0]]
else:
type = self.typeManager.getTypeForName(t[0])
dTypes[t[0]] = type
ind[k].append({'name':t[1], 'path':url, 'icon':docset.image,'docsetname':docset.name,'type':type, 'callbackOverride':'', 'docset': docset, 'hasVersions':docset.hasVersions,'version':docset.version})
return ind
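# Sketch (illustrative): searching every installed docset for a term. The
# '%' wildcards match the LIKE patterns used by the query methods above.
def _example_search(manager, term):
	hits = manager.getIndexesbyNameForAllUserContributed('%' + term + '%')
	return dict((bucket, len(entries)) for bucket, entries in hits.items())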
if __name__ == '__main__':
import ServerManager
c = UserContributedManager(ServerManager.ServerManager(), '../Images/icons', '../Images/types')
print(c.getAvailableUserContributed())
| mit | -8,551,626,419,558,266,000 | 30.34273 | 234 | 0.678249 | false |
parksandwildlife/borgcollector | tablemanager/migrations/0024_auto_20160226_1239.py | 2 | 1290 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import tablemanager.models
class Migration(migrations.Migration):
dependencies = [
('tablemanager', '0023_auto_20160219_1402'),
]
operations = [
migrations.AddField(
model_name='datasource',
name='password',
field=models.CharField(max_length=320, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='datasource',
name='sql',
field=tablemanager.models.SQLField(default="CREATE SERVER {{self.name}} FOREIGN DATA WRAPPER oracle_fdw OPTIONS (dbserver '//<hostname>/<sid>');"),
preserve_default=True,
),
migrations.AddField(
model_name='datasource',
name='type',
field=models.CharField(default='FileSystem', help_text='The type of data source', max_length=32, choices=[('FileSystem', 'FileSystem'), ('Oracle', 'Oracle')]),
preserve_default=True,
),
migrations.AddField(
model_name='datasource',
name='user',
field=models.CharField(max_length=320, null=True),
preserve_default=True,
),
]
| bsd-3-clause | -906,098,760,160,801,000 | 32.076923 | 171 | 0.581395 | false |
mozilla/kuma | kuma/scrape/management/commands/__init__.py | 1 | 1882 | """Common methods for scraping management commands."""
import logging
from argparse import ArgumentTypeError
from urllib.parse import urlparse
from django.core.management.base import BaseCommand
from kuma.scrape.scraper import Scraper
class ScrapeCommand(BaseCommand):
"""Common base class for scraping management commands."""
def make_scraper(self, **options):
"""Create a Scraper instance for management commands."""
return Scraper(**options)
def parse_url_or_path(self, url_or_path):
if url_or_path.startswith('http'):
bits = urlparse(url_or_path)
host = bits.netloc
path = bits.path.rstrip('/')
ssl = (bits.scheme == 'https')
else:
host = 'wiki.developer.mozilla.org'
ssl = True
path = url_or_path.rstrip('/')
return host, ssl, path
def setup_logging(self, verbosity):
"""Update logger for desired verbosity."""
if verbosity == 0:
level = logging.WARNING
elif verbosity == 1: # default
level = logging.INFO
elif verbosity >= 2:
level = logging.DEBUG
formatter = logging.Formatter('%(levelname)s: %(message)s')
console = logging.StreamHandler(self.stderr)
console.setLevel(level)
console.setFormatter(formatter)
logger = logging.getLogger('kuma.scraper')
logger.setLevel(level)
logger.addHandler(console)
logger.propagate = False
def int_all_type(self, value):
"""A command argument that can take an integer or 'all'."""
if value.strip().lower() == 'all':
return 'all'
try:
as_int = int(value)
except ValueError:
msg = "%r should be 'all' or an integer" % value
raise ArgumentTypeError(msg)
return as_int
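# Illustrative behaviour of the helpers above (values are hypothetical):
def _example_helpers():
    cmd = ScrapeCommand()
    assert cmd.int_all_type('all') == 'all'
    assert cmd.int_all_type('25') == 25
    # -> ('example.com', True, '/en-US/docs/Web')
    return cmd.parse_url_or_path('https://example.com/en-US/docs/Web')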
| mpl-2.0 | -1,177,034,831,692,715,000 | 32.017544 | 67 | 0.600956 | false |
Witia1/olympia | apps/translations/tests/test_models.py | 14 | 26237 | # -*- coding: utf-8 -*-
from contextlib import nested
import django
from django.conf import settings
from django.db import connections, reset_queries
from django.test import TransactionTestCase
from django.test.utils import override_settings
from django.utils import translation
from django.utils.functional import lazy
import jinja2
import multidb
import pytest
from mock import patch
from nose import SkipTest
from nose.tools import eq_
from pyquery import PyQuery as pq
from amo.tests import BaseTestCase
from testapp.models import TranslatedModel, UntranslatedModel, FancyModel
from translations import widgets
from translations.query import order_by_translation
from translations.models import (LinkifiedTranslation, NoLinksTranslation,
NoLinksNoMarkupTranslation,
PurifiedTranslation, Translation,
TranslationSequence)
pytestmark = pytest.mark.django_db
def ids(qs):
return [o.id for o in qs]
class TranslationFixturelessTestCase(BaseTestCase):
"We want to be able to rollback stuff."
def test_whitespace(self):
t = Translation(localized_string=' khaaaaaan! ', id=999)
t.save()
eq_('khaaaaaan!', t.localized_string)
class TranslationSequenceTestCase(BaseTestCase):
"""
Make sure automatic translation sequence generation works
as expected.
"""
def test_empty_translations_seq(self):
"""Make sure we can handle an empty translation sequence table."""
TranslationSequence.objects.all().delete()
newtrans = Translation.new('abc', 'en-us')
newtrans.save()
assert newtrans.id > 0, (
'Empty translation table should still generate an ID.')
def test_single_translation_sequence(self):
"""Make sure we only ever have one translation sequence."""
TranslationSequence.objects.all().delete()
eq_(TranslationSequence.objects.count(), 0)
for i in range(5):
newtrans = Translation.new(str(i), 'en-us')
newtrans.save()
eq_(TranslationSequence.objects.count(), 1)
def test_translation_sequence_increases(self):
"""Make sure translation sequence increases monotonically."""
newtrans1 = Translation.new('abc', 'en-us')
newtrans1.save()
newtrans2 = Translation.new('def', 'de')
newtrans2.save()
assert newtrans2.pk > newtrans1.pk, (
'Translation sequence needs to keep increasing.')
class TranslationTestCase(BaseTestCase):
fixtures = ['testapp/test_models.json']
def setUp(self):
super(TranslationTestCase, self).setUp()
self.redirect_url = settings.REDIRECT_URL
self.redirect_secret_key = settings.REDIRECT_SECRET_KEY
settings.REDIRECT_URL = None
settings.REDIRECT_SECRET_KEY = 'sekrit'
translation.activate('en-US')
def tearDown(self):
settings.REDIRECT_URL = self.redirect_url
settings.REDIRECT_SECRET_KEY = self.redirect_secret_key
super(TranslationTestCase, self).tearDown()
def test_meta_translated_fields(self):
assert not hasattr(UntranslatedModel._meta, 'translated_fields')
eq_(set(TranslatedModel._meta.translated_fields),
set([TranslatedModel._meta.get_field('no_locale'),
TranslatedModel._meta.get_field('name'),
TranslatedModel._meta.get_field('description')]))
eq_(set(FancyModel._meta.translated_fields),
set([FancyModel._meta.get_field('purified'),
FancyModel._meta.get_field('linkified')]))
def test_fetch_translations(self):
"""Basic check of fetching translations in the current locale."""
o = TranslatedModel.objects.get(id=1)
self.trans_eq(o.name, 'some name', 'en-US')
self.trans_eq(o.description, 'some description', 'en-US')
def test_fetch_no_translations(self):
"""Make sure models with no translations aren't harmed."""
o = UntranslatedModel.objects.get(id=1)
eq_(o.number, 17)
def test_fetch_translation_de_locale(self):
"""Check that locale fallbacks work."""
try:
translation.activate('de')
o = TranslatedModel.objects.get(id=1)
self.trans_eq(o.name, 'German!! (unst unst)', 'de')
self.trans_eq(o.description, 'some description', 'en-US')
finally:
translation.deactivate()
def test_create_translation(self):
o = TranslatedModel.objects.create(name='english name')
def get_model():
return TranslatedModel.objects.get(id=o.id)
self.trans_eq(o.name, 'english name', 'en-US')
eq_(o.description, None)
# Make sure the translation id is stored on the model, not the autoid.
eq_(o.name.id, o.name_id)
# Check that a different locale creates a new row with the same id.
translation.activate('de')
german = get_model()
self.trans_eq(o.name, 'english name', 'en-US')
        german.name = u'Gemütlichkeit name'
        german.description = u'clöüserw description'
        german.save()
        self.trans_eq(german.name, u'Gemütlichkeit name', 'de')
        self.trans_eq(german.description, u'clöüserw description', 'de')
# ids should be the same, autoids are different.
eq_(o.name.id, german.name.id)
assert o.name.autoid != german.name.autoid
# Check that de finds the right translation.
fresh_german = get_model()
        self.trans_eq(fresh_german.name, u'Gemütlichkeit name', 'de')
        self.trans_eq(fresh_german.description, u'clöüserw description', 'de')
# Check that en-US has the right translations.
translation.deactivate()
english = get_model()
self.trans_eq(english.name, 'english name', 'en-US')
english.debug = True
eq_(english.description, None)
english.description = 'english description'
english.save()
fresh_english = get_model()
self.trans_eq(
fresh_english.description, 'english description', 'en-US')
eq_(fresh_english.description.id, fresh_german.description.id)
def test_update_translation(self):
o = TranslatedModel.objects.get(id=1)
translation_id = o.name.autoid
o.name = 'new name'
o.save()
o = TranslatedModel.objects.get(id=1)
self.trans_eq(o.name, 'new name', 'en-US')
# Make sure it was an update, not an insert.
eq_(o.name.autoid, translation_id)
def test_create_with_dict(self):
# Set translations with a dict.
strings = {'en-US': 'right language', 'de': 'wrong language'}
o = TranslatedModel.objects.create(name=strings)
# Make sure we get the English text since we're in en-US.
self.trans_eq(o.name, 'right language', 'en-US')
# Check that de was set.
translation.activate('de')
o = TranslatedModel.objects.get(id=o.id)
self.trans_eq(o.name, 'wrong language', 'de')
# We're in de scope, so we should see the de text.
de = TranslatedModel.objects.create(name=strings)
self.trans_eq(o.name, 'wrong language', 'de')
# Make sure en-US was still set.
translation.deactivate()
o = TranslatedModel.objects.get(id=de.id)
self.trans_eq(o.name, 'right language', 'en-US')
def test_update_with_dict(self):
def get_model():
return TranslatedModel.objects.get(id=1)
# There's existing en-US and de strings.
strings = {'de': None, 'fr': 'oui'}
# Don't try checking that the model's name value is en-US. It will be
# one of the other locales, but we don't know which one. You just set
# the name to a dict, deal with it.
m = get_model()
m.name = strings
m.save()
# en-US was not touched.
self.trans_eq(get_model().name, 'some name', 'en-US')
# de was updated to NULL, so it falls back to en-US.
translation.activate('de')
self.trans_eq(get_model().name, 'some name', 'en-US')
# fr was added.
translation.activate('fr')
self.trans_eq(get_model().name, 'oui', 'fr')
def test_dict_with_hidden_locale(self):
with self.settings(HIDDEN_LANGUAGES=('xxx',)):
o = TranslatedModel.objects.get(id=1)
o.name = {'en-US': 'active language', 'xxx': 'hidden language',
'de': 'another language'}
o.save()
ts = Translation.objects.filter(id=o.name_id)
eq_(sorted(ts.values_list('locale', flat=True)),
['de', 'en-US', 'xxx'])
def test_dict_bad_locale(self):
m = TranslatedModel.objects.get(id=1)
m.name = {'de': 'oof', 'xxx': 'bam', 'es': 'si'}
m.save()
ts = Translation.objects.filter(id=m.name_id)
eq_(sorted(ts.values_list('locale', flat=True)),
['de', 'en-US', 'es'])
def test_sorting(self):
"""Test translation comparisons in Python code."""
b = Translation.new('bbbb', 'de')
a = Translation.new('aaaa', 'de')
c = Translation.new('cccc', 'de')
eq_(sorted([c, a, b]), [a, b, c])
def test_sorting_en(self):
q = TranslatedModel.objects.all()
expected = [4, 1, 3]
eq_(ids(order_by_translation(q, 'name')), expected)
eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))
def test_sorting_mixed(self):
translation.activate('de')
q = TranslatedModel.objects.all()
expected = [1, 4, 3]
eq_(ids(order_by_translation(q, 'name')), expected)
eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))
def test_sorting_by_field(self):
field = TranslatedModel._meta.get_field('default_locale')
TranslatedModel.get_fallback = classmethod(lambda cls: field)
translation.activate('de')
q = TranslatedModel.objects.all()
expected = [3, 1, 4]
eq_(ids(order_by_translation(q, 'name')), expected)
eq_(ids(order_by_translation(q, '-name')), list(reversed(expected)))
del TranslatedModel.get_fallback
def test_new_purified_field(self):
# This is not a full test of the html sanitizing. We expect the
# underlying bleach library to have full tests.
s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
m = FancyModel.objects.create(purified=s)
doc = pq(m.purified.localized_string_clean)
assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
eq_(m.purified.localized_string, s)
def test_new_linkified_field(self):
s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
m = FancyModel.objects.create(linkified=s)
doc = pq(m.linkified.localized_string_clean)
assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
assert not doc('i')
assert '<i>' in m.linkified.localized_string_clean
eq_(m.linkified.localized_string, s)
def test_update_purified_field(self):
m = FancyModel.objects.get(id=1)
s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
m.purified = s
m.save()
doc = pq(m.purified.localized_string_clean)
assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
eq_(m.purified.localized_string, s)
def test_update_linkified_field(self):
m = FancyModel.objects.get(id=1)
s = '<a id=xx href="http://xxx.com">yay</a> <i>http://yyy.com</i>'
m.linkified = s
m.save()
doc = pq(m.linkified.localized_string_clean)
assert doc('a[href="http://xxx.com"][rel="nofollow"]')[0].text == 'yay'
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
assert '<i>' in m.linkified.localized_string_clean
eq_(m.linkified.localized_string, s)
def test_purified_field_str(self):
m = FancyModel.objects.get(id=1)
stringified = u'%s' % m.purified
doc = pq(stringified)
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
assert doc('i')[0].text == 'x'
def test_linkified_field_str(self):
m = FancyModel.objects.get(id=1)
stringified = u'%s' % m.linkified
doc = pq(stringified)
assert doc('a[href="http://yyy.com"][rel="nofollow"]')[0].text == (
'http://yyy.com')
assert not doc('i')
assert '<i>' in stringified
def test_purifed_linkified_fields_in_template(self):
m = FancyModel.objects.get(id=1)
env = jinja2.Environment()
t = env.from_string('{{ m.purified }}=={{ m.linkified }}')
s = t.render({'m': m})
eq_(s, u'%s==%s' % (m.purified.localized_string_clean,
m.linkified.localized_string_clean))
def test_outgoing_url(self):
"""
Make sure linkified field is properly bounced off our outgoing URL
redirector.
"""
s = 'I like http://example.org/awesomepage.html .'
with self.settings(REDIRECT_URL='http://example.com/'):
m = FancyModel.objects.create(linkified=s)
"""
eq_(m.linkified.localized_string_clean,
'I like <a rel="nofollow" href="http://example.com/'
'40979175e3ef6d7a9081085f3b99f2f05447b22ba790130517dd62b7ee59ef94/'
'http%3A//example.org/'
'awesomepage.html">http://example.org/awesomepage'
'.html</a> .')
"""
doc = pq(m.linkified.localized_string_clean)
link = doc('a')[0]
assert link.attrib['href'] == (
"http://example.com/40979175e3ef6d7a9081085f3b99f2f05447b22ba7"
"90130517dd62b7ee59ef94/http%3A//example.org/awesomepage.html")
assert link.attrib['rel'] == "nofollow"
assert link.text == "http://example.org/awesomepage.html"
eq_(m.linkified.localized_string, s)
def test_require_locale(self):
obj = TranslatedModel.objects.get(id=1)
eq_(unicode(obj.no_locale), 'blammo')
eq_(obj.no_locale.locale, 'en-US')
# Switch the translation to a locale we wouldn't pick up by default.
obj.no_locale.locale = 'fr'
obj.no_locale.save()
obj = TranslatedModel.objects.get(id=1)
eq_(unicode(obj.no_locale), 'blammo')
eq_(obj.no_locale.locale, 'fr')
def test_delete_set_null(self):
"""
Test that deleting a translation sets the corresponding FK to NULL,
if it was the only translation for this field.
"""
obj = TranslatedModel.objects.get(id=1)
trans_id = obj.description.id
eq_(Translation.objects.filter(id=trans_id).count(), 1)
obj.description.delete()
obj = TranslatedModel.objects.no_cache().get(id=1)
eq_(obj.description_id, None)
eq_(obj.description, None)
eq_(Translation.objects.no_cache().filter(id=trans_id).exists(), False)
@patch.object(TranslatedModel, 'get_fallback', create=True)
def test_delete_keep_other_translations(self, get_fallback):
# To make sure both translations for the name are used, set the
# fallback to the second locale, which is 'de'.
get_fallback.return_value = 'de'
obj = TranslatedModel.objects.get(id=1)
orig_name_id = obj.name.id
eq_(obj.name.locale.lower(), 'en-us')
eq_(Translation.objects.filter(id=orig_name_id).count(), 2)
obj.name.delete()
obj = TranslatedModel.objects.no_cache().get(id=1)
eq_(Translation.objects.no_cache().filter(id=orig_name_id).count(), 1)
# We shouldn't have set name_id to None.
eq_(obj.name_id, orig_name_id)
# We should find a Translation.
eq_(obj.name.id, orig_name_id)
eq_(obj.name.locale, 'de')
class TranslationMultiDbTests(TransactionTestCase):
fixtures = ['testapp/test_models.json']
def setUp(self):
super(TranslationMultiDbTests, self).setUp()
translation.activate('en-US')
def tearDown(self):
self.cleanup_fake_connections()
super(TranslationMultiDbTests, self).tearDown()
@property
def mocked_dbs(self):
return {
'default': settings.DATABASES['default'],
'slave-1': settings.DATABASES['default'].copy(),
'slave-2': settings.DATABASES['default'].copy(),
}
def cleanup_fake_connections(self):
with patch.object(django.db.connections, 'databases', self.mocked_dbs):
for key in ('default', 'slave-1', 'slave-2'):
connections[key].close()
@override_settings(DEBUG=True)
def test_translations_queries(self):
# Make sure we are in a clean environnement.
reset_queries()
TranslatedModel.objects.get(pk=1)
eq_(len(connections['default'].queries), 3)
@override_settings(DEBUG=True)
def test_translations_reading_from_multiple_db(self):
with patch.object(django.db.connections, 'databases', self.mocked_dbs):
# Make sure we are in a clean environnement.
reset_queries()
with patch('multidb.get_slave', lambda: 'slave-2'):
TranslatedModel.objects.get(pk=1)
eq_(len(connections['default'].queries), 0)
eq_(len(connections['slave-1'].queries), 0)
eq_(len(connections['slave-2'].queries), 3)
@override_settings(DEBUG=True)
def test_translations_reading_from_multiple_db_using(self):
raise SkipTest('Will need a django-queryset-transform patch to work')
with patch.object(django.db.connections, 'databases', self.mocked_dbs):
# Make sure we are in a clean environnement.
reset_queries()
with patch('multidb.get_slave', lambda: 'slave-2'):
TranslatedModel.objects.using('slave-1').get(pk=1)
eq_(len(connections['default'].queries), 0)
eq_(len(connections['slave-1'].queries), 3)
eq_(len(connections['slave-2'].queries), 0)
@override_settings(DEBUG=True)
def test_translations_reading_from_multiple_db_pinning(self):
with patch.object(django.db.connections, 'databases', self.mocked_dbs):
# Make sure we are in a clean environnement.
reset_queries()
with nested(patch('multidb.get_slave', lambda: 'slave-2'),
multidb.pinning.use_master):
TranslatedModel.objects.get(pk=1)
eq_(len(connections['default'].queries), 3)
eq_(len(connections['slave-1'].queries), 0)
eq_(len(connections['slave-2'].queries), 0)
class PurifiedTranslationTest(BaseTestCase):
def test_output(self):
assert isinstance(PurifiedTranslation().__html__(), unicode)
def test_raw_text(self):
s = u' This is some text '
x = PurifiedTranslation(localized_string=s)
eq_(x.__html__(), 'This is some text')
def test_allowed_tags(self):
s = u'<b>bold text</b> or <code>code</code>'
x = PurifiedTranslation(localized_string=s)
eq_(x.__html__(), u'<b>bold text</b> or <code>code</code>')
def test_forbidden_tags(self):
s = u'<script>some naughty xss</script>'
x = PurifiedTranslation(localized_string=s)
        eq_(x.__html__(), '&lt;script&gt;some naughty xss&lt;/script&gt;')
def test_internal_link(self):
s = u'<b>markup</b> <a href="http://addons.mozilla.org/foo">bar</a>'
x = PurifiedTranslation(localized_string=s)
doc = pq(x.__html__())
links = doc('a[href="http://addons.mozilla.org/foo"][rel="nofollow"]')
assert links[0].text == 'bar'
assert doc('b')[0].text == 'markup'
@patch('amo.urlresolvers.get_outgoing_url')
def test_external_link(self, get_outgoing_url_mock):
get_outgoing_url_mock.return_value = 'http://external.url'
s = u'<b>markup</b> <a href="http://example.com">bar</a>'
x = PurifiedTranslation(localized_string=s)
doc = pq(x.__html__())
links = doc('a[href="http://external.url"][rel="nofollow"]')
assert links[0].text == 'bar'
assert doc('b')[0].text == 'markup'
@patch('amo.urlresolvers.get_outgoing_url')
def test_external_text_link(self, get_outgoing_url_mock):
get_outgoing_url_mock.return_value = 'http://external.url'
s = u'<b>markup</b> http://example.com'
x = PurifiedTranslation(localized_string=s)
doc = pq(x.__html__())
links = doc('a[href="http://external.url"][rel="nofollow"]')
assert links[0].text == 'http://example.com'
assert doc('b')[0].text == 'markup'
class LinkifiedTranslationTest(BaseTestCase):
@patch('amo.urlresolvers.get_outgoing_url')
def test_allowed_tags(self, get_outgoing_url_mock):
get_outgoing_url_mock.return_value = 'http://external.url'
s = u'<a href="http://example.com">bar</a>'
x = LinkifiedTranslation(localized_string=s)
doc = pq(x.__html__())
links = doc('a[href="http://external.url"][rel="nofollow"]')
assert links[0].text == 'bar'
def test_forbidden_tags(self):
s = u'<script>some naughty xss</script> <b>bold</b>'
x = LinkifiedTranslation(localized_string=s)
eq_(x.__html__(),
            '&lt;script&gt;some naughty xss&lt;/script&gt; '
            '&lt;b&gt;bold&lt;/b&gt;')
class NoLinksTranslationTest(BaseTestCase):
def test_allowed_tags(self):
s = u'<b>bold text</b> or <code>code</code>'
x = NoLinksTranslation(localized_string=s)
eq_(x.__html__(), u'<b>bold text</b> or <code>code</code>')
def test_forbidden_tags(self):
s = u'<script>some naughty xss</script>'
x = NoLinksTranslation(localized_string=s)
        eq_(x.__html__(), '&lt;script&gt;some naughty xss&lt;/script&gt;')
def test_links_stripped(self):
# Link with markup.
s = u'a <a href="http://example.com">link</a> with markup'
x = NoLinksTranslation(localized_string=s)
eq_(x.__html__(), u'a with markup')
# Text link.
s = u'a text http://example.com link'
x = NoLinksTranslation(localized_string=s)
eq_(x.__html__(), u'a text link')
# Text link, markup link, allowed tags, forbidden tags and bad markup.
s = (u'a <a href="http://example.com">link</a> with markup, a text '
u'http://example.com link, <b>with allowed tags</b>, '
u'<script>forbidden tags</script> and <http://bad.markup.com')
x = NoLinksTranslation(localized_string=s)
eq_(x.__html__(), u'a with markup, a text link, '
u'<b>with allowed tags</b>, '
                          u'&lt;script&gt;forbidden tags&lt;/script&gt; and')
class NoLinksNoMarkupTranslationTest(BaseTestCase):
def test_forbidden_tags(self):
s = u'<script>some naughty xss</script> <b>bold</b>'
x = NoLinksNoMarkupTranslation(localized_string=s)
eq_(x.__html__(),
            '&lt;script&gt;some naughty xss&lt;/script&gt; '
            '&lt;b&gt;bold&lt;/b&gt;')
def test_links_stripped(self):
# Link with markup.
s = u'a <a href="http://example.com">link</a> with markup'
x = NoLinksNoMarkupTranslation(localized_string=s)
eq_(x.__html__(), u'a with markup')
# Text link.
s = u'a text http://example.com link'
x = NoLinksNoMarkupTranslation(localized_string=s)
eq_(x.__html__(), u'a text link')
# Text link, markup link, forbidden tags and bad markup.
s = (u'a <a href="http://example.com">link</a> with markup, a text '
u'http://example.com link, <b>with forbidden tags</b>, '
u'<script>forbidden tags</script> and <http://bad.markup.com')
x = NoLinksNoMarkupTranslation(localized_string=s)
eq_(x.__html__(), u'a with markup, a text link, '
                          u'&lt;b&gt;with forbidden tags&lt;/b&gt;, '
                          u'&lt;script&gt;forbidden tags&lt;/script&gt; and')
def test_translation_bool():
def t(s):
return Translation(localized_string=s)
assert bool(t('text')) is True
assert bool(t(' ')) is False
assert bool(t('')) is False
assert bool(t(None)) is False
def test_translation_unicode():
def t(s):
return Translation(localized_string=s)
eq_(unicode(t('hello')), 'hello')
eq_(unicode(t(None)), '')
def test_widget_value_from_datadict():
data = {'f_en-US': 'woo', 'f_de': 'herr', 'f_fr_delete': ''}
actual = widgets.TransMulti().value_from_datadict(data, [], 'f')
expected = {'en-US': 'woo', 'de': 'herr', 'fr': None}
eq_(actual, expected)
def test_comparison_with_lazy():
x = Translation(localized_string='xxx')
lazy_u = lazy(lambda x: x, unicode)
x == lazy_u('xxx')
lazy_u('xxx') == x
def test_cache_key():
# Test that we are not taking the db into account when building our
# cache keys for django-cache-machine. See bug 928881.
eq_(Translation._cache_key(1, 'default'),
Translation._cache_key(1, 'slave'))
# Test that we are using the same cache no matter what Translation class
# we use.
eq_(PurifiedTranslation._cache_key(1, 'default'),
Translation._cache_key(1, 'default'))
eq_(LinkifiedTranslation._cache_key(1, 'default'),
Translation._cache_key(1, 'default'))
| bsd-3-clause | -4,317,369,250,341,583,000 | 37.122093 | 83 | 0.597873 | false |
getsentry/raven-aiohttp | raven_aiohttp.py | 1 | 7539 | """
raven_aiohttp
~~~~~~~~~~~~~
:copyright: (c) 2010-2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import abc
import asyncio
import socket
import aiohttp
from raven.conf import defaults
from raven.exceptions import APIError, RateLimited
from raven.transport.base import AsyncTransport
from raven.transport.http import HTTPTransport
try:
from asyncio import ensure_future
except ImportError:
ensure_future = getattr(asyncio, 'async')
try:
from raven.transport.base import has_newstyle_transports
except ImportError:
has_newstyle_transports = False
__version__ = '0.7.0'
class AioHttpTransportBase(
AsyncTransport,
HTTPTransport,
metaclass=abc.ABCMeta
):
def __init__(self, parsed_url=None, *, verify_ssl=True,
timeout=defaults.TIMEOUT,
keepalive=True, family=socket.AF_INET, loop=None):
self._keepalive = keepalive
self._family = family
if loop is None:
loop = asyncio.get_event_loop()
self._loop = loop
if has_newstyle_transports:
if parsed_url is not None:
raise TypeError('Transport accepts no URLs for this version '
'of raven.')
super().__init__(timeout, verify_ssl)
else:
super().__init__(parsed_url, timeout, verify_ssl)
if self.keepalive:
self._client_session = self._client_session_factory()
self._closing = False
@property
def keepalive(self):
return self._keepalive
@property
def family(self):
return self._family
def _client_session_factory(self):
connector = aiohttp.TCPConnector(verify_ssl=self.verify_ssl,
family=self.family,
loop=self._loop)
return aiohttp.ClientSession(connector=connector,
loop=self._loop)
@asyncio.coroutine
def _do_send(self, url, data, headers, success_cb, failure_cb):
if self.keepalive:
session = self._client_session
else:
session = self._client_session_factory()
resp = None
try:
resp = yield from session.post(
url,
data=data,
compress=False,
headers=headers,
timeout=self.timeout
)
code = resp.status
if code != 200:
msg = resp.headers.get('x-sentry-error')
if code == 429:
try:
retry_after = resp.headers.get('retry-after')
retry_after = int(retry_after)
except (ValueError, TypeError):
retry_after = 0
failure_cb(RateLimited(msg, retry_after))
else:
failure_cb(APIError(msg, code))
else:
success_cb()
except asyncio.CancelledError:
# do not mute asyncio.CancelledError
raise
except Exception as exc:
failure_cb(exc)
finally:
if resp is not None:
resp.release()
if not self.keepalive:
yield from session.close()
@abc.abstractmethod
def _async_send(self, url, data, headers, success_cb, failure_cb): # pragma: no cover
pass
@abc.abstractmethod
@asyncio.coroutine
def _close(self): # pragma: no cover
pass
def async_send(self, url, data, headers, success_cb, failure_cb):
if self._closing:
failure_cb(RuntimeError(
'{} is closed'.format(self.__class__.__name__)))
return
self._async_send(url, data, headers, success_cb, failure_cb)
@asyncio.coroutine
def _close_coro(self, *, timeout=None):
try:
yield from asyncio.wait_for(
self._close(), timeout=timeout, loop=self._loop)
except asyncio.TimeoutError:
pass
finally:
if self.keepalive:
yield from self._client_session.close()
def close(self, *, timeout=None):
if self._closing:
@asyncio.coroutine
def dummy():
pass
return dummy()
self._closing = True
return self._close_coro(timeout=timeout)
if not has_newstyle_transports:
oldstyle_async_send = async_send
def async_send(self, *args, **kwargs):
return self.oldstyle_async_send(self._url, *args, **kwargs)
class AioHttpTransport(AioHttpTransportBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._tasks = set()
def _async_send(self, url, data, headers, success_cb, failure_cb):
coro = self._do_send(url, data, headers, success_cb, failure_cb)
task = ensure_future(coro, loop=self._loop)
self._tasks.add(task)
task.add_done_callback(self._tasks.remove)
@asyncio.coroutine
def _close(self):
yield from asyncio.gather(
*self._tasks,
return_exceptions=True,
loop=self._loop
)
assert len(self._tasks) == 0
class QueuedAioHttpTransport(AioHttpTransportBase):
def __init__(self, *args, workers=1, qsize=1000, **kwargs):
super().__init__(*args, **kwargs)
self._queue = asyncio.Queue(maxsize=qsize, loop=self._loop)
self._workers = set()
for _ in range(workers):
worker = ensure_future(self._worker(), loop=self._loop)
self._workers.add(worker)
worker.add_done_callback(self._workers.remove)
@asyncio.coroutine
def _worker(self):
while True:
data = yield from self._queue.get()
try:
                if data is ...:
                    # Ellipsis is the shutdown sentinel: put it back so any
                    # remaining workers see it too, then exit this worker.
                    self._queue.put_nowait(...)
                    break
url, data, headers, success_cb, failure_cb = data
yield from self._do_send(url, data, headers, success_cb,
failure_cb)
finally:
self._queue.task_done()
def _async_send(self, url, data, headers, success_cb, failure_cb):
data = url, data, headers, success_cb, failure_cb
try:
self._queue.put_nowait(data)
        except asyncio.QueueFull:
            # The queue is full: drop the oldest pending event to make room,
            # reporting the drop through that event's failure callback.
            skipped = self._queue.get_nowait()
self._queue.task_done()
*_, failure_cb = skipped
failure_cb(RuntimeError(
'QueuedAioHttpTransport internal queue is full'))
self._queue.put_nowait(data)
@asyncio.coroutine
def _close(self):
try:
self._queue.put_nowait(...)
        except asyncio.QueueFull:
            # Make room for the shutdown sentinel by dropping the oldest
            # queued event and failing it explicitly.
            skipped = self._queue.get_nowait()
self._queue.task_done()
*_, failure_cb = skipped
failure_cb(RuntimeError(
'QueuedAioHttpTransport internal queue was full'))
self._queue.put_nowait(...)
yield from asyncio.gather(
*self._workers,
return_exceptions=True,
loop=self._loop
)
assert len(self._workers) == 0
assert self._queue.qsize() == 1
try:
assert self._queue.get_nowait() is ...
finally:
self._queue.task_done()
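# --- Usage sketch (illustrative; not part of the published module) ---
# Minimal wiring of the queued transport into a raven ``Client``.  The DSN
# below is hypothetical, and ``functools.partial`` is the usual way to pass
# extra keyword arguments (``workers``/``qsize``), since raven instantiates
# the transport class itself:
#
#     from functools import partial
#     from raven import Client
#
#     client = Client(
#         dsn='https://public:[email protected]/1',
#         transport=partial(QueuedAioHttpTransport, workers=4, qsize=1000),
#     )
#     client.captureMessage('hello from asyncio')
#
#     # Flush pending events before the event loop goes away:
#     loop = asyncio.get_event_loop()
#     loop.run_until_complete(client.remote.get_transport().close())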
| bsd-3-clause | 4,017,273,378,549,687,000 | 27.665399 | 90 | 0.543175 | false |
sagemathinc/smc | src/scripts/storage_gluster.py | 3 | 28864 | #!/usr/bin/env python
###############################################################################
#
# CoCalc: Collaborative Calculation in the Cloud
#
# Copyright (C) 2016, Sagemath Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import argparse, cPickle, hashlib, json, logging, os, sys, time, random
from uuid import UUID
log = None
# This is so we can import salvus/salvus/daemon.py
sys.path.append('/home/salvus/salvus/salvus/')
def check_uuid(uuid):
if UUID(uuid).version != 4:
raise RuntimeError("invalid uuid")
def uid(uuid):
# We take the sha-512 of the uuid just to make it harder to force a collision. Thus even if a
# user could somehow generate an account id of their choosing, this wouldn't help them get the
# same uid as another user.
n = hash(hashlib.sha512(uuid).digest()) % (
4294967294 - 1000
) # 2^32-2=max uid, as keith determined by a program + experimentation.
return n + 1001
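# Example (illustrative): uid('0ac1e55e-...') deterministically maps a
# project/account uuid to a uid between 1001 and 2**32 - 2, so every host
# derives the same Unix account id without any coordination.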
def cmd(s, exit_on_error=True):
log.debug(s)
#s += ' &>/dev/null'
t = time.time()
if os.system(s):
if exit_on_error:
raise RuntimeError("Error running '%s'" % s)
log.debug("time: %s seconds" % (time.time() - t))
def cmd2(s):
log.debug(s)
from subprocess import Popen, PIPE
out = Popen(
s, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=not isinstance(s, list))
e = out.wait()
x = out.stdout.read() + out.stderr.read()
log.debug(x)
return x, e
def path_to_project(storage, project_id):
return os.path.join(storage, project_id[:2], project_id[2:4], project_id)
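# Example (illustrative): path_to_project('/storage', '0ac1e55e-...') gives
# '/storage/0a/c1/0ac1e55e-...', sharding projects across two levels of
# two-character prefixes so no single directory grows too large.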
def migrate_project_to_storage(src, storage, min_size_mb, max_size_mb,
new_only):
info_json = os.path.join(src, '.sagemathcloud', 'info.json')
if not os.path.exists(info_json):
log.debug("Skipping since %s does not exist" % info_json)
return
project_id = json.loads(open(info_json).read())['project_id']
projectid = project_id.replace('-', '')
target = path_to_project(storage, project_id)
try:
if os.path.exists(target):
if new_only:
log.debug(
"skipping %s (%s) since it already exists (and new_only=True)"
% (src, project_id))
return
mount_project(storage=storage, project_id=project_id, force=False)
else:
# create
os.makedirs(target)
os.chdir(target)
current_size_mb = int(
os.popen("du -s '%s'" % src).read().split()[0]) // 1000 + 1
size = min(max_size_mb, max(min_size_mb, current_size_mb))
# Using many small img files might seem like a good idea. It isn't, since mount takes massively longer, etc.
#img_size_mb = 128
#images = ['%s/%s.img'%(target, i) for i in range(size//img_size_mb + 1)]
#for img in images:
# cmd("truncate -s %sM %s"%(img_size_mb,img))
#images = ' '.join(images)
images = '%s/%s.img' % (target, 0)
cmd("truncate -s %sM %s" % (size, images))
cmd("zpool create -m /home/%s project-%s %s" %
(projectid, project_id, images))
cmd("zfs set compression=gzip project-%s" % project_id)
cmd("zfs set dedup=on project-%s" % project_id)
cmd("zfs set snapdir=visible project-%s" % project_id)
# rsync data over
double_verbose = False
cmd("time rsync -axH%s --delete --exclude .forever --exclude .bup %s/ /home/%s/"
% ('v' if double_verbose else '', src, projectid),
exit_on_error=False)
id = uid(project_id)
cmd("chown %s:%s -R /home/%s/" % (id, id, projectid))
cmd("df -h /home/%s; zfs get compressratio project-%s; zpool get dedupratio project-%s"
% (projectid, project_id, project_id))
finally:
unmount_project(project_id=project_id)
def mount_project(storage, project_id, force):
check_uuid(project_id)
id = uid(project_id)
target = path_to_project(storage, project_id)
out, e = cmd2("zpool import %s project-%s -d %s" % ('-f' if force else '',
project_id, target))
if e:
if 'a pool with that name is already created' in out:
# no problem
pass
else:
print "could not get pool"
sys.exit(1)
projectid = project_id.replace('-', '')
# the -o makes it so in the incredibly unlikely event of a collision, no big deal.
cmd("groupadd -g %s -o %s" % (id, projectid), exit_on_error=False)
cmd("useradd -u %s -g %s -o -d /home/%s/ %s" % (id, id, projectid,
projectid),
exit_on_error=False) # error if user already exists is fine.
def unmount_project(project_id):
check_uuid(project_id)
projectid = project_id.replace('-', '')
cmd("pkill -9 -u %s" % projectid, exit_on_error=False)
cmd("deluser --force %s" % projectid, exit_on_error=False)
time.sleep(.5)
out, e = cmd2("zpool export project-%s" % project_id)
if e:
if 'no such pool' not in out:
# not just a problem due to pool not being mounted.
print "Error unmounting pool -- %s" % out
sys.exit(1)
def tinc_address():
return os.popen('ifconfig tun0|grep "inet addr"').read().split()[1].split(
':')[1].strip()
def info_json(path):
if not os.path.exists('locations.dat'):
sys.stderr.write(
'Please run this from a node with db access to create locations.dat\n\t\techo "select location,project_id from projects limit 30000;" | cqlsh_connect 10.1.3.2 |grep "{" > locations.dat'
)
sys.exit(1)
db = {}
host = tinc_address()
log.info("parsing database...")
for x in open('locations.dat').readlines():
if x.strip():
location, project_id = x.split('|')
location = json.loads(location.strip())
project_id = project_id.strip()
if location['host'] == host:
if location['username'] in db:
log.warning("WARNING: collision -- %s, %s" % (location,
project_id))
db[location['username']] = {
'location': location,
'project_id': project_id,
'base_url': ''
}
v = [os.path.abspath(x) for x in path]
for i, path in enumerate(v):
log.info("** %s of %s" % (i + 1, len(v)))
SMC = os.path.join(path, '.sagemathcloud')
if not os.path.exists(SMC):
log.warning(
"Skipping '%s' since no .sagemathcloud directory" % path)
continue
f = os.path.join(path, '.sagemathcloud', 'info.json')
username = os.path.split(path)[-1]
if not os.path.exists(f):
if username not in db:
log.warning("Skipping '%s' since not in database!" % username)
else:
s = json.dumps(db[username], separators=(',', ':'))
log.info("writing '%s': '%s'" % (f, s))
open(f, 'w').write(s)
os.system('chmod a+rw %s' % f)
def modtime(f):
try:
return int(os.stat(f).st_mtime)
except:
return 0 # 1970...
def copy_file_efficiently(src, dest):
"""
Copy a possibly sparse file from a brick to a mounted glusterfs volume, if the dest is older.
    This suffices for now -- later we might use a different method when the file is above a
    certain size threshold (?). However, I can't think of any possible better method, really;
    anything involving computing a diff between the two files would require *reading* them, so it
    already takes way too long (in sharp contrast to the ever-clever bup, which uses a Bloom filter!).
This will raise a RuntimeError if something goes wrong.
"""
import uuid
s0, s1 = os.path.split(dest)
if s1.startswith('.glusterfs'):
# never copy around/sync any of the temp files we create below.
return
# The clock of the destination is used when doing this copy, so it's
# *critical* that the clocks be in sync. Run ntp!!!!!
dest_modtime = modtime(dest)
if dest_modtime >= modtime(src):
return
if not os.path.exists(s0):
os.makedirs(s0)
lock = os.path.join(s0, ".glusterfs-lock-%s" % s1)
dest0 = os.path.join(s0, ".glusterfs-tmp-%s-%s" % (str(uuid.uuid4()), s1))
now = time.time()
recent = now - 5 * 60 # recent time = 5 minutes ago
if os.path.exists(lock):
log.debug(
"another daemon is either copying the same file right now (or died)."
)
# If mod time of the lock is recent, just give up.
t = modtime(lock)
if t >= recent:
return # recent lock
# check that dest0 exists and has mod time < 5 minutes; otherwise, take control.
if os.path.exists(dest0) and modtime(dest0) >= recent:
return
if os.stat(src).st_mode == 33280:
log.info(
"skipping copy since source '%s' suddenly became special link file",
src)
return
log.info("sync: %s --> %s" % (src, dest))
t = time.time()
try:
log.info(cmd2('ls -lhs "%s"' % src)[0])
cmd("touch '%s'; cp -av '%s' '%s'" % (lock, src, dest0),
exit_on_error=True)
# check that modtime of dest is *still* older, i.e., that somehow somebody didn't
# just step in and change it.
if modtime(dest) == dest_modtime:
# modtime was unchanged.
cmd("mv -v '%s' '%s'" % (dest0, dest), exit_on_error=True)
finally:
# remove the tmp file instead of leaving it there all corrupted.
if os.path.exists(dest0):
try:
os.unlink(dest0)
except:
pass
if os.path.exists(lock):
try:
os.unlink(lock)
except:
pass
total_time = time.time() - t
log.info("time: %s" % total_time)
return total_time
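# --- Usage sketch (illustrative paths) ---
# Because the copy is skipped when dest is at least as new as src, the
# function is safe to call repeatedly from a polling loop:
#
#     setup_log('INFO')
#     copy_file_efficiently(
#         '/brick/projects/0a/c1/0ac1e55e-.../0.img',    # brick-local source
#         '/mnt/dc1-projects/0a/c1/0ac1e55e-.../0.img')  # mounted gluster dest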
def sync(src, dest):
"""
copy all older files from src/ to dest/.
    -- src/ = underlying *brick* path for some glusterfs host
-- dest/ = remote mounted glusterfs filesystem
"""
src = os.path.abspath(src)
dest = os.path.abspath(dest)
cache_file = "/var/lib/glusterd/glustersync/cache.pickle"
if not os.path.exists("/var/lib/glusterd/glustersync"):
os.makedirs("/var/lib/glusterd/glustersync")
if os.path.exists(cache_file):
cache_all = cPickle.loads(open(cache_file).read())
else:
cache_all = {}
if dest not in cache_all:
cache_all[dest] = {}
cache = cache_all[dest]
log.info("sync: '%s' --> '%s'" % (src, dest))
import stat
def walktree(top):
#log.info("scanning '%s'", top)
v = os.listdir(top)
random.shuffle(v)
for i, f in enumerate(v):
if f == '.glusterfs':
# skip the glusterfs meta-data
continue
if len(v) > 10:
log.debug("%s(%s/%s): %s", top, i + 1, len(v), f)
pathname = os.path.join(top, f)
src_name = os.path.join(src, pathname)
dest_name = os.path.join(dest, pathname)
st = os.stat(src_name)
if st.st_mode == 33280:
# glusterfs meta-info file to indicate a moved file...
continue
if stat.S_ISDIR(st.st_mode):
# It's a directory: create in target if necessary, then recurse...
## !! we skip creation; this is potentially expensive and isn't needed for our application.
##if not os.path.exists(dest_name):
## try:
## os.makedirs(dest_name)
## except OSError:
## if not os.path.exists(dest_name):
## raise RuntimeError("unable to make directory '%s'"%dest_name)
try:
walktree(pathname)
except OSError, mesg:
log.warning("error walking '%s': %s", pathname, mesg)
elif stat.S_ISREG(st.st_mode):
mtime = int(st.st_mtime)
if cache.get(src_name, {'mtime': 0})['mtime'] >= mtime:
continue
try:
copy_file_efficiently(
src_name, dest_name
) # checks dest modtime before actually doing copy.
cache[src_name] = {
'mtime': mtime,
'size_mb': st.st_blocks // 2000
}
except RuntimeError, mesg:
log.warning("error copying %s to %s; skipping.", src_name,
dest_name)
else:
# Unknown file type, print a message
log.warning("unknown file type: %s", pathname)
os.chdir(src)
walktree('.')
s = cPickle.dumps(cache_all)
open(cache_file, 'w').write(s)
def sync_watch(sources, dests, min_sync_time):
### WARNING -- this code does not work very well, and is sort of pointless. AVOID!
"""
Watch filesystem trees and on modification or creation, cp file, possibly creating directories.
The frequency of copying is limited in various ways.
This uses inotify so that it is event driven. You must increase the number of watched files
that are allowed! "sudo sysctl fs.inotify.max_user_watches=10000000" and in /etc/sysctl.conf:
fs.inotify.max_user_watches=10000000
    - sources = list of underlying *brick* paths for some glusterfs host
- dests = list of paths of remote mounted glusterfs filesystems
- min_sync_time = never sync a file more frequently than this many seconds; no matter what, we
also wait at least twice the time it takes to sync out the file before syncing it again.
"""
sources = [os.path.abspath(src) for src in sources]
dests = [os.path.abspath(dest) for dest in dests]
next_sync = {} # soonest time when may again sync a given file
modified_files = set([])
received_files = set([])
def add(pathname):
try:
if os.stat(pathname).st_mode == 33280:
# ignore gluster special files
log.debug("ignoring gluster special file: '%s'", pathname)
return
except:
pass
log.debug("inotify: %s" % pathname)
s = os.path.split(pathname)
if s[1].startswith('.glusterfs-lock-'):
received_files.add(
os.path.join(s[0], s[1][len('.glusterfs-lock-'):]))
elif s[1].startswith('.glusterfs'):
return
elif os.path.isfile(pathname):
modified_files.add(pathname)
def handle_modified_files():
if not modified_files:
return
log.debug("handling modified_files=%s", modified_files)
log.debug("received_files=%s", received_files)
now = time.time()
do_later = []
for path in modified_files:
if path in sources: # ignore changes to the sources directories
continue
if path in received_files: # recently copied to us.
continue
if path not in next_sync or now >= next_sync[path]:
src = None
for s in sources:
if path.startswith(s):
src = s
break
if not src:
log.warning(
"not copying '%s' -- must be under a source: %s" %
(path, sources))
continue
t0 = time.time()
for dest in dests:
dest_path = os.path.join(dest, path[len(src) + 1:])
log.info("copy('%s', '%s')" % (path, dest_path))
try:
copy_file_efficiently(path, dest_path)
except Exception, msg:
log.warning("problem syncing %s to %s! -- %s" %
(path, dest_path, msg))
# no matter what, we wait at least twice the time (from now) that it takes to sync out the file before syncing it again.
next_sync[path] = time.time() + max(2 * (time.time() - t0),
min_sync_time)
else:
pass
#log.debug("waiting until later to sync (too frequent): '%s' "%path)
do_later.append(path)
modified_files.clear()
received_files.clear()
modified_files.update(do_later)
import pyinotify
wm = pyinotify.WatchManager() # Watch Manager
mask = pyinotify.IN_CREATE | pyinotify.IN_MOVED_TO | pyinotify.IN_MODIFY | pyinotify.IN_CLOSE_WRITE
class EventHandler(pyinotify.ProcessEvent):
def process_IN_CREATE(self, event):
print "Creating:", event.pathname
if os.path.isdir(event.pathname):
# created a directory -- add it to the watch list
watchers.append(wm.add_watch(event.pathname, mask))
add(event.pathname)
def process_IN_MOVED_TO(self, event):
print "File moved to:", event.pathname
add(event.pathname)
def process_IN_MODIFY(self, event):
print "Modified:", event.pathname
add(event.pathname)
def process_IN_CLOSE_WRITE(self, event):
print "Close write:", event.pathname
add(event.pathname)
handler = EventHandler()
# we get inotify events for *at most* timeout seconds, then handle them all
notifier = pyinotify.Notifier(wm, handler, timeout=1)
t = time.time()
watchers = []
for src in sources:
log.info("adding watches to '%s' (this could take several minutes)..."
% src)
dot_gluster = os.path.join(src, '.glusterfs')
watchers.append(
wm.add_watch(
src,
mask,
rec=True,
exclude_filter=pyinotify.ExcludeFilter(['^' + dot_gluster])))
log.info("watch added (%s seconds): listening for file events..." %
(time.time() - t))
def check_for_events():
#print "check_for_events"
notifier.process_events()
while notifier.check_events(
): #loop in case more events appear while we are processing
notifier.read_events()
notifier.process_events()
while True:
check_for_events()
handle_modified_files()
time.sleep(1)
def volume_info():
# parse 'gluster volume info' as a python object.
s, e = cmd2('unset PYTHONPATH; unset PYTHONHOME; gluster volume info')
if e:
raise RuntimeError(e)
v = {}
for x in s.split("\nVolume Name: "):
z = x.strip().splitlines()
if z:
name = z[0]
m = {'bricks': []}
for k in z[1:]:
i = k.find(':')
if i == -1:
continue
key = k[:i].strip()
val = k[i + 1:].strip()
if val:
if key.startswith('Brick'):
m['bricks'].append(val)
else:
m[key] = val
v[name] = m
return v
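# Example of the parsed structure (illustrative values):
#
#     {'dc0-projects': {'Type': 'Distribute',
#                       'Status': 'Started',
#                       'bricks': ['10.1.1.5:/data/dc0-projects',
#                                  '10.1.2.5:/data/dc0-projects']}}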
def ip_address(dest):
# get the ip address that is used to communicate with the given destination
import misc
return misc.local_ip_address(dest)
def mount_target_volumes(volume_name):
info = volume_info()
dests = []
ip = None
mount = cmd2('mount')[0]
for name, data in volume_info().iteritems():
if name.startswith('dc'):
v = name.split('-')
if len(v) >= 2 and v[1] == volume_name:
use = True
for brick in data['bricks']:
brick_ip, path = brick.split(':')
if ip_address(brick_ip) == brick_ip:
# this volume is partly hosted on this computer, hence not a target.
use = False
break
if use:
# ensure volume is mounted and add to list
if 'mnt/%s' % name not in mount:
cmd("mkdir -p '/mnt/%s'; mount -t glusterfs localhost:'/%s' '/mnt/%s'"
% (name, name, name))
dests.append('/mnt/%s' % name)
return dests
def find_bricks(volume_name):
bricks = []
ip = None
for name, data in volume_info().iteritems():
if name.startswith('dc'):
v = name.split('-')
if len(v) >= 2 and v[1] == volume_name:
for brick in data['bricks']:
brick_ip, path = brick.split(':')
if ip_address(brick_ip) == brick_ip:
bricks.append(path)
return bricks
def setup_log(loglevel='DEBUG', logfile=''):
logging.basicConfig()
global log
log = logging.getLogger('storage')
if loglevel:
level = getattr(logging, loglevel.upper())
log.setLevel(level)
if logfile:
log.addHandler(logging.FileHandler(logfile))
log.info("logger started")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Project storage")
parser.add_argument(
"--loglevel",
dest='loglevel',
type=str,
default='INFO',
help="log level: useful options include INFO, WARNING and DEBUG")
parser.add_argument(
"--logfile",
dest="logfile",
type=str,
default='',
help="store log in this file (default: '' = don't log to a file)")
subparsers = parser.add_subparsers(help='sub-command help')
def migrate(args):
if not args.storage:
args.storage = os.environ['SALVUS_STORAGE']
v = [os.path.abspath(x) for x in args.src]
for i, src in enumerate(v):
log.info("\n** %s of %s" % (i + 1, len(v)))
migrate_project_to_storage(
src=src,
storage=args.storage,
min_size_mb=args.min_size_mb,
max_size_mb=10000,
new_only=args.new_only)
parser_migrate = subparsers.add_parser(
'migrate', help='migrate to or update project in storage pool')
parser_migrate.add_argument(
"--storage",
help=
"the directory where project image directories are stored (default: $SALVUS_STORAGE enviro var)",
type=str,
default='')
parser_migrate.add_argument(
"--min_size_mb",
help="min size of zfs image in megabytes (default: 512)",
type=int,
default=512)
parser_migrate.add_argument(
"--new_only",
help="if image already created, do nothing (default: False)",
default=False,
action="store_const",
const=True)
parser_migrate.add_argument(
"src", help="the current project home directory", type=str, nargs="+")
parser_migrate.set_defaults(func=migrate)
def mount(args):
if not args.storage:
args.storage = os.environ['SALVUS_STORAGE']
mount_project(
storage=args.storage, project_id=args.project_id, force=args.f)
parser_mount = subparsers.add_parser(
'mount', help='mount a project that is available in the storage pool')
parser_mount.add_argument(
"--storage",
help=
"the directory where project image directories are stored (default: $SALVUS_STORAGE enviro var)",
type=str,
default='')
parser_mount.add_argument("project_id", help="the project id", type=str)
parser_mount.add_argument(
"-f",
help="force (default: False)",
default=False,
action="store_const",
const=True)
parser_mount.set_defaults(func=mount)
def unmount(args):
unmount_project(project_id=args.project_id)
parser_unmount = subparsers.add_parser(
'umount',
help='unmount a project that is available in the storage pool')
parser_unmount.add_argument("project_id", help="the project id", type=str)
parser_unmount.set_defaults(func=unmount)
def _info_json(args):
info_json(path=args.path)
parser_migrate = subparsers.add_parser(
'info_json',
help='query database, then write info.json file if there is none')
parser_migrate.add_argument(
"path",
help="path to a project home directory (old non-pooled)",
type=str,
nargs="+")
parser_migrate.set_defaults(func=_info_json)
def _sync(args):
if not args.dest:
args.dest = ','.join(mount_target_volumes(args.volume))
if not args.src:
args.src = ','.join(find_bricks(args.volume))
def main():
while True:
try:
if args.watch:
sync_watch(
sources=args.src.split(','),
dests=args.dest.split(','),
min_sync_time=args.min_sync_time)
else:
for src in args.src.split(','):
for dest in args.dest.split(','):
sync(src=src, dest=dest)
except KeyboardInterrupt:
return
except Exception, mesg:
print mesg
if not args.daemon:
return
time.sleep(5)
if args.daemon:
if not args.pidfile:
raise RuntimeError(
"in --daemon mode you *must* specify --pidfile")
import daemon
daemon.daemonize(args.pidfile)
main()
parser_sync = subparsers.add_parser(
'sync',
help=
'Cross data center project sync: simply uses the local "cp" command and local mounts of the glusterfs, but provides massive speedups due to sparseness of image files'
)
parser_sync.add_argument(
"--watch",
help=
"after running once, use inotify to watch for changes to the src filesystem and cp when they occur",
default=False,
action="store_const",
const=True)
parser_sync.add_argument(
"--min_sync_time",
help=
"never copy a file more frequently than this (default: 30 seconds)",
type=int,
default=30)
parser_sync.add_argument(
"--daemon",
help="daemon mode; will repeatedly sync",
dest="daemon",
default=False,
action="store_const",
const=True)
parser_sync.add_argument(
"--pidfile",
dest="pidfile",
type=str,
default='',
help="store pid in this file when daemonized")
parser_sync.add_argument(
"--dest",
help=
"comma separated list of destinations; if not given, all remote gluster volumes with name dc[n]-volume are mounted and targeted",
type=str,
default='')
parser_sync.add_argument(
"--src",
help=
"comma separated paths to bricks; if not given, local bricks for dc[n]-volume are used",
type=str,
default='')
parser_sync.add_argument(
"--volume",
help=
"if there are volumes dc0-projects, dc1-projects, dc2-projects, then pass option --volume=projects (default: 'projects')",
default='projects')
parser_sync.set_defaults(func=_sync)
args = parser.parse_args()
setup_log(loglevel=args.loglevel, logfile=args.logfile)
args.func(args)
else:
setup_log()
| agpl-3.0 | -1,152,436,700,362,561,500 | 35.08 | 197 | 0.541159 | false |
opengeogroep/inasafe | safe/storage/test_clipping.py | 9 | 12702 | import unittest
import numpy
import os
from os.path import join
from safe.common.testing import TESTDATA
from safe.common.polygon import (is_inside_polygon, inside_polygon,
populate_polygon,
generate_random_points_in_bbox)
from safe.storage.vector import Vector
from safe.storage.core import read_layer
from safe.storage.clipping import clip_raster_by_polygons
from safe.storage.geometry import Polygon
from safe.common.utilities import unique_filename
# FIXME (Ole): Move this along with contents of clipping.py to
# common and consolidate
class Test_Clipping(unittest.TestCase):
"""Tests for clipping module
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_clip_points_by_polygons(self):
"""Points can be clipped by polygons (real data)
"""
# Name input files
point_name = join(TESTDATA, 'population_5x5_jakarta_points.shp')
point_layer = read_layer(point_name)
points = numpy.array(point_layer.get_geometry())
attrs = point_layer.get_data()
# Loop through polygons
for filename in ['polygon_0.shp', 'polygon_1.shp', 'polygon_2.shp',
'polygon_3.shp', 'polygon_4.shp',
'polygon_5.shp', 'polygon_6.shp']:
polygon_layer = read_layer(join(TESTDATA, filename))
polygon = polygon_layer.get_geometry()[0]
# Clip
indices = inside_polygon(points, polygon)
# Sanity
for point in points[indices, :]:
assert is_inside_polygon(point, polygon)
# Explicit tests
if filename == 'polygon_0.shp':
assert len(indices) == 6
elif filename == 'polygon_1.shp':
assert len(indices) == 2
assert numpy.allclose(points[indices[0], :],
[106.8125, -6.1875])
assert numpy.allclose(points[indices[1], :],
[106.8541667, -6.1875])
assert numpy.allclose(attrs[indices[0]]['value'],
331941.6875)
assert numpy.allclose(attrs[indices[1]]['value'],
496445.8125)
elif filename == 'polygon_2.shp':
assert len(indices) == 7
elif filename == 'polygon_3.shp':
assert len(indices) == 0 # Degenerate
elif filename == 'polygon_4.shp':
assert len(indices) == 0 # Degenerate
elif filename == 'polygon_5.shp':
assert len(indices) == 8
elif filename == 'polygon_6.shp':
assert len(indices) == 6
test_clip_points_by_polygons.slow = True
def test_clip_raster_by_polygons(self):
"""Raster grids can be clipped by polygon layers
# See qgis project in test data: raster_point_and_clipping_test.qgs
"""
# Name input files
poly = join(TESTDATA, 'kabupaten_jakarta_singlepart.shp')
grid = join(TESTDATA, 'population_5x5_jakarta.asc')
# Get layers using API
P = read_layer(poly)
R = read_layer(grid)
M = len(P)
N = len(R)
assert N == 56
# Clip
C = clip_raster_by_polygons(R, P)
assert len(C) == M
# Check points inside polygon
tot = 0
for c in C:
tot += len(c)
assert tot == 14
# Check that points are inside the right polygon
for i, polygon in enumerate(P.get_geometry()):
points = C[i][0]
values = C[i][1]
# Sanity first
for point in points:
assert is_inside_polygon(point, polygon)
# Specific tests against raster pixel values inside polygons
# The values are read from qgis
if i == 0:
assert len(points) == 6
assert numpy.allclose(values[0], 200951)
assert numpy.allclose(values[1], 283237)
assert numpy.allclose(values[2], 278385)
assert numpy.allclose(values[3], 516061)
assert numpy.allclose(values[4], 207414)
assert numpy.allclose(values[5], 344466)
elif i == 1:
assert len(points) == 2
msg = ('Got wrong coordinates %s, expected %s'
% (str(points[0, :]), str([106.8125, -6.1875])))
assert numpy.allclose(points[0, :], [106.8125, -6.1875]), msg
assert numpy.allclose(points[1, :], [106.8541667, -6.1875])
assert numpy.allclose(values[0], 331942)
assert numpy.allclose(values[1], 496446)
elif i == 2:
assert len(points) == 7
assert numpy.allclose(values[0], 268579)
assert numpy.allclose(values[1], 155795)
assert numpy.allclose(values[2], 403674)
assert numpy.allclose(values[3], 259280)
assert numpy.allclose(values[4], 284526)
assert numpy.allclose(values[5], 334370)
assert numpy.allclose(values[6], 143325)
elif i == 3:
assert len(points) == 0 # Degenerate
elif i == 4:
assert len(points) == 0 # Degenerate
elif i == 5:
assert len(points) == 8
assert numpy.allclose(values[0], 279103)
assert numpy.allclose(values[1], 205762)
assert numpy.allclose(values[2], 428705)
assert numpy.allclose(values[3], 331093)
assert numpy.allclose(values[4], 227514)
assert numpy.allclose(values[5], 249308)
assert numpy.allclose(values[6], 215739)
assert numpy.allclose(values[7], 147447)
elif i == 6:
assert len(points) == 6
assert numpy.allclose(values[0], 61836.4)
assert numpy.allclose(values[1], 165723)
assert numpy.allclose(values[2], 151307)
assert numpy.allclose(values[3], 343787)
assert numpy.allclose(values[4], 303627)
assert numpy.allclose(values[5], 225232)
# Generate layer objects
values = [{'value': x} for x in C[i][1]]
point_layer = Vector(data=values, geometry=points,
projection=P.get_projection())
if len(point_layer) > 0:
# Geometry is only defined for layers that are not degenerate
assert point_layer.is_point_data
polygon_layer = Vector(geometry=[polygon],
projection=P.get_projection())
assert polygon_layer.is_polygon_data
# Generate spatial data for visualisation with e.g. QGIS
if False:
point_layer.write_to_file('points_%i.shp' % i)
polygon_layer.write_to_file('polygon_%i.shp' % i)
test_clip_raster_by_polygons.slow = True
def test_clip_points_by_polygons_with_holes0(self):
"""Points can be clipped by polygons with holes
"""
# Define an outer ring
outer_ring = numpy.array([[106.79, -6.233],
[106.80, -6.24],
[106.78, -6.23],
[106.77, -6.21],
[106.79, -6.233]])
# Define inner rings
inner_rings = [numpy.array([[106.77827, -6.2252],
[106.77775, -6.22378],
[106.78, -6.22311],
[106.78017, -6.22530],
[106.77827, -6.2252]])[::-1],
numpy.array([[106.78652, -6.23215],
[106.78642, -6.23075],
[106.78746, -6.23143],
[106.78831, -6.23307],
[106.78652, -6.23215]])[::-1]]
v = Vector(geometry=[Polygon(outer_ring=outer_ring,
inner_rings=inner_rings)])
assert v.is_polygon_data
# Write it to file
tmp_filename = unique_filename(suffix='.shp')
v.write_to_file(tmp_filename)
# Read polygon it back
L = read_layer(tmp_filename)
P = L.get_geometry(as_geometry_objects=True)[0]
outer_ring = P.outer_ring
inner_ring0 = P.inner_rings[0]
inner_ring1 = P.inner_rings[1]
# Make some test points
points = generate_random_points_in_bbox(outer_ring, 1000, seed=13)
# Clip to outer ring, excluding holes
indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)
# Sanity
for point in points[indices, :]:
# Must be inside outer ring
assert is_inside_polygon(point, outer_ring)
# But not in any of the inner rings
assert not is_inside_polygon(point, inner_ring0)
assert not is_inside_polygon(point, inner_ring1)
if False:
# Store for visual check
pol = Vector(geometry=[P])
tmp_filename = unique_filename(suffix='.shp')
pol.write_to_file(tmp_filename)
print 'Polygon with holes written to %s' % tmp_filename
pts = Vector(geometry=points[indices, :])
tmp_filename = unique_filename(suffix='.shp')
pts.write_to_file(tmp_filename)
print 'Clipped points written to %s' % tmp_filename
def test_clip_points_by_polygons_with_holes_real(self):
"""Points can be clipped by polygons with holes (real data)
"""
# Read real polygon with holes
filename = '%s/%s' % (TESTDATA, 'donut.shp')
L = read_layer(filename)
# --------------------------------------------
# Pick one polygon that has 2 inner rings
P = L.get_geometry(as_geometry_objects=True)[1]
outer_ring = P.outer_ring
inner_ring0 = P.inner_rings[0]
inner_ring1 = P.inner_rings[1]
# Make some test points
points_in_bbox = generate_random_points_in_bbox(outer_ring, 1000)
points_in_inner_ring0 = populate_polygon(inner_ring0, 2, seed=13)
points_in_inner_ring1 = populate_polygon(inner_ring1, 2, seed=17)
points = numpy.concatenate((points_in_bbox,
points_in_inner_ring0,
points_in_inner_ring1))
# Clip
indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)
# Sanity
for point in points[indices, :]:
# Must be inside outer ring
assert is_inside_polygon(point, outer_ring)
# But not in any of the inner rings
assert not is_inside_polygon(point, inner_ring0)
assert not is_inside_polygon(point, inner_ring1)
# ---------------------------------------------------------
# Pick a polygon that has 1 inner ring (nice visualisation)
P = L.get_geometry(as_geometry_objects=True)[9]
outer_ring = P.outer_ring
inner_ring = P.inner_rings[0]
# Make some test points
points = generate_random_points_in_bbox(outer_ring, 500)
# Clip
indices = inside_polygon(points, P.outer_ring, holes=P.inner_rings)
# Sanity
for point in points[indices, :]:
# Must be inside outer ring
assert is_inside_polygon(point, outer_ring)
# But not in the inner ring
assert not is_inside_polygon(point, inner_ring)
# Store for visual check (nice one!)
# Uncomment os.remove if you want see the layers
pol = Vector(geometry=[P])
tmp_filename = unique_filename(suffix='.shp')
pol.write_to_file(tmp_filename)
#print 'Polygon with holes written to %s' % tmp_filename
os.remove(tmp_filename)
pts = Vector(geometry=points[indices, :])
tmp_filename = unique_filename(suffix='.shp')
pts.write_to_file(tmp_filename)
#print 'Clipped points written to %s' % tmp_filename
os.remove(tmp_filename)
test_clip_points_by_polygons_with_holes_real.slow = True
if __name__ == '__main__':
suite = unittest.makeSuite(Test_Clipping, 'test')
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
| gpl-3.0 | -5,716,524,877,034,174,000 | 37.374622 | 77 | 0.529444 | false |
patricklaw/pants | src/python/pants/backend/python/util_rules/pex_from_targets_test.py | 3 | 10222 | # Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import json
import subprocess
import sys
from dataclasses import dataclass
from pathlib import Path, PurePath
from textwrap import dedent
from typing import Any, Dict, Iterable, List, cast
import pytest
from _pytest.tmpdir import TempPathFactory
from pants.backend.python.target_types import PythonLibrary, PythonRequirementLibrary
from pants.backend.python.util_rules import pex_from_targets
from pants.backend.python.util_rules.pex import Pex, PexPlatforms, PexRequest, PexRequirements
from pants.backend.python.util_rules.pex_from_targets import PexFromTargetsRequest
from pants.build_graph.address import Address
from pants.engine.internals.scheduler import ExecutionError
from pants.testutil.rule_runner import QueryRule, RuleRunner
from pants.util.contextutil import pushd
from pants.util.ordered_set import OrderedSet
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner(
rules=[
*pex_from_targets.rules(),
QueryRule(PexRequest, (PexFromTargetsRequest,)),
],
target_types=[PythonLibrary, PythonRequirementLibrary],
)
@dataclass(frozen=True)
class Project:
name: str
version: str
build_deps = ["setuptools==54.1.2", "wheel==0.36.2"]
def create_project_dir(workdir: Path, project: Project) -> PurePath:
project_dir = workdir / "projects" / project.name
project_dir.mkdir(parents=True)
(project_dir / "pyproject.toml").write_text(
dedent(
f"""\
[build-system]
requires = {build_deps}
build-backend = "setuptools.build_meta"
"""
)
)
(project_dir / "setup.cfg").write_text(
dedent(
f"""\
[metadata]
name = {project.name}
version = {project.version}
"""
)
)
return project_dir
def create_dists(workdir: Path, project: Project, *projects: Project) -> PurePath:
project_dirs = [create_project_dir(workdir, proj) for proj in (project, *projects)]
pex = workdir / "pex"
subprocess.run(
args=[
sys.executable,
"-m",
"pex",
*project_dirs,
*build_deps,
"--include-tools",
"-o",
pex,
],
check=True,
)
find_links = workdir / "find-links"
subprocess.run(
args=[
sys.executable,
"-m",
"pex.tools",
pex,
"repository",
"extract",
"--find-links",
find_links,
],
check=True,
)
return find_links
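# The two subprocess calls above are roughly equivalent to running
# (illustrative shell transcription):
#
#   python -m pex projects/* setuptools==54.1.2 wheel==0.36.2 \
#       --include-tools -o pex
#   python -m pex.tools pex repository extract --find-links find-links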
def info(rule_runner: RuleRunner, pex: Pex) -> Dict[str, Any]:
rule_runner.scheduler.write_digest(pex.digest)
completed_process = subprocess.run(
args=[
sys.executable,
"-m",
"pex.tools",
pex.name,
"info",
],
cwd=rule_runner.build_root,
stdout=subprocess.PIPE,
check=True,
)
return cast(Dict[str, Any], json.loads(completed_process.stdout))
def requirements(rule_runner: RuleRunner, pex: Pex) -> List[str]:
return cast(List[str], info(rule_runner, pex)["requirements"])
def test_constraints_validation(tmp_path_factory: TempPathFactory, rule_runner: RuleRunner) -> None:
find_links = create_dists(
tmp_path_factory.mktemp("sdists"),
Project("Foo-Bar", "1.0.0"),
Project("Bar", "5.5.5"),
Project("baz", "2.2.2"),
Project("QUX", "3.4.5"),
)
# Turn the project dir into a git repo, so it can be cloned.
foorl_dir = create_project_dir(tmp_path_factory.mktemp("git"), Project("foorl", "9.8.7"))
with pushd(str(foorl_dir)):
subprocess.check_call(["git", "init"])
subprocess.check_call(["git", "config", "user.name", "dummy"])
subprocess.check_call(["git", "config", "user.email", "[email protected]"])
subprocess.check_call(["git", "add", "--all"])
subprocess.check_call(["git", "commit", "-m", "initial commit"])
subprocess.check_call(["git", "branch", "9.8.7"])
# This string won't parse as a Requirement if it doesn't contain a netloc,
# so we explicitly mention localhost.
url_req = f"foorl@ git+file://localhost{foorl_dir.as_posix()}@9.8.7"
rule_runner.add_to_build_file(
"",
dedent(
f"""
python_requirement_library(name="foo", requirements=["foo-bar>=0.1.2"])
python_requirement_library(name="bar", requirements=["bar==5.5.5"])
python_requirement_library(name="baz", requirements=["baz"])
python_requirement_library(name="foorl", requirements=["{url_req}"])
python_library(name="util", sources=[], dependencies=[":foo", ":bar"])
python_library(name="app", sources=[], dependencies=[":util", ":baz", ":foorl"])
"""
),
)
rule_runner.create_file(
"constraints1.txt",
dedent(
"""
# Comment.
--find-links=https://duckduckgo.com
Foo._-BAR==1.0.0 # Inline comment.
bar==5.5.5
baz==2.2.2
qux==3.4.5
# Note that pip does not allow URL requirements in constraints files,
# so there is no mention of foorl here.
"""
),
)
def get_pex_request(
constraints_file: str | None,
resolve_all_constraints: bool | None,
*,
direct_deps_only: bool = False,
additional_args: Iterable[str] = (),
) -> PexRequest:
args = ["--backend-packages=pants.backend.python"]
request = PexFromTargetsRequest(
[Address("", target_name="app")],
output_filename="demo.pex",
internal_only=True,
direct_deps_only=direct_deps_only,
additional_args=additional_args,
)
if resolve_all_constraints is not None:
args.append(f"--python-setup-resolve-all-constraints={resolve_all_constraints!r}")
if constraints_file:
args.append(f"--python-setup-requirement-constraints={constraints_file}")
args.append("--python-repos-indexes=[]")
args.append(f"--python-repos-repos={find_links}")
rule_runner.set_options(args, env_inherit={"PATH"})
pex_request = rule_runner.request(PexRequest, [request])
assert OrderedSet(additional_args).issubset(OrderedSet(pex_request.additional_args))
return pex_request
additional_args = ["--no-strip-pex-env"]
pex_req1 = get_pex_request(
"constraints1.txt",
resolve_all_constraints=False,
additional_args=additional_args,
)
assert pex_req1.requirements == PexRequirements(
["foo-bar>=0.1.2", "bar==5.5.5", "baz", url_req]
)
assert pex_req1.repository_pex is None
pex_req1_direct = get_pex_request(
"constraints1.txt", resolve_all_constraints=False, direct_deps_only=True
)
assert pex_req1_direct.requirements == PexRequirements(["baz", url_req])
assert pex_req1_direct.repository_pex is None
pex_req2 = get_pex_request(
"constraints1.txt",
resolve_all_constraints=True,
additional_args=additional_args,
)
assert pex_req2.requirements == PexRequirements(
["foo-bar>=0.1.2", "bar==5.5.5", "baz", url_req]
)
assert pex_req2.repository_pex is not None
assert not info(rule_runner, pex_req2.repository_pex)["strip_pex_env"]
repository_pex = pex_req2.repository_pex
assert ["Foo._-BAR==1.0.0", "bar==5.5.5", "baz==2.2.2", "foorl", "qux==3.4.5"] == requirements(
rule_runner, repository_pex
)
pex_req2_direct = get_pex_request(
"constraints1.txt",
resolve_all_constraints=True,
direct_deps_only=True,
additional_args=additional_args,
)
assert pex_req2_direct.requirements == PexRequirements(["baz", url_req])
assert pex_req2_direct.repository_pex == repository_pex
assert not info(rule_runner, pex_req2.repository_pex)["strip_pex_env"]
pex_req3_direct = get_pex_request(
"constraints1.txt",
resolve_all_constraints=True,
direct_deps_only=True,
)
assert pex_req3_direct.requirements == PexRequirements(["baz", url_req])
assert pex_req3_direct.repository_pex is not None
assert pex_req3_direct.repository_pex != repository_pex
assert info(rule_runner, pex_req3_direct.repository_pex)["strip_pex_env"]
with pytest.raises(ExecutionError) as err:
get_pex_request(None, resolve_all_constraints=True)
assert len(err.value.wrapped_exceptions) == 1
assert isinstance(err.value.wrapped_exceptions[0], ValueError)
assert (
"`[python-setup].resolve_all_constraints` is enabled, so "
"`[python-setup].requirement_constraints` must also be set."
) in str(err.value)
# Shouldn't error, as we don't explicitly set --resolve-all-constraints.
get_pex_request(None, resolve_all_constraints=None)
def test_issue_12222(rule_runner: RuleRunner) -> None:
rule_runner.write_files(
{
"constraints.txt": "foo==1.0\nbar==1.0",
"BUILD": dedent(
"""
python_requirement_library(name="foo",requirements=["foo"])
python_requirement_library(name="bar",requirements=["bar"])
python_library(name="lib",sources=[],dependencies=[":foo"])
"""
),
}
)
request = PexFromTargetsRequest(
[Address("", target_name="lib")],
output_filename="demo.pex",
internal_only=False,
platforms=PexPlatforms(["some-platform-x86_64"]),
)
rule_runner.set_options(
[
"--python-setup-requirement-constraints=constraints.txt",
"--python-setup-resolve-all-constraints",
]
)
result = rule_runner.request(PexRequest, [request])
assert result.repository_pex is None
assert result.requirements == PexRequirements(["foo"])
| apache-2.0 | -3,392,319,371,602,630,000 | 33.073333 | 100 | 0.60497 | false |
KohlsTechnology/ansible | lib/ansible/modules/database/mssql/mssql_db.py | 25 | 6853 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Vedit Firat Arig <[email protected]>
# Outline and parts are reused from Mark Theunissen's mysql_db module
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: mssql_db
short_description: Add or remove MSSQL databases from a remote host.
description:
- Add or remove MSSQL databases from a remote host.
version_added: "2.2"
options:
name:
description:
- name of the database to add or remove
required: true
aliases: [ db ]
login_user:
description:
- The username used to authenticate with
login_password:
description:
- The password used to authenticate with
login_host:
description:
- Host running the database
login_port:
description:
      - Port of the MSSQL server. Requires login_host to be defined as something other than localhost if login_port is used
default: 1433
state:
description:
- The database state
default: present
choices: [ "present", "absent", "import" ]
target:
description:
- Location, on the remote host, of the dump file to read from or write to. Uncompressed SQL
        files (C(.sql)) are supported.
autocommit:
description:
- Automatically commit the change only if the import succeed. Sometimes it is necessary to use autocommit=true, since some content can't be changed
within a transaction.
type: bool
default: 'no'
notes:
- Requires the pymssql Python package on the remote host. For Ubuntu, this
is as easy as pip install pymssql (See M(pip).)
requirements:
- python >= 2.7
- pymssql
author: Vedit Firat Arig
'''
EXAMPLES = '''
# Create a new database with name 'jackdata'
- mssql_db:
name: jackdata
state: present
# Copy database dump file to remote host and restore it to database 'my_db'
- copy:
src: dump.sql
dest: /tmp
- mssql_db:
name: my_db
state: import
target: /tmp/dump.sql
'''
RETURN = '''
#
'''
import os
try:
import pymssql
except ImportError:
mssql_found = False
else:
mssql_found = True
from ansible.module_utils.basic import AnsibleModule
def db_exists(conn, cursor, db):
cursor.execute("SELECT name FROM master.sys.databases WHERE name = %s", db)
conn.commit()
return bool(cursor.rowcount)
def db_create(conn, cursor, db):
cursor.execute("CREATE DATABASE [%s]" % db)
return db_exists(conn, cursor, db)
def db_delete(conn, cursor, db):
try:
cursor.execute("ALTER DATABASE [%s] SET single_user WITH ROLLBACK IMMEDIATE" % db)
    except:
        # Failures here are tolerated: the database may already be in
        # single_user mode or have no sessions left to roll back.
        pass
cursor.execute("DROP DATABASE [%s]" % db)
return not db_exists(conn, cursor, db)
def db_import(conn, cursor, module, db, target):
if os.path.isfile(target):
backup = open(target, 'r')
        try:
            # T-SQL scripts separate batches with lines beginning with "GO",
            # which is a client-side directive rather than SQL, so each
            # accumulated batch is executed whenever a "GO" line is seen.
            sqlQuery = "USE [%s]\n" % db
            for line in backup:
                if line is None:
                    break
                elif line.startswith('GO'):
                    cursor.execute(sqlQuery)
                    sqlQuery = "USE [%s]\n" % db
                else:
                    sqlQuery += line
            # Execute whatever remains after the last "GO" (or the whole
            # file, if it contained no batch separators).
            cursor.execute(sqlQuery)
            conn.commit()
finally:
backup.close()
return 0, "import successful", ""
else:
return 1, "cannot find target file", "cannot find target file"
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, aliases=['db']),
login_user=dict(default=''),
login_password=dict(default='', no_log=True),
login_host=dict(required=True),
login_port=dict(default='1433'),
target=dict(default=None),
autocommit=dict(type='bool', default=False),
state=dict(
default='present', choices=['present', 'absent', 'import'])
)
)
if not mssql_found:
module.fail_json(msg="pymssql python module is required")
db = module.params['name']
state = module.params['state']
autocommit = module.params['autocommit']
target = module.params["target"]
login_user = module.params['login_user']
login_password = module.params['login_password']
login_host = module.params['login_host']
login_port = module.params['login_port']
login_querystring = login_host
if login_port != "1433":
login_querystring = "%s:%s" % (login_host, login_port)
if login_user != "" and login_password == "":
module.fail_json(msg="when supplying login_user arguments login_password must be provided")
try:
conn = pymssql.connect(user=login_user, password=login_password, host=login_querystring, database='master')
cursor = conn.cursor()
except Exception as e:
if "Unknown database" in str(e):
errno, errstr = e.args
module.fail_json(msg="ERROR: %s %s" % (errno, errstr))
else:
module.fail_json(msg="unable to connect, check login_user and login_password are correct, or alternatively check your "
"@sysconfdir@/freetds.conf / ${HOME}/.freetds.conf")
conn.autocommit(True)
changed = False
if db_exists(conn, cursor, db):
if state == "absent":
try:
changed = db_delete(conn, cursor, db)
except Exception as e:
module.fail_json(msg="error deleting database: " + str(e))
elif state == "import":
conn.autocommit(autocommit)
rc, stdout, stderr = db_import(conn, cursor, module, db, target)
if rc != 0:
module.fail_json(msg="%s" % stderr)
else:
module.exit_json(changed=True, db=db, msg=stdout)
else:
if state == "present":
try:
changed = db_create(conn, cursor, db)
except Exception as e:
module.fail_json(msg="error creating database: " + str(e))
elif state == "import":
try:
changed = db_create(conn, cursor, db)
except Exception as e:
module.fail_json(msg="error creating database: " + str(e))
conn.autocommit(autocommit)
rc, stdout, stderr = db_import(conn, cursor, module, db, target)
if rc != 0:
module.fail_json(msg="%s" % stderr)
else:
module.exit_json(changed=True, db=db, msg=stdout)
module.exit_json(changed=changed, db=db)
if __name__ == '__main__':
main()
| gpl-3.0 | 7,879,831,622,681,457,000 | 28.925764 | 153 | 0.594338 | false |
icedaq/dockerfiles | home/files/vim/bundle/deoplete-go/rplugin/python3/deoplete/sources/deoplete_go/clang_index.py | 1 | 13162 | class Clang_Index(object):
kinds = dict({
# Declarations
1: 't', # CXCursor_UnexposedDecl # A declaration whose specific kind
# is not exposed via this interface
2: 'struct', # CXCursor_StructDecl (A C or C++ struct)
3: 'union', # CXCursor_UnionDecl (A C or C++ union)
4: 'class', # CXCursor_ClassDecl (A C++ class)
5: 'enumeration', # CXCursor_EnumDecl (An enumeration)
# CXCursor_FieldDecl (A field (in C) or non-static data member
6: 'member',
# (in C++) in a struct, union, or C++ class)
# CXCursor_EnumConstantDecl (An enumerator constant)
7: 'enumerator constant',
8: 'function', # CXCursor_FunctionDecl (A function)
9: 'variable', # CXCursor_VarDecl (A variable)
# CXCursor_ParmDecl (A function or method parameter)
10: 'method parameter',
11: '11', # CXCursor_ObjCInterfaceDecl (An Objective-C @interface)
# CXCursor_ObjCCategoryDecl (An Objective-C @interface for a
12: '12',
# category)
13: '13', # CXCursor_ObjCProtocolDecl
# (An Objective-C @protocol declaration)
# CXCursor_ObjCPropertyDecl (An Objective-C @property declaration)
14: '14',
15: '15', # CXCursor_ObjCIvarDecl (An Objective-C instance variable)
16: '16', # CXCursor_ObjCInstanceMethodDecl
# (An Objective-C instance method)
17: '17', # CXCursor_ObjCClassMethodDecl
# (An Objective-C class method)
        18: '18',  # CXCursor_ObjCImplementationDecl
        # (An Objective-C @implementation)
        19: '19',  # CXCursor_ObjCCategoryImplDecl
        # (An Objective-C @implementation for a category)
20: 'typedef', # CXCursor_TypedefDecl (A typedef)
21: 'class method', # CXCursor_CXXMethod (A C++ class method)
22: 'namespace', # CXCursor_Namespace (A C++ namespace)
# CXCursor_LinkageSpec (A linkage specification,e.g. Extern "C")
23: '23',
24: 'constructor', # CXCursor_Constructor (A C++ constructor)
25: 'destructor', # CXCursor_Destructor (A C++ destructor)
# CXCursor_ConversionFunction (A C++ conversion function)
26: 'conversion function',
# CXCursor_TemplateTypeParameter (A C++ template type parameter)
27: 'a',
# CXCursor_NonTypeTemplateParameter (A C++ non-type template parameter)
28: 'a',
# CXCursor_TemplateTemplateParameter (A C++ template template
# parameter)
29: 'a',
# CXCursor_FunctionTemplate (A C++ function template)
30: 'function template',
# CXCursor_ClassTemplate (A C++ class template)
31: 'class template',
32: '32', # CXCursor_ClassTemplatePartialSpecialization
# (A C++ class template partial specialization)
# CXCursor_NamespaceAlias (A C++ namespace alias declaration)
33: 'n',
# CXCursor_UsingDirective (A C++ using directive)
34: 'using directive',
# CXCursor_UsingDeclaration (A C++ using declaration)
35: 'using declaration',
# CXCursor_TypeAliasDecl (A C++ alias declaration)
36: 'alias declaration',
# CXCursor_ObjCSynthesizeDecl (An Objective-C synthesize definition)
37: '37',
# CXCursor_ObjCDynamicDecl (An Objective-C dynamic definition)
38: '38',
39: '39', # CXCursor_CXXAccessSpecifier (An access specifier)
# References
40: '40', # CXCursor_ObjCSuperClassRef
41: '41', # CXCursor_ObjCProtocolRef
42: '42', # CXCursor_ObjCClassRef
43: '43', # CXCursor_TypeRef
44: '44', # CXCursor_CXXBaseSpecifier
45: '45', # CXCursor_TemplateRef
# (A reference to a class template, function template, template
# template parameter, or class template partial
# specialization)
# CXCursor_NamespaceRef (A ref to a namespace or namespace alias)
46: '46',
# CXCursor_MemberRef (A reference to a member of a struct, union,
47: '47',
# or class that occurs in some non-expression context,
# e.g., a designated initializer)
48: '48', # CXCursor_LabelRef (A reference to a labeled statement)
49: '49', # CXCursor_OverloadedDeclRef
# (A reference to a set of overloaded functions or function
# templates that has not yet been resolved to a specific
# function or function template)
50: '50', # CXCursor_VariableRef
# Error conditions
# 70: '70', # CXCursor_FirstInvalid
70: '70', # CXCursor_InvalidFile
71: '71', # CXCursor_NoDeclFound
72: 'u', # CXCursor_NotImplemented
73: '73', # CXCursor_InvalidCode
# Expressions
# CXCursor_UnexposedExpr (An expression whose specific kind is
100: '100',
# not exposed via this interface)
# CXCursor_DeclRefExpr (An expression that refers to some value
101: '101',
        # declaration, such as a function, variable, or
# enumerator)
# CXCursor_MemberRefExpr (An expression that refers to a member
102: '102',
# of a struct, union, class, Objective-C class, etc)
103: '103', # CXCursor_CallExpr (An expression that calls a function)
# CXCursor_ObjCMessageExpr (An expression that sends a message
104: '104',
# to an Objective-C object or class)
# CXCursor_BlockExpr (An expression that represents a block
105: '105',
# literal)
106: '106', # CXCursor_IntegerLiteral (An integer literal)
# CXCursor_FloatingLiteral (A floating point number literal)
107: '107',
108: '108', # CXCursor_ImaginaryLiteral (An imaginary number literal)
109: '109', # CXCursor_StringLiteral (A string literal)
110: '110', # CXCursor_CharacterLiteral (A character literal)
# CXCursor_ParenExpr (A parenthesized expression, e.g. "(1)")
111: '111',
# CXCursor_UnaryOperator (This represents the unary-expression's
112: '112',
# (except sizeof and alignof))
# CXCursor_ArraySubscriptExpr ([C99 6.5.2.1] Array Subscripting)
113: '113',
# CXCursor_BinaryOperator (A builtin binary operation expression
114: '114',
# such as "x + y" or "x <= y")
# CXCursor_CompoundAssignOperator (Compound assignment such as
115: '115',
# "+=")
116: '116', # CXCursor_ConditionalOperator (The ?: ternary operator)
# CXCursor_CStyleCastExpr (An explicit cast in C (C99 6.5.4) or
117: '117',
# C-style cast in C++ (C++ [expr.cast]), which uses the
# syntax (Type)expr)
118: '118', # CXCursor_CompoundLiteralExpr ([C99 6.5.2.5])
# CXCursor_InitListExpr (Describes an C or C++ initializer list)
119: '119',
# CXCursor_AddrLabelExpr (The GNU address of label extension,
120: '120',
# representing &&label)
121: '121', # CXCursor_StmtExpr (This is the GNU Statement Expression
# extension: ({int X=4; X;})
# CXCursor_GenericSelectionExpr (brief Represents a C11 generic
122: '122',
# selection)
# CXCursor_GNUNullExpr (Implements the GNU __null extension)
123: '123',
# CXCursor_CXXStaticCastExpr (C++'s static_cast<> expression)
124: '124',
# CXCursor_CXXDynamicCastExpr (C++'s dynamic_cast<> expression)
125: '125',
# CXCursor_CXXReinterpretCastExpr (C++'s reinterpret_cast<>
126: '126',
# expression)
# CXCursor_CXXConstCastExpr (C++'s const_cast<> expression)
127: '127',
# CXCursor_CXXFunctionalCastExpr (Represents an explicit C++ type
128: '128',
# conversion that uses "functional" notion
# (C++ [expr.type.conv]))
129: '129', # CXCursor_CXXTypeidExpr (A C++ typeid expression
# (C++ [expr.typeid]))
# CXCursor_CXXBoolLiteralExpr (brief [C++ 2.13.5] C++ Boolean
130: '130',
# Literal)
# CXCursor_CXXNullPtrLiteralExpr ([C++0x 2.14.7] C++ Pointer
131: '131',
# Literal)
# CXCursor_CXXThisExpr (Represents the "this" expression in C+)
132: '132',
133: '133', # CXCursor_CXXThrowExpr ([C++ 15] C++ Throw Expression)
# CXCursor_CXXNewExpr (A new expression for memory allocation
134: '134',
# and constructor calls)
135: '135', # CXCursor_CXXDeleteExpr (A delete expression for memory
# deallocation and destructor calls)
136: '136', # CXCursor_UnaryExpr (A unary expression)
# CXCursor_ObjCStringLiteral (An Objective-C string literal
137: '137',
# i.e. @"foo")
# CXCursor_ObjCEncodeExpr (An Objective-C @encode expression)
138: '138',
# CXCursor_ObjCSelectorExpr (An Objective-C @selector expression)
139: '139',
# CXCursor_ObjCProtocolExpr (An Objective-C @protocol expression)
140: '140',
# CXCursor_ObjCBridgedCastExpr (An Objective-C "bridged" cast
141: '141',
# expression, which casts between Objective-C pointers
# and C pointers, transferring ownership in the process)
# CXCursor_PackExpansionExpr (Represents a C++0x pack expansion
142: '142',
# that produces a sequence of expressions)
# CXCursor_SizeOfPackExpr (Represents an expression that computes
143: '143',
# the length of a parameter pack)
# CXCursor_LambdaExpr (Represents a C++ lambda expression that
144: '144',
# produces a local function object)
# CXCursor_ObjCBoolLiteralExpr (Objective-c Boolean Literal)
145: '145',
# Statements
# CXCursor_UnexposedStmt (A statement whose specific kind is not
200: '200',
# exposed via this interface)
201: '201', # CXCursor_LabelStmt (A labelled statement in a function)
202: '202', # CXCursor_CompoundStmt (A group of statements like
# { stmt stmt }.
        203: '203',  # CXCursor_CaseStmt (A case statement)
        204: '204',  # CXCursor_DefaultStmt (A default statement)
        205: '205',  # CXCursor_IfStmt (An if statement)
206: '206', # CXCursor_SwitchStmt (A switch statement)
207: '207', # CXCursor_WhileStmt (A while statement)
208: '208', # CXCursor_DoStmt (A do statement)
209: '209', # CXCursor_ForStmt (A for statement)
210: '210', # CXCursor_GotoStmt (A goto statement)
211: '211', # CXCursor_IndirectGotoStmt (An indirect goto statement)
212: '212', # CXCursor_ContinueStmt (A continue statement)
213: '213', # CXCursor_BreakStmt (A break statement)
214: '214', # CXCursor_ReturnStmt (A return statement)
# CXCursor_GCCAsmStmt (A GCC inline assembly statement extension)
215: '215',
# CXCursor_ObjCAtTryStmt (Objective-C's overall try-catch-finally
216: '216',
# statement.
# CXCursor_ObjCAtCatchStmt (Objective-C's catch statement)
217: '217',
# CXCursor_ObjCAtFinallyStmt (Objective-C's finally statement)
218: '218',
# CXCursor_ObjCAtThrowStmt (Objective-C's throw statement)
219: '219',
# CXCursor_ObjCAtSynchronizedStmt (Objective-C's synchronized
220: '220',
# statement)
# CXCursor_ObjCAutoreleasePoolStmt (Objective-C's autorelease
221: '221',
# pool statement)
# CXCursor_ObjCForCollectionStmt (Objective-C's collection
222: '222',
# statement)
223: '223', # CXCursor_CXXCatchStmt (C++'s catch statement)
224: '224', # CXCursor_CXXTryStmt (C++'s try statement)
225: '225', # CXCursor_CXXForRangeStmt (C++'s for (*: *) statement)
# CXCursor_SEHTryStmt (Windows Structured Exception Handling's
226: '226',
# try statement)
# CXCursor_SEHExceptStmt (Windows Structured Exception Handling's
227: '227',
# except statement.
228: '228', # CXCursor_SEHFinallyStmt (Windows Structured Exception
# Handling's finally statement)
# CXCursor_MSAsmStmt (A MS inline assembly statement extension)
229: '229',
230: '230', # CXCursor_NullStmt (The null satement ";": C99 6.8.3p3)
# CXCursor_DeclStmt (Adaptor class for mixing declarations with
231: '231',
# statements and expressions)
# Translation unit
300: '300', # CXCursor_TranslationUnit (Cursor that represents the
# translation unit itself)
# Attributes
# CXCursor_UnexposedAttr (An attribute whose specific kind is
400: '400',
# not exposed via this interface)
401: '401', # CXCursor_IBActionAttr
402: '402', # CXCursor_IBOutletAttr
403: '403', # CXCursor_IBOutletCollectionAttr
404: '404', # CXCursor_CXXFinalAttr
405: '405', # CXCursor_CXXOverrideAttr
406: '406', # CXCursor_AnnotateAttr
407: '407', # CXCursor_AsmLabelAttr
# Preprocessing
500: '500', # CXCursor_PreprocessingDirective
    501: '501',  # CXCursor_MacroDefinition
    502: '502',  # CXCursor_MacroInstantiation
    503: '503',  # CXCursor_InclusionDirective

    # Modules
    600: '600',  # CXCursor_ModuleImportDecl (A module import declaration)
})
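
# Usage sketch (illustrative only — the name `kinds` for this mapping and the
# `cursor` object are assumptions, not shown in this excerpt):
#
#   kinds.get(cursor.kind.value, '')  # e.g. '207' for a while statement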
| gpl-3.0 | 1,332,748,181,302,769,000 | 43.921502 | 80 | 0.629008 | false |
GammaC0de/pyload | src/pyload/core/utils/fs.py | 1 | 9701 | # -*- coding: utf-8 -*-
# AUTHOR: vuolter
import hashlib
import io
import os
import shutil
# import portalocker
# import psutil
from .convert import to_bytes, to_str

try:
    import send2trash
except ImportError:
    send2trash = None

try:
    import magic
except ImportError:
    magic = None

from filetype import guess_mime

try:
    import zlib
except ImportError:
    zlib = None


def free_space(path):
    availspace = None
    if os.name == "nt":
        import ctypes

        free_bytes = ctypes.c_ulonglong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(
            ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes)
        )
        availspace = free_bytes.value
    else:
        s = os.statvfs(path)
        availspace = s.f_frsize * s.f_bavail
    return availspace
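
# Usage sketch (the path is illustrative): returns the number of bytes
# available to unprivileged users on the filesystem holding `path`.
#
#   >>> free_space(".")  # doctest: +SKIP
#   52428800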


def _shdo(func, src, dst, overwrite=None, ref=None):
    mtime = os.path.getmtime
    try:
        if os.path.isfile(dst):
            if overwrite is None and mtime(src) <= mtime(dst):
                return
            elif not overwrite:
                return
            if os.name == "nt":
                os.remove(dst)
        func(src, dst)
        if isinstance(ref, list):
            del ref[:]
    except (IOError, OSError):
        pass


def _shdorc(func, filenames, src_dir, dst_dir, overwrite=None):
    join = os.path.join
    for fname in filenames:
        src_file = join(src_dir, fname)
        dst_file = join(dst_dir, fname)
        _shdo(func, src_file, dst_file, overwrite)


def _copyrc(src, dst, overwrite, preserve_metadata):
    copy = shutil.copy2 if preserve_metadata else shutil.copy
    copytree = shutil.copytree
    exists = os.path.exists
    for src_dir, dirnames, filenames in os.walk(src):
        dst_dir = src_dir.replace(src, dst, 1)
        if exists(dst_dir):
            _shdorc(copy, filenames, src_dir, dst_dir, overwrite)
        else:
            _shdo(copytree, src_dir, dst_dir, overwrite, dirnames)


def copy(src, dst, overwrite=None, preserve_metadata=True):
    if not os.path.isdir(dst) or not os.path.isdir(src):
        return _shdo(shutil.copytree, src, dst, overwrite)
    return _copyrc(src, dst, overwrite, preserve_metadata)
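
# Overwrite semantics, as a sketch (paths illustrative): overwrite=None copies
# only when the source is newer than an existing destination, True always
# copies, False never replaces an existing file.
#
#   copy("downloads", "backup")                  # update if newer
#   copy("downloads", "backup", overwrite=True)  # always replace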


def exists(path, strict=False):
    """Case-sensitive os.path.exists."""
    if not strict:
        return os.path.exists(path)
    if os.path.exists(path):
        dirpath, name = os.path.split(path)
        return name in os.listdir(dirpath)
    return False
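
# On case-insensitive filesystems (e.g. the Windows or macOS defaults) the
# strict flag makes the casing matter; a sketch with illustrative names:
#
#   os.path.exists("README.MD")       # may be True with only README.md on disk
#   exists("README.MD", strict=True)  # True only for an exact-case match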


def filesize(filename):
    return os.stat(filename).st_size


def filetype(filename):
    try:
        return magic.from_file(filename, mime=True)
    except AttributeError:
        # `magic` is None when python-magic is missing, or the installed
        # module lacks `from_file`; fall back to the filetype package below
        pass
    return guess_mime(filename)


def encode(path):
    try:
        return os.fsencode(path)
    except AttributeError:
        return to_bytes(path)


def decode(path):
    try:
        return os.fsdecode(path)
    except AttributeError:
        return to_str(path)


def fullpath(path):
    return os.path.realpath(os.path.expanduser(path))


def blksize(path):
    """Get optimal file system buffer size (in bytes) for I/O calls."""
    if os.name != "nt":
        size = os.statvfs(path).f_bsize
    else:
        import ctypes

        drive = os.path.splitdrive(os.path.abspath(path))[0] + "\\"
        cluster_sectors = ctypes.c_longlong(0)
        sector_size = ctypes.c_longlong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceW(
            ctypes.c_wchar_p(drive),
            ctypes.pointer(cluster_sectors),
            ctypes.pointer(sector_size),
            None,
            None,
        )
        size = int(cluster_sectors.value * sector_size.value)
    return size


def bufread(fp, buffering=-1, sentinel=b""):
    buf = blksize(fp.name) if buffering < 0 else buffering
    func = fp.readline if buffering == 1 else lambda: fp.read(buf)
    return iter(func, sentinel)
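
# Usage sketch (file name illustrative): iterate a file in chunks sized to the
# filesystem's preferred block size instead of loading it at once.
#
#   with io.open("big.bin", mode="rb") as fp:
#       for chunk in bufread(fp):
#           ...  # feed chunk to a parser, hasher, socket, etc.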


def _crcsum(filename, chkname, buffering):
    last = 0
    call = getattr(zlib, chkname)
    with io.open(filename, mode="rb") as fp:
        for chunk in bufread(fp, buffering):
            last = call(chunk, last)
    return f"{last & 0xffffffff:x}"


def _hashsum(filename, chkname, buffering):
    h = hashlib.new(chkname)
    buffering *= h.block_size
    with io.open(filename, mode="rb") as fp:
        for chunk in bufread(fp, buffering):
            h.update(chunk)
    return h.hexdigest()


def checksum(filename, chkname, buffering=None):
    res = None
    buf = buffering or blksize(filename)
    if chkname in ("adler32", "crc32"):
        res = _crcsum(filename, chkname, buf)
    elif chkname in hashlib.algorithms_available:
        res = _hashsum(filename, chkname, buf)
    return res
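
# Usage sketch (file name illustrative): "adler32"/"crc32" go through zlib,
# anything hashlib knows goes through the hash path, unknown names yield None.
#
#   checksum("archive.zip", "crc32")   # e.g. '1c291ca3'
#   checksum("archive.zip", "sha256")  # 64-character hex digest
#   checksum("archive.zip", "foo")     # None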


def is_exec(filename):
    return os.path.isfile(filename) and os.access(filename, os.X_OK)


# def lopen(*args, **kwargs):
#     if kwargs.get("blocking", True):
#         flags = portalocker.LOCK_EX
#     else:
#         flags = portalocker.LOCK_EX | portalocker.LOCK_NB
#     fp = io.open(*args, **kwargs)
#     portalocker.lock(fp, flags)
#     return fp


def flush(filename, exist_ok=False):
    if not exist_ok and not os.path.exists(filename):
        raise OSError("Path does not exist")
    with io.open(filename) as fp:
        fp.flush()
        os.fsync(fp.fileno())


def merge(dst_file, src_file):
    with io.open(dst_file, mode="ab") as dfp:
        with io.open(src_file, mode="rb") as sfp:
            for chunk in bufread(sfp):
                dfp.write(chunk)
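
# Usage sketch (names illustrative): append src onto dst chunk by chunk.
#
#   merge("movie.part1", "movie.part2")  # movie.part1 now contains both parts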


def mountpoint(path):
    path = fullpath(path)
    rest = True
    while rest:
        if os.path.ismount(path):
            return path
        path, rest = path.rsplit(os.sep, 1)


# def filesystem(path):
#     mp = mountpoint(path)
#     fs = dict((part.mountpoint, part.fstype) for part in psutil.disk_partitions())
#     return fs.get(mp)


def mkfile(filename, size=None):
    if os.path.isfile(filename):
        raise OSError("Path already exists")
    with io.open(filename, mode="wb") as fp:
        if size and os.name == "nt":
            fp.truncate(size)


def makedirs(dirname, mode=0o777, exist_ok=False):
    try:
        os.makedirs(dirname, mode)
    except OSError as exc:
        if not os.path.isdir(dirname) or not exist_ok:
            raise OSError(exc)


def makefile(filepath, mode=0o700, size=None, exist_ok=False):
    dirname, _ = os.path.split(filepath)
    makedirs(dirname, mode, exist_ok=True)
    try:
        mkfile(filepath, size)
    except OSError as exc:
        if not os.path.isfile(filepath) or not exist_ok:
            raise OSError(exc)


def _moverc(src, dst, overwrite):
    exists = os.path.exists
    move = shutil.move
    removedirs = os.removedirs
    for src_dir, dirnames, filenames in os.walk(src):
        dst_dir = src_dir.replace(src, dst, 1)
        if exists(dst_dir):
            _shdorc(move, filenames, src_dir, dst_dir, overwrite)
        else:
            _shdo(move, src_dir, dst_dir, overwrite, dirnames)
        try:
            removedirs(src_dir)
        except Exception:
            pass


def move(src, dst, overwrite=None):
    if not os.path.isdir(dst) or not os.path.isdir(src):
        return _shdo(shutil.move, src, dst, overwrite)
    _moverc(src, dst, overwrite)
    try:
        os.rmdir(src)
    except Exception:
        pass


def mtime(path):
    getmtime = os.path.getmtime
    join = os.path.join
    if not os.path.isdir(path):
        return getmtime(path)
    mtimes = (
        getmtime(join(root, fname))
        for root, dirnames, filenames in os.walk(path)
        for fname in filenames
    )
    return max(0, 0, *mtimes)
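
# For a directory this walks the whole tree and returns the newest file
# modification time found (0 when the tree holds no files); for a single file
# it is plain os.path.getmtime. Sketch (path illustrative):
#
#   mtime("downloads")  # e.g. 1467321600.0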


def _cleanpy2(root, filenames):
    join = os.path.join
    remove = os.remove
    for fname in filenames:
        if fname[-4:] not in (".pyc", ".pyo", ".pyd"):
            continue
        try:
            remove(join(root, fname))
        except OSError:
            pass


def _cleanpy3(root, dirnames):
    name = "__pycache__"
    if name not in dirnames:
        return
    dirnames.remove(name)
    try:
        # __pycache__ is a directory, so it has to go through rmtree;
        # os.remove would always fail on it
        shutil.rmtree(os.path.join(root, name))
    except OSError:
        pass


def cleanpy(dirname, recursive=True):
    walk_it = os.walk(dirname)
    if not recursive:
        # keep only the top-level entry, wrapped in a list so the loop below
        # still iterates over (dirpath, dirnames, filenames) triples
        walk_it = [next(walk_it)]
    for dirpath, dirnames, filenames in walk_it:
        _cleanpy2(dirpath, filenames)
        _cleanpy3(dirpath, dirnames)
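
# Usage sketch (path illustrative): purge *.pyc/*.pyo/*.pyd files and
# __pycache__ directories beneath a tree.
#
#   cleanpy("src")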


def remove(path, try_trash=True):
    # path = os.fsdecode(path)
    if not os.path.exists(path):
        return
    if try_trash and send2trash is not None:
        # the send2trash import is guarded above; only use it when available,
        # otherwise fall through to plain deletion
        send2trash.send2trash(path)
    elif os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)
    else:
        os.remove(path)


def empty(path, try_trash=False, exist_ok=True):
    if not exist_ok and not os.path.exists(path):
        raise OSError("Path does not exist")
    if os.path.isfile(path):
        if try_trash:
            # park a copy of the file in the trash before truncating it
            origfile = path + ".orig"
            os.rename(path, origfile)
            shutil.copy2(origfile, path)
            remove(path, try_trash)
            os.rename(origfile, path)
        fp = io.open(path, mode="wb")
        fp.close()
    elif os.path.isdir(path):
        for name in os.listdir(path):
            # os.listdir returns bare names, so join with the parent path
            remove(os.path.join(path, name), try_trash)
    else:
        raise TypeError


def which(filename):
    try:
        return shutil.which(filename)  # NOTE: Available only under Python 3
    except AttributeError:
        pass
    dirname = os.path.dirname(filename)
    if dirname:
        return filename if is_exec(filename) else None
    for envpath in os.environ["PATH"].split(os.pathsep):
        # build each candidate from the original name instead of rebinding
        # `filename`, which would compound paths across iterations
        filepath = os.path.join(envpath.strip('"'), filename)
        if is_exec(filepath):
            return filepath
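
# Usage sketch (program name illustrative): uses shutil.which when available,
# otherwise falls back to a manual PATH scan.
#
#   which("curl")  # e.g. '/usr/bin/curl', or None when not found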
| agpl-3.0 | 1,738,767,629,270,754,000 | 24.263021 | 84 | 0.60602 | false |