Dataset columns, with types and observed value ranges:

| column | type | values |
| --- | --- | --- |
| hexsha | string | length 40 |
| size | int64 | 1 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 239 |
| max_stars_repo_name | string | length 5 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, may be null |
| max_stars_repo_stars_event_min_datetime | string | length 24, may be null |
| max_stars_repo_stars_event_max_datetime | string | length 24, may be null |
| max_issues_repo_path | string | length 3 to 239 |
| max_issues_repo_name | string | length 5 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k, may be null |
| max_issues_repo_issues_event_min_datetime | string | length 24, may be null |
| max_issues_repo_issues_event_max_datetime | string | length 24, may be null |
| max_forks_repo_path | string | length 3 to 239 |
| max_forks_repo_name | string | length 5 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, may be null |
| max_forks_repo_forks_event_min_datetime | string | length 24, may be null |
| max_forks_repo_forks_event_max_datetime | string | length 24, may be null |
| content | string | length 1 to 1.03M |
| avg_line_length | float64 | 1 to 958k |
| max_line_length | int64 | 1 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |

Each row below gives these fields in this order, separated by `|`, with the file's raw source text in the `content` field.
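For orientation, a minimal sketch of how rows with this schema can be consumed. The loading step is left out (any iterable of per-row dicts works, e.g. a JSON-lines reader), and the selected fields are just commonly useful ones; nothing in this snippet is part of the dataset itself.

```python
from typing import Dict, Iterable, Iterator

def iter_python_files(rows: Iterable[Dict]) -> Iterator[Dict]:
    """Yield a trimmed view of each row using the column names above."""
    for row in rows:
        # `content` holds the raw source text; the stars/issues/forks columns
        # are per-repository metadata variants for the same file.
        yield {
            "hexsha": row["hexsha"],
            "path": row["max_stars_repo_path"],
            "repo": row["max_stars_repo_name"],
            "licenses": row["max_stars_repo_licenses"],
            "stars": row["max_stars_count"],   # may be null/None
            "size_bytes": row["size"],
            "code": row["content"],
        }
```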
4a25f58d9111ba38b578740fd6a1a04f9c5f586d | 331 | py | Python | 238_Product of Array Except Self.py | carryking1988/carryleetcode | 9d6b353e8f235219d0b9e4feb131bfea6fe3ef21 | [
"MIT"
] | null | null | null | 238_Product of Array Except Self.py | carryking1988/carryleetcode | 9d6b353e8f235219d0b9e4feb131bfea6fe3ef21 | [
"MIT"
] | null | null | null | 238_Product of Array Except Self.py | carryking1988/carryleetcode | 9d6b353e8f235219d0b9e4feb131bfea6fe3ef21 | [
"MIT"
] | null | null | null | # Given an integer array nums, return an array answer such that answer[i] is equal to the product of all the elements of nums except nums[i].
#
# The product of any prefix or suffix of nums is guaranteed to fit in a 32-bit integer.
#
# You must write an algorithm that runs in O(n) time and without using the division operation.
#
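# A minimal O(n) sketch of one accepted approach (an added example; the original
# file above contains only the problem statement, no solution). Build prefix
# products left to right, then fold in suffix products right to left, so no
# division is needed.
from typing import List

class Solution:
    def productExceptSelf(self, nums: List[int]) -> List[int]:
        n = len(nums)
        answer = [1] * n
        prefix = 1
        for i in range(n):
            answer[i] = prefix          # product of nums[0..i-1]
            prefix *= nums[i]
        suffix = 1
        for i in range(n - 1, -1, -1):
            answer[i] *= suffix         # times product of nums[i+1..n-1]
            suffix *= nums[i]
        return answer

# Example: Solution().productExceptSelf([1, 2, 3, 4]) == [24, 12, 8, 6]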
| 47.285714 | 141 | 0.755287 |
4a25f5ec915d33af3684612cdc50a825841213d3 | 18,716 | py | Python | ddsp/spectral_ops.py | NiklasWan/ddsp | dda0320a7f5b8bc080623333c8be63ffb35500c9 | [
"Apache-2.0"
] | null | null | null | ddsp/spectral_ops.py | NiklasWan/ddsp | dda0320a7f5b8bc080623333c8be63ffb35500c9 | [
"Apache-2.0"
] | null | null | null | ddsp/spectral_ops.py | NiklasWan/ddsp | dda0320a7f5b8bc080623333c8be63ffb35500c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Library of FFT operations for loss functions and conditioning."""
import crepe
from ddsp.core import safe_log
from ddsp.core import tf_float32
import gin
import librosa
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
CREPE_SAMPLE_RATE = 16000
_CREPE_FRAME_SIZE = 1024
F0_RANGE = 127.0 # MIDI
LD_RANGE = 120.0 # dB
def stft(audio, frame_size=2048, overlap=0.75, pad_end=True):
"""Differentiable stft in tensorflow, computed in batch."""
assert frame_size * overlap % 2.0 == 0.0
# Remove channel dim if present.
audio = tf_float32(audio)
if len(audio.shape) == 3:
audio = tf.squeeze(audio, axis=-1)
s = tf.signal.stft(
signals=audio,
frame_length=int(frame_size),
frame_step=int(frame_size * (1.0 - overlap)),
fft_length=int(frame_size),
pad_end=pad_end)
return s
def stft_np(audio, frame_size=2048, overlap=0.75, pad_end=True):
"""Non-differentiable stft using librosa, one example at a time."""
assert frame_size * overlap % 2.0 == 0.0
hop_size = int(frame_size * (1.0 - overlap))
is_2d = (len(audio.shape) == 2)
if pad_end:
n_samples_initial = int(audio.shape[-1])
n_frames = int(np.ceil(n_samples_initial / hop_size))
n_samples_final = (n_frames - 1) * hop_size + frame_size
pad = n_samples_final - n_samples_initial
padding = ((0, 0), (0, pad)) if is_2d else ((0, pad),)
audio = np.pad(audio, padding, 'constant')
def stft_fn(y):
return librosa.stft(y=y,
n_fft=int(frame_size),
hop_length=hop_size,
center=False).T
s = np.stack([stft_fn(a) for a in audio]) if is_2d else stft_fn(audio)
return s
@gin.register
def compute_mag(audio, size=2048, overlap=0.75, pad_end=True):
mag = tf.abs(stft(audio, frame_size=size, overlap=overlap, pad_end=pad_end))
return tf_float32(mag)
@gin.register
def compute_mel(audio,
lo_hz=0.0,
hi_hz=8000.0,
bins=64,
fft_size=2048,
overlap=0.75,
pad_end=True,
sample_rate=16000):
"""Calculate Mel Spectrogram."""
mag = compute_mag(audio, fft_size, overlap, pad_end)
num_spectrogram_bins = int(mag.shape[-1])
linear_to_mel_matrix = tf.signal.linear_to_mel_weight_matrix(
bins, num_spectrogram_bins, sample_rate, lo_hz, hi_hz)
mel = tf.tensordot(mag, linear_to_mel_matrix, 1)
mel.set_shape(mag.shape[:-1].concatenate(linear_to_mel_matrix.shape[-1:]))
return mel
@gin.register
def compute_logmag(audio, size=2048, overlap=0.75, pad_end=True):
return safe_log(compute_mag(audio, size, overlap, pad_end))
@gin.register
def compute_logmel(audio,
lo_hz=80.0,
hi_hz=7600.0,
bins=64,
fft_size=2048,
overlap=0.75,
pad_end=True,
sample_rate=16000):
"""Logarithmic amplitude of mel-scaled spectrogram."""
mel = compute_mel(audio, lo_hz, hi_hz, bins,
fft_size, overlap, pad_end, sample_rate)
return safe_log(mel)
@gin.register
def compute_mfcc(audio,
lo_hz=20.0,
hi_hz=8000.0,
fft_size=1024,
mel_bins=128,
mfcc_bins=13,
overlap=0.75,
pad_end=True,
sample_rate=16000):
"""Calculate Mel-frequency Cepstral Coefficients."""
logmel = compute_logmel(
audio,
lo_hz=lo_hz,
hi_hz=hi_hz,
bins=mel_bins,
fft_size=fft_size,
overlap=overlap,
pad_end=pad_end,
sample_rate=sample_rate)
mfccs = tf.signal.mfccs_from_log_mel_spectrograms(logmel)
return mfccs[..., :mfcc_bins]
def diff(x, axis=-1):
"""Take the finite difference of a tensor along an axis.
Args:
x: Input tensor of any dimension.
axis: Axis on which to take the finite difference.
Returns:
d: Tensor with size less than x by 1 along the difference dimension.
Raises:
ValueError: Axis out of range for tensor.
"""
shape = x.shape.as_list()
ndim = len(shape)
if axis >= ndim:
raise ValueError('Invalid axis index: %d for tensor with only %d axes.' %
(axis, ndim))
begin_back = [0 for _ in range(ndim)]
begin_front = [0 for _ in range(ndim)]
begin_front[axis] = 1
shape[axis] -= 1
slice_front = tf.slice(x, begin_front, shape)
slice_back = tf.slice(x, begin_back, shape)
d = slice_front - slice_back
return d
def amplitude_to_db(amplitude, use_tf=False):
"""Converts amplitude to decibels."""
lib = tf if use_tf else np
log10 = (lambda x: tf.math.log(x) / tf.math.log(10.0)) if use_tf else np.log10
amin = 1e-20 # Avoid log(0) instabilities.
db = log10(lib.maximum(amin, amplitude))
db *= 20.0
return db
def db_to_amplitude(db):
"""Converts decibels to amplitude."""
return 10.0**(db / 20.0)
@gin.register
def compute_loudness(audio,
sample_rate=16000,
frame_rate=250,
n_fft=2048,
range_db=LD_RANGE,
ref_db=20.7,
use_tf=False,
pad_end=True):
"""Perceptual loudness in dB, relative to white noise, amplitude=1.
Function is differentiable if use_tf=True.
Args:
audio: Numpy ndarray or tensor. Shape [batch_size, audio_length] or
[batch_size,].
sample_rate: Audio sample rate in Hz.
frame_rate: Rate of loudness frames in Hz.
n_fft: Fft window size.
    range_db: Sets the dynamic range of loudness in decibels. The minimum
      loudness (per frequency bin) corresponds to -range_db.
ref_db: Sets the reference maximum perceptual loudness as given by
(A_weighting + 10 * log10(abs(stft(audio))**2.0). The default value
corresponds to white noise with amplitude=1.0 and n_fft=2048. There is a
slight dependence on fft_size due to different granularity of perceptual
weighting.
use_tf: Make function differentiable by using tensorflow.
pad_end: Add zero padding at end of audio (like `same` convolution).
Returns:
Loudness in decibels. Shape [batch_size, n_frames] or [n_frames,].
"""
if sample_rate % frame_rate != 0:
raise ValueError(
'frame_rate: {} must evenly divide sample_rate: {}.'
'For default frame_rate: 250Hz, suggested sample_rate: 16kHz or 48kHz'
.format(frame_rate, sample_rate))
# Pick tensorflow or numpy.
lib = tf if use_tf else np
# Make inputs tensors for tensorflow.
audio = tf_float32(audio) if use_tf else audio
# Temporarily a batch dimension for single examples.
is_1d = (len(audio.shape) == 1)
audio = audio[lib.newaxis, :] if is_1d else audio
# Take STFT.
hop_size = sample_rate // frame_rate
overlap = 1 - hop_size / n_fft
stft_fn = stft if use_tf else stft_np
s = stft_fn(audio, frame_size=n_fft, overlap=overlap, pad_end=pad_end)
# Compute power.
amplitude = lib.abs(s)
power_db = amplitude_to_db(amplitude, use_tf=use_tf)
# Perceptual weighting.
frequencies = librosa.fft_frequencies(sr=sample_rate, n_fft=n_fft)
a_weighting = librosa.A_weighting(frequencies)[lib.newaxis, lib.newaxis, :]
loudness = power_db + a_weighting
# Set dynamic range.
loudness -= ref_db
loudness = lib.maximum(loudness, -range_db)
mean = tf.reduce_mean if use_tf else np.mean
# Average over frequency bins.
loudness = mean(loudness, axis=-1)
# Remove temporary batch dimension.
loudness = loudness[0] if is_1d else loudness
audio_len = tf.cast(tf.shape(audio)[-1], dtype=tf.float32) if use_tf else audio.shape[-1]
# Compute expected length of loudness vector
n_secs = audio_len / float(
sample_rate) # `n_secs` can have milliseconds
expected_len = int(n_secs * frame_rate)
# Pad with `-range_db` noise floor or trim vector
loudness = pad_or_trim_to_expected_length(
loudness, expected_len, -range_db, use_tf=use_tf)
return loudness
@gin.register
def compute_f0(audio, sample_rate, frame_rate, viterbi=True):
"""Fundamental frequency (f0) estimate using CREPE.
This function is non-differentiable and takes input as a numpy array.
Args:
audio: Numpy ndarray of single audio example. Shape [audio_length,].
sample_rate: Sample rate in Hz.
frame_rate: Rate of f0 frames in Hz.
viterbi: Use Viterbi decoding to estimate f0.
Returns:
f0_hz: Fundamental frequency in Hz. Shape [n_frames,].
f0_confidence: Confidence in Hz estimate (scaled [0, 1]). Shape [n_frames,].
"""
n_secs = len(audio) / float(sample_rate) # `n_secs` can have milliseconds
crepe_step_size = 1000 / frame_rate # milliseconds
expected_len = int(n_secs * frame_rate)
audio = np.asarray(audio)
# Compute f0 with crepe.
_, f0_hz, f0_confidence, _ = crepe.predict(
audio,
sr=sample_rate,
viterbi=viterbi,
step_size=crepe_step_size,
center=False,
verbose=0)
# Postprocessing on f0_hz
f0_hz = pad_or_trim_to_expected_length(f0_hz, expected_len, 0) # pad with 0
f0_hz = f0_hz.astype(np.float32)
# Postprocessing on f0_confidence
f0_confidence = pad_or_trim_to_expected_length(f0_confidence, expected_len, 1)
f0_confidence = np.nan_to_num(f0_confidence) # Set nans to 0 in confidence
f0_confidence = f0_confidence.astype(np.float32)
return f0_hz, f0_confidence
def compute_rms_energy(audio,
sample_rate=16000,
frame_rate=250,
frame_size=2048,
pad_end=True):
"""Compute root mean squared energy of audio."""
audio = tf_float32(audio)
hop_size = sample_rate // frame_rate
audio_frames = tf.signal.frame(audio, frame_size, hop_size, pad_end=pad_end)
rms_energy = tf.reduce_mean(audio_frames**2.0, axis=-1)**0.5
if pad_end:
n_samples = audio.shape[0] if len(audio.shape) == 1 else audio.shape[1]
n_secs = n_samples / float(sample_rate) # `n_secs` can have milliseconds
expected_len = int(n_secs * frame_rate)
return pad_or_trim_to_expected_length(rms_energy, expected_len, use_tf=True)
else:
return rms_energy
def compute_power(audio,
sample_rate=16000,
frame_rate=250,
frame_size=1024,
range_db=LD_RANGE,
ref_db=20.7,
pad_end=True):
"""Compute power of audio in dB."""
# TODO(hanoih@): enable `use_tf` to be True or False like `compute_loudness`
rms_energy = compute_rms_energy(
audio, sample_rate, frame_rate, frame_size, pad_end)
power_db = amplitude_to_db(rms_energy**2, use_tf=True)
# Set dynamic range.
power_db -= ref_db
power_db = tf.maximum(power_db, -range_db)
return power_db
def pad_or_trim_to_expected_length(vector,
expected_len,
pad_value=0,
len_tolerance=20,
use_tf=False):
"""Make vector equal to the expected length.
Feature extraction functions like `compute_loudness()` or `compute_f0` produce
feature vectors that vary in length depending on factors such as `sample_rate`
or `hop_size`. This function corrects vectors to the expected length, warning
the user if the difference between the vector and expected length was
unusually high to begin with.
Args:
vector: Numpy 1D ndarray. Shape [vector_length,]
expected_len: Expected length of vector.
pad_value: Value to pad at end of vector.
len_tolerance: Tolerance of difference between original and desired vector
length.
use_tf: Make function differentiable by using tensorflow.
Returns:
vector: Vector with corrected length.
Raises:
ValueError: if `len(vector)` is different from `expected_len` beyond
`len_tolerance` to begin with.
"""
expected_len = int(expected_len)
vector_len = int(tf.shape(vector)[-1]) if use_tf else int(vector.shape[-1])
if not use_tf and abs(vector_len - expected_len) > len_tolerance:
# Ensure vector was close to expected length to begin with
raise ValueError('Vector length: {} differs from expected length: {} '
'beyond tolerance of : {}'.format(vector_len,
expected_len,
len_tolerance))
# Pick tensorflow or numpy.
lib = tf if use_tf else np
is_1d = tf.size(tf.shape(vector)) if use_tf else (len(vector.shape) == 1)
vector = vector[lib.newaxis, :] if is_1d else vector
# Pad missing samples
if vector_len < expected_len:
n_padding = expected_len - vector_len
vector = lib.pad(
vector, ((0, 0), (0, n_padding)),
mode='constant',
constant_values=pad_value)
# Trim samples
elif vector_len > expected_len:
vector = vector[..., :expected_len]
# Remove temporary batch dimension.
vector = vector[0] if is_1d else vector
return vector
def reset_crepe():
"""Reset the global state of CREPE to force model re-building."""
for k in crepe.core.models:
crepe.core.models[k] = None
class PretrainedCREPE(tf.keras.Model):
"""A wrapper around a pretrained CREPE model, for pitch prediction.
Enables predicting pitch and confidence entirely in TF for running in batch
on accelerators. For [full,large,small,tiny] crepe models, reads h5 models
  from installed pip package. Other saved models can be loaded from a file path.
"""
def __init__(self,
model_size_or_path,
hop_size=160,
**kwargs):
super().__init__(**kwargs)
self.hop_size = hop_size
self.frame_size = 1024
self.sample_rate = 16000
# Load the crepe model.
if model_size_or_path in ['full', 'large', 'small', 'tiny']:
self.core_model = crepe.core.build_and_load_model(model_size_or_path)
else:
self.core_model = tf.keras.models.load_model(model_size_or_path)
self.model_size_or_path = model_size_or_path
@classmethod
def activations_to_f0_and_confidence(cls, activations, centers=None):
"""Convert network outputs (activations) to f0 predictions."""
cent_mapping = tf.cast(
tf.linspace(0, 7180, 360) + 1997.3794084376191, tf.float32)
# The confidence of voicing activity and the argmax bin.
confidence = tf.reduce_max(activations, axis=-1, keepdims=True)
if centers is None:
centers = tf.math.argmax(activations, axis=-1)
centers = tf.cast(centers, tf.int32)
# Slice the local neighborhood around the argmax bin.
start = centers - 4
idx_list = tf.range(0, 10)
idx_list = start[:, None] + idx_list[None, :]
# Bound to [0, 359].
idx_list = tf.where(idx_list > 0, idx_list, 0)
idx_list = tf.where(idx_list < 359, idx_list, 359)
# Gather and weight activations.
weights = tf.gather(activations, idx_list, batch_dims=1)
cents = tf.gather(cent_mapping, idx_list, batch_dims=0)
f0_cent = tf.reduce_sum(weights * cents, axis=-1) / tf.reduce_sum(
weights, axis=-1)
f0_hz = 10 * 2**(f0_cent / 1200.)
return f0_hz, confidence
def batch_frames(self, audio):
"""Chop audio into overlapping frames, and push to batch dimension."""
if audio.shape[-1] == self.frame_size:
return audio
else:
frames = tf.signal.frame(audio, self.frame_size, self.hop_size)
frames = tf.reshape(frames, [-1, self.frame_size])
return frames
def normalize_frames(self, frames):
"""Normalize each frame -- this is expected by the model."""
mu, var = tf.nn.moments(frames, axes=[-1])
std = tf.where(tf.abs(var) > 0, tf.sqrt(var), 1e-8)
frames -= mu[:, None]
frames /= std[:, None]
return frames
def predict_f0_and_confidence(self, audio, viterbi=False):
audio = audio[None, :] if len(audio.shape) == 1 else audio
batch_size = audio.shape[0]
frames = self.batch_frames(audio)
frames = self.normalize_frames(frames)
acts = self.core_model(frames, training=False)
if viterbi:
acts_viterbi = tf.reshape(acts, [batch_size, -1, 360])
centers = self.viterbi_decode(acts_viterbi)
centers = tf.reshape(centers, [-1])
else:
centers = None
f0_hz, confidence = self.activations_to_f0_and_confidence(acts, centers)
f0_hz = tf.reshape(f0_hz, [batch_size, -1])
confidence = tf.reshape(confidence, [batch_size, -1])
return f0_hz, confidence
def create_hmm(self, num_steps):
"""Same as the original CREPE viterbi decdoding, but in TF."""
# Initial distribution is uniform.
initial_distribution = tfp.distributions.Categorical(
probs=tf.ones([360]) / 360)
# Transition probabilities inducing continuous pitch.
bins = tf.range(360, dtype=tf.float32)
xx, yy = tf.meshgrid(bins, bins)
    min_transition = 1e-5  # For training stability.
transition = tf.maximum(12 - abs(xx - yy), min_transition)
transition = transition / tf.reduce_sum(transition, axis=1)[:, None]
transition = tf.cast(transition, tf.float32)
transition_distribution = tfp.distributions.Categorical(
probs=transition)
# Emission probability = fixed probability for self, evenly distribute the
# others.
self_emission = 0.1
emission = (
tf.eye(360) * self_emission + tf.ones(shape=(360, 360)) *
((1 - self_emission) / 360.)
)
emission = tf.cast(emission, tf.float32)[None, ...]
observation_distribution = tfp.distributions.Multinomial(
total_count=1, probs=emission)
return tfp.distributions.HiddenMarkovModel(
initial_distribution=initial_distribution,
transition_distribution=transition_distribution,
observation_distribution=observation_distribution,
num_steps=num_steps,
)
def viterbi_decode(self, acts):
"""Adapted from original CREPE viterbi decdoding, but in TF."""
num_steps = acts.shape[1]
hmm = self.create_hmm(num_steps)
centers = hmm.posterior_mode(acts)
return centers
| 33.905797 | 91 | 0.666008 |
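A minimal usage sketch for the extractors defined in `ddsp/spectral_ops.py` above; it is not part of the repository and assumes the fork keeps the upstream `ddsp` package layout with the `crepe`, `librosa` and TensorFlow dependencies installed.

```python
import numpy as np
from ddsp import spectral_ops

sample_rate = 16000
frame_rate = 250
# Four seconds of placeholder mono audio in [-1, 1].
audio = np.random.uniform(-1.0, 1.0, size=4 * sample_rate).astype(np.float32)

# Frame-rate conditioning features: perceptual loudness (dB) and CREPE f0.
loudness_db = spectral_ops.compute_loudness(
    audio, sample_rate=sample_rate, frame_rate=frame_rate)
f0_hz, f0_confidence = spectral_ops.compute_f0(audio, sample_rate, frame_rate)

# Log-mel spectrogram of the kind used for reconstruction losses.
logmel = spectral_ops.compute_logmel(audio[np.newaxis, :], sample_rate=sample_rate)
print(loudness_db.shape, f0_hz.shape, logmel.shape)
```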
4a25f6088b0ac27e5cdbf3cfe37d691460b2f097 | 2,747 | py | Python | src/main.py | seaniedan/enviroplus-mqtt | f4e877b4301fc026c73f96cc85993f62d9143bba | [
"MIT"
] | null | null | null | src/main.py | seaniedan/enviroplus-mqtt | f4e877b4301fc026c73f96cc85993f62d9143bba | [
"MIT"
] | null | null | null | src/main.py | seaniedan/enviroplus-mqtt | f4e877b4301fc026c73f96cc85993f62d9143bba | [
"MIT"
] | null | null | null | import argparse, time, sys
from logger import EnvLogger
def parse_args():
ap = argparse.ArgumentParser(add_help=False)
ap.add_argument("-h", "--host", required=True, help="the MQTT host to connect to")
ap.add_argument("-p", "--port", type=int, default=1883, help="the port on the MQTT host to connect to")
ap.add_argument("-U", "--username", default=None, help="the MQTT username to connect with")
ap.add_argument("-P", "--password", default=None, help="the password to connect with")
ap.add_argument("--prefix", default="", help="the topic prefix to use when publishing readings, i.e. 'lounge/enviroplus'")
ap.add_argument("--client-id", default="", help="the MQTT client identifier to use when connecting")
ap.add_argument("--interval", type=int, default=5, help="the duration in seconds between updates")
ap.add_argument("--delay", type=int, default=15, help="the duration in seconds to allow the sensors to stabilise before starting to publish readings")
ap.add_argument("--use-pms5003", action="store_true", help="if set, PM readings will be taken from the PMS5003 sensor")
ap.add_argument("-r", "--retain", action='store_true', help="tell MQTT broker to retain the last message")
ap.add_argument("--help", action="help", help="print this help message and exit")
return vars(ap.parse_args())
def main():
args = parse_args()
# Initialise the logger
logger = EnvLogger(
client_id=args["client_id"],
host=args["host"],
port=args["port"],
username=args["username"],
password=args["password"],
prefix=args["prefix"],
use_pms5003=args["use_pms5003"],
num_samples=args["interval"],
retain=args["retain"],
)
# Take readings without publishing them for the specified delay period,
# to allow the sensors time to warm up and stabilise
publish_start_time = time.time() + args["delay"]
while time.time() < publish_start_time:
logger.update(publish_readings=False)
time.sleep(1)
# Start taking readings and publishing them at the specified interval
next_sample_time = time.time()
next_publish_time = time.time() + args["interval"]
while True:
if logger.connection_error is not None:
sys.exit(f"Connecting to the MQTT server failed: {logger.connection_error}")
should_publish = time.time() >= next_publish_time
if should_publish:
next_publish_time += args["interval"]
logger.update(publish_readings=should_publish)
next_sample_time += 1
sleep_duration = max(next_sample_time - time.time(), 0)
time.sleep(sleep_duration)
if __name__ == "__main__":
main()
| 41.621212 | 154 | 0.67055 |
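A programmatic sketch of the same sampling/publishing loop, not part of the repository; the broker host, credentials and topic prefix are placeholders, and `EnvLogger` plus its keyword arguments are taken from the code above.

```python
import time
from logger import EnvLogger

logger = EnvLogger(
    client_id="enviro-sketch",
    host="broker.local",            # placeholder MQTT broker
    port=1883,
    username=None,
    password=None,
    prefix="lounge/enviroplus",     # placeholder topic prefix
    use_pms5003=False,
    num_samples=5,
    retain=False,
)

# Warm-up: sample without publishing so the sensors can stabilise.
for _ in range(15):
    logger.update(publish_readings=False)
    time.sleep(1)

# Sample every second, publish once per 5-second interval.
next_publish = time.time() + 5
while True:
    should_publish = time.time() >= next_publish
    if should_publish:
        next_publish += 5
    logger.update(publish_readings=should_publish)
    time.sleep(1)
```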
4a25f6cc7059df1fa928495d77d2ab3be8355c51 | 11,999 | py | Python | tests/python/pants_test/backend/jvm/tasks/test_checkstyle_integration.py | viktortnk/pants | 54c98206de5ac9aadfe26d83175f472941be6c7d | [
"Apache-2.0"
] | 1 | 2020-06-13T22:01:39.000Z | 2020-06-13T22:01:39.000Z | tests/python/pants_test/backend/jvm/tasks/test_checkstyle_integration.py | viktortnk/pants | 54c98206de5ac9aadfe26d83175f472941be6c7d | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/jvm/tasks/test_checkstyle_integration.py | viktortnk/pants | 54c98206de5ac9aadfe26d83175f472941be6c7d | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import os
import shutil
from contextlib import contextmanager
from textwrap import dedent
from pants.base.build_environment import get_buildroot
from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest, ensure_cached
from pants.util.contextutil import temporary_dir
class CheckstyleIntegrationTest(PantsRunIntegrationTest):
def _create_config_file(self, filepath, rules_xml=""):
with open(filepath, "w") as f:
f.write(
dedent(
"""<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
{rules_xml}
</module>""".format(
rules_xml=rules_xml
)
)
)
@ensure_cached(expected_num_artifacts=2)
def test_config_invalidates_targets(self, cache_args):
with self.temporary_workdir() as workdir:
with temporary_dir(root_dir=get_buildroot()) as tmp:
configs = [
dedent(
"""
<module name="TreeWalker">
<property name="tabWidth" value="2"/>
</module>"""
),
dedent(
"""
<module name="TreeWalker">
<module name="LineLength">
<property name="max" value="100"/>
</module>
</module>"""
),
]
for config in configs:
# Ensure that even though the config files have the same name, their
# contents will invalidate the targets.
config_file = os.path.join(tmp, "config.xml")
self._create_config_file(config_file, config)
args = [
"clean-all",
f"--checkstyle-config={config_file}",
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@ensure_cached(expected_num_artifacts=2)
def test_config_name_invalidates_targets(self, cache_args):
with self.temporary_workdir() as workdir:
with temporary_dir(root_dir=get_buildroot()) as tmp:
config_names = ["one.xml", "two.xml"]
config = dedent(
"""
<module name="TreeWalker">
<property name="tabWidth" value="2"/>
</module>"""
)
for config_name in config_names:
# Ensure that even though the config files have the same name, their contents will
# invalidate the targets.
config_file = os.path.join(tmp, config_name)
self._create_config_file(config_file, config)
args = [
f"--checkstyle-config={config_file}",
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@contextmanager
def _temporary_buildroot(self, files_to_copy, current_root=None):
if current_root is None:
current_root = get_buildroot()
files_to_copy = set(files_to_copy)
files_to_copy.update(
f for f in os.listdir(current_root) if f.endswith(".toml") or f.startswith("BUILD")
)
files_to_copy.update(
("pants", "3rdparty", "build-support", "src", ".isort.cfg", "pyproject.toml")
)
with temporary_dir() as temp_root:
temp_root = os.path.normpath(temp_root)
for path in files_to_copy:
src = os.path.join(current_root, path)
dst = os.path.join(temp_root, path)
if os.path.isdir(path):
shutil.copytree(src, dst)
else:
shutil.copyfile(src, dst)
current = os.getcwd()
try:
os.chdir(temp_root)
temp_root = os.getcwd()
yield temp_root
finally:
os.chdir(current)
def _temporary_buildroots(self, files_to_copy=None, current_root=None, iterations=2):
while iterations:
with self._temporary_buildroot(files_to_copy, current_root) as root:
yield root
iterations -= 1
@ensure_cached(expected_num_artifacts=1)
def test_config_buildroot_does_not_invalidate_targets(self, cache_args):
previous_names = set()
for buildroot in self._temporary_buildroots(["examples"]):
with temporary_dir(root_dir=buildroot, prefix=".pants.d", suffix=".pants.d") as workdir:
tmp = os.path.join(buildroot, "tmp")
os.mkdir(tmp)
config = dedent(
"""
<module name="TreeWalker">
<property name="tabWidth" value="2"/>
</module>"""
)
# Ensure that even though the config files have the same name, their
# contents will invalidate the targets.
config_file = os.path.join(tmp, "one.xml")
self.assertNotIn(config_file, previous_names)
previous_names.add(config_file)
self._create_config_file(config_file, config)
args = [
f"--checkstyle-config={config_file}",
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@ensure_cached(expected_num_artifacts=1)
def test_properties_file_names_does_not_invalidates_targets(self, cache_args):
with self.temporary_workdir() as workdir:
with temporary_dir(root_dir=get_buildroot()) as tmp:
suppression_names = ["one-suppress.xml", "two-suppress.xml"]
suppression_data = dedent(
"""
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
<suppressions>
<suppress files=".*/bad-files/.*\\.java" checks=".*"/>
</suppressions>
"""
).strip()
for suppression_name in suppression_names:
suppression_file = os.path.join(tmp, suppression_name)
self._create_config_file(suppression_file, suppression_data)
properties = {
"checkstyle.suppression.files": suppression_file,
}
args = [
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
f"--lint-checkstyle-properties={json.dumps(properties)}",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@ensure_cached(expected_num_artifacts=2)
def test_properties_file_contents_invalidates_targets(self, cache_args):
with self.temporary_workdir() as workdir:
with temporary_dir(root_dir=get_buildroot()) as tmp:
suppression_files = [
dedent(
"""
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
<suppressions>
<suppress files=".*/bad-files/.*\\.java" checks=".*"/>
</suppressions>
"""
).strip(),
dedent(
"""
<?xml version="1.0"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.1//EN"
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
<suppressions>
<suppress files=".*/bad-files/.*\\.java" checks=".*"/>
<suppress files=".*/really-bad-files/.*\\.java" checks=".*"/>
</suppressions>
"""
).strip(),
]
for suppressions in suppression_files:
suppression_file = os.path.join(tmp, "suppressions.xml")
self._create_config_file(suppression_file, suppressions)
properties = {
"checkstyle.suppression.files": suppression_file,
}
args = [
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
f"--lint-checkstyle-properties={json.dumps(properties)}",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@ensure_cached(expected_num_artifacts=2)
def test_properties_nonfile_values_invalidates_targets(self, cache_args):
with self.temporary_workdir() as workdir:
with temporary_dir(root_dir=get_buildroot()):
values = ["this-is-not-a-file", "37"]
for value in values:
properties = {
"my.value": value,
}
args = [
"lint.checkstyle",
cache_args,
"examples/src/java/org/pantsbuild/example/hello/simple",
f"--lint-checkstyle-properties={json.dumps(properties)}",
]
pants_run = self.run_pants_with_workdir(args, workdir)
self.assert_success(pants_run)
@ensure_cached(expected_num_artifacts=2)
def test_jvm_tool_changes_invalidate_targets(self, cache_args):
with self.temporary_workdir() as workdir:
# Ensure that only the second use of the default checkstyle will not invalidate anything.
for checkstyle_jar in (None, "testprojects/3rdparty/checkstyle", None):
args = [
"lint.checkstyle",
cache_args,
f"--checkstyle={checkstyle_jar}" if checkstyle_jar else "",
"examples/src/java/org/pantsbuild/example/hello/simple",
]
pants_run = self.run_pants_with_workdir(args, workdir)
print(pants_run.stdout_data)
self.assert_success(pants_run)
| 44.440741 | 102 | 0.506376 |
4a25f820b9e1ba7aeed022d658eada72c8e765e8 | 1,616 | py | Python | addons/account/models/digest.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/account/models/digest.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/account/models/digest.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, _
from odoo.exceptions import AccessError
class Digest(models.Model):
_inherit = 'digest.digest'
kpi_account_total_revenue = fields.Boolean('Revenue')
kpi_account_total_revenue_value = fields.Monetary(compute='_compute_kpi_account_total_revenue_value')
def _compute_kpi_account_total_revenue_value(self):
if not self.env.user.has_group('account.group_account_invoice'):
raise AccessError(_("Do not have access, skip this data for user's digest email"))
for record in self:
start, end, company = record._get_kpi_compute_parameters()
self._cr.execute('''
SELECT -SUM(line.balance)
FROM account_move_line line
JOIN account_move move ON move.id = line.move_id
JOIN account_account account ON account.id = line.account_id
WHERE line.company_id = %s AND line.date >= %s AND line.date < %s
AND account.internal_group = 'income'
AND move.state = 'posted'
''', [company.id, start, end])
query_res = self._cr.fetchone()
record.kpi_account_total_revenue_value = query_res and query_res[0] or 0.0
def _compute_kpis_actions(self, company, user):
res = super(Digest, self)._compute_kpis_actions(company, user)
res['kpi_account_total_revenue'] = 'account.action_move_out_invoice_type&menu_id=%s' % self.env.ref('account.menu_finance').id
return res
| 46.171429 | 134 | 0.668936 |
4a25f993a94138e68ea0906b7283f5c3a07de516 | 4,023 | py | Python | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/ns/nssurgeq.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/ns/nssurgeq.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/ns/nssurgeq.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nssurgeq(base_resource) :
""" Configuration for surge queue resource. """
def __init__(self) :
self._name = None
self._servername = None
self._port = None
@property
def name(self) :
r"""Name of a virtual server, service or service group for which the SurgeQ must be flushed.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
r"""Name of a virtual server, service or service group for which the SurgeQ must be flushed.
"""
try :
self._name = name
except Exception as e:
raise e
@property
def servername(self) :
r"""Name of a service group member. This argument is needed when you want to flush the SurgeQ of a service group.
"""
try :
return self._servername
except Exception as e:
raise e
@servername.setter
def servername(self, servername) :
r"""Name of a service group member. This argument is needed when you want to flush the SurgeQ of a service group.
"""
try :
self._servername = servername
except Exception as e:
raise e
@property
def port(self) :
r"""port on which server is bound to the entity(Servicegroup).<br/>Range 1 - 65535<br/>* in CLI is represented as 65535 in NITRO API.
"""
try :
return self._port
except Exception as e:
raise e
@port.setter
def port(self, port) :
r"""port on which server is bound to the entity(Servicegroup).<br/>Range 1 - 65535<br/>* in CLI is represented as 65535 in NITRO API
"""
try :
self._port = port
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nssurgeq_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nssurgeq
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def flush(cls, client, resource) :
r""" Use this API to flush nssurgeq.
"""
try :
if type(resource) is not list :
flushresource = nssurgeq()
flushresource.name = resource.name
flushresource.servername = resource.servername
flushresource.port = resource.port
return flushresource.perform_operation(client,"flush")
except Exception as e :
raise e
class nssurgeq_response(base_response) :
def __init__(self, length=1) :
self.nssurgeq = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.nssurgeq = [nssurgeq() for _ in range(length)]
| 29.8 | 135 | 0.717127 |
4a25fa3c9889a515690312af4bd98726d0c0308a | 21,915 | py | Python | homeassistant/components/zha/light.py | dlintott/core | a6c83cc46a34084fdc4c0e7221b6ba493f82cbac | [
"Apache-2.0"
] | 1 | 2021-01-27T08:47:19.000Z | 2021-01-27T08:47:19.000Z | homeassistant/components/zha/light.py | dlintott/core | a6c83cc46a34084fdc4c0e7221b6ba493f82cbac | [
"Apache-2.0"
] | 48 | 2020-10-11T08:00:05.000Z | 2022-03-31T06:02:23.000Z | homeassistant/components/zha/light.py | dlintott/core | a6c83cc46a34084fdc4c0e7221b6ba493f82cbac | [
"Apache-2.0"
] | 2 | 2017-10-13T21:54:28.000Z | 2018-02-24T23:48:21.000Z | """Lights on Zigbee Home Automation networks."""
from collections import Counter
from datetime import timedelta
import functools
import itertools
import logging
import random
from typing import Any, Dict, List, Optional, Tuple
from zigpy.zcl.clusters.general import Identify, LevelControl, OnOff
from zigpy.zcl.clusters.lighting import Color
from zigpy.zcl.foundation import Status
from homeassistant.components import light
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_EFFECT_LIST,
ATTR_HS_COLOR,
ATTR_MAX_MIREDS,
ATTR_MIN_MIREDS,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import State, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.event import async_track_time_interval
import homeassistant.util.color as color_util
from .core import discovery, helpers
from .core.const import (
CHANNEL_COLOR,
CHANNEL_LEVEL,
CHANNEL_ON_OFF,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
EFFECT_BLINK,
EFFECT_BREATHE,
EFFECT_DEFAULT_VARIANT,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
SIGNAL_SET_LEVEL,
)
from .core.helpers import LogMixin
from .core.registries import ZHA_ENTITIES
from .core.typing import ZhaDeviceType
from .entity import ZhaEntity, ZhaGroupEntity
_LOGGER = logging.getLogger(__name__)
CAPABILITIES_COLOR_LOOP = 0x4
CAPABILITIES_COLOR_XY = 0x08
CAPABILITIES_COLOR_TEMP = 0x10
UPDATE_COLORLOOP_ACTION = 0x1
UPDATE_COLORLOOP_DIRECTION = 0x2
UPDATE_COLORLOOP_TIME = 0x4
UPDATE_COLORLOOP_HUE = 0x8
FLASH_EFFECTS = {light.FLASH_SHORT: EFFECT_BLINK, light.FLASH_LONG: EFFECT_BREATHE}
UNSUPPORTED_ATTRIBUTE = 0x86
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, light.DOMAIN)
GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, light.DOMAIN)
PARALLEL_UPDATES = 0
SIGNAL_LIGHT_GROUP_STATE_CHANGED = "zha_light_group_state_changed"
SUPPORT_GROUP_LIGHT = (
SUPPORT_BRIGHTNESS
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_FLASH
| SUPPORT_COLOR
| SUPPORT_TRANSITION
| SUPPORT_WHITE_VALUE
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation light from config entry."""
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
class BaseLight(LogMixin, light.LightEntity):
"""Operations common to all light entities."""
def __init__(self, *args, **kwargs):
"""Initialize the light."""
super().__init__(*args, **kwargs)
self._available: bool = False
self._brightness: Optional[int] = None
self._off_brightness: Optional[int] = None
self._hs_color: Optional[Tuple[float, float]] = None
self._color_temp: Optional[int] = None
self._min_mireds: Optional[int] = 153
self._max_mireds: Optional[int] = 500
self._white_value: Optional[int] = None
self._effect_list: Optional[List[str]] = None
self._effect: Optional[str] = None
self._supported_features: int = 0
self._state: bool = False
self._on_off_channel = None
self._level_channel = None
self._color_channel = None
self._identify_channel = None
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return state attributes."""
attributes = {"off_brightness": self._off_brightness}
return attributes
@property
def is_on(self) -> bool:
"""Return true if entity is on."""
if self._state is None:
return False
return self._state
@property
def brightness(self):
"""Return the brightness of this light."""
return self._brightness
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._max_mireds
def set_level(self, value):
"""Set the brightness of this light between 0..254.
brightness level 255 is a special value instructing the device to come
on at `on_level` Zigbee attribute value, regardless of the last set
level
"""
value = max(0, min(254, value))
self._brightness = value
self.async_write_ha_state()
@property
def hs_color(self):
"""Return the hs color value [int, int]."""
return self._hs_color
@property
def color_temp(self):
"""Return the CT color value in mireds."""
return self._color_temp
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effect_list
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
transition = kwargs.get(light.ATTR_TRANSITION)
duration = transition * 10 if transition else 1
brightness = kwargs.get(light.ATTR_BRIGHTNESS)
effect = kwargs.get(light.ATTR_EFFECT)
flash = kwargs.get(light.ATTR_FLASH)
if brightness is None and self._off_brightness is not None:
brightness = self._off_brightness
t_log = {}
if (
brightness is not None or transition
) and self._supported_features & light.SUPPORT_BRIGHTNESS:
if brightness is not None:
level = min(254, brightness)
else:
level = self._brightness or 254
result = await self._level_channel.move_to_level_with_on_off(
level, duration
)
t_log["move_to_level_with_on_off"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._state = bool(level)
if level:
self._brightness = level
if brightness is None or brightness:
# since some lights don't always turn on with move_to_level_with_on_off,
# we should call the on command on the on_off cluster if brightness is not 0.
result = await self._on_off_channel.on()
t_log["on_off"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._state = True
if (
light.ATTR_COLOR_TEMP in kwargs
and self.supported_features & light.SUPPORT_COLOR_TEMP
):
temperature = kwargs[light.ATTR_COLOR_TEMP]
result = await self._color_channel.move_to_color_temp(temperature, duration)
t_log["move_to_color_temp"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._color_temp = temperature
self._hs_color = None
if (
light.ATTR_HS_COLOR in kwargs
and self.supported_features & light.SUPPORT_COLOR
):
hs_color = kwargs[light.ATTR_HS_COLOR]
xy_color = color_util.color_hs_to_xy(*hs_color)
result = await self._color_channel.move_to_color(
int(xy_color[0] * 65535), int(xy_color[1] * 65535), duration
)
t_log["move_to_color"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._hs_color = hs_color
self._color_temp = None
if (
effect == light.EFFECT_COLORLOOP
and self.supported_features & light.SUPPORT_EFFECT
):
result = await self._color_channel.color_loop_set(
UPDATE_COLORLOOP_ACTION
| UPDATE_COLORLOOP_DIRECTION
| UPDATE_COLORLOOP_TIME,
0x2, # start from current hue
0x1, # only support up
transition if transition else 7, # transition
0, # no hue
)
t_log["color_loop_set"] = result
self._effect = light.EFFECT_COLORLOOP
elif (
self._effect == light.EFFECT_COLORLOOP
and effect != light.EFFECT_COLORLOOP
and self.supported_features & light.SUPPORT_EFFECT
):
result = await self._color_channel.color_loop_set(
UPDATE_COLORLOOP_ACTION,
0x0,
0x0,
0x0,
0x0, # update action only, action off, no dir, time, hue
)
t_log["color_loop_set"] = result
self._effect = None
if flash is not None and self._supported_features & light.SUPPORT_FLASH:
result = await self._identify_channel.trigger_effect(
FLASH_EFFECTS[flash], EFFECT_DEFAULT_VARIANT
)
t_log["trigger_effect"] = result
self._off_brightness = None
self.debug("turned on: %s", t_log)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
duration = kwargs.get(light.ATTR_TRANSITION)
supports_level = self.supported_features & light.SUPPORT_BRIGHTNESS
if duration and supports_level:
result = await self._level_channel.move_to_level_with_on_off(
0, duration * 10
)
else:
result = await self._on_off_channel.off()
self.debug("turned off: %s", result)
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
return
self._state = False
if duration and supports_level:
# store current brightness so that the next turn_on uses it.
self._off_brightness = self._brightness
self.async_write_ha_state()
@STRICT_MATCH(channel_names=CHANNEL_ON_OFF, aux_channels={CHANNEL_COLOR, CHANNEL_LEVEL})
class Light(BaseLight, ZhaEntity):
"""Representation of a ZHA or ZLL light."""
_REFRESH_INTERVAL = (45, 75)
def __init__(self, unique_id, zha_device: ZhaDeviceType, channels, **kwargs):
"""Initialize the ZHA light."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._on_off_channel = self.cluster_channels.get(CHANNEL_ON_OFF)
self._state = bool(self._on_off_channel.on_off)
self._level_channel = self.cluster_channels.get(CHANNEL_LEVEL)
self._color_channel = self.cluster_channels.get(CHANNEL_COLOR)
self._identify_channel = self.zha_device.channels.identify_ch
if self._color_channel:
self._min_mireds: Optional[int] = self._color_channel.min_mireds
self._max_mireds: Optional[int] = self._color_channel.max_mireds
self._cancel_refresh_handle = None
effect_list = []
if self._level_channel:
self._supported_features |= light.SUPPORT_BRIGHTNESS
self._supported_features |= light.SUPPORT_TRANSITION
self._brightness = self._level_channel.current_level
if self._color_channel:
color_capabilities = self._color_channel.color_capabilities
if color_capabilities & CAPABILITIES_COLOR_TEMP:
self._supported_features |= light.SUPPORT_COLOR_TEMP
self._color_temp = self._color_channel.color_temperature
if color_capabilities & CAPABILITIES_COLOR_XY:
self._supported_features |= light.SUPPORT_COLOR
curr_x = self._color_channel.current_x
curr_y = self._color_channel.current_y
if curr_x is not None and curr_y is not None:
self._hs_color = color_util.color_xy_to_hs(
float(curr_x / 65535), float(curr_y / 65535)
)
else:
self._hs_color = (0, 0)
if color_capabilities & CAPABILITIES_COLOR_LOOP:
self._supported_features |= light.SUPPORT_EFFECT
effect_list.append(light.EFFECT_COLORLOOP)
if self._color_channel.color_loop_active == 1:
self._effect = light.EFFECT_COLORLOOP
if self._identify_channel:
self._supported_features |= light.SUPPORT_FLASH
if effect_list:
self._effect_list = effect_list
@callback
def async_set_state(self, attr_id, attr_name, value):
"""Set the state."""
self._state = bool(value)
if value:
self._off_brightness = None
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state
)
if self._level_channel:
self.async_accept_signal(
self._level_channel, SIGNAL_SET_LEVEL, self.set_level
)
refresh_interval = random.randint(*[x * 60 for x in self._REFRESH_INTERVAL])
self._cancel_refresh_handle = async_track_time_interval(
self.hass, self._refresh, timedelta(seconds=refresh_interval)
)
self.async_accept_signal(
None,
SIGNAL_LIGHT_GROUP_STATE_CHANGED,
self._maybe_force_refresh,
signal_override=True,
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect entity object when removed."""
self._cancel_refresh_handle()
await super().async_will_remove_from_hass()
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._state = last_state.state == STATE_ON
if "brightness" in last_state.attributes:
self._brightness = last_state.attributes["brightness"]
if "off_brightness" in last_state.attributes:
self._off_brightness = last_state.attributes["off_brightness"]
if "color_temp" in last_state.attributes:
self._color_temp = last_state.attributes["color_temp"]
if "hs_color" in last_state.attributes:
self._hs_color = last_state.attributes["hs_color"]
if "effect" in last_state.attributes:
self._effect = last_state.attributes["effect"]
async def async_get_state(self):
"""Attempt to retrieve the state from the light."""
if not self.available:
return
self.debug("polling current state")
if self._on_off_channel:
state = await self._on_off_channel.get_attribute_value(
"on_off", from_cache=False
)
if state is not None:
self._state = state
if self._level_channel:
level = await self._level_channel.get_attribute_value(
"current_level", from_cache=False
)
if level is not None:
self._brightness = level
if self._color_channel:
attributes = [
"color_temperature",
"current_x",
"current_y",
"color_loop_active",
]
results = await self._color_channel.get_attributes(
attributes, from_cache=False
)
color_temp = results.get("color_temperature")
if color_temp is not None:
self._color_temp = color_temp
color_x = results.get("current_x")
color_y = results.get("current_y")
if color_x is not None and color_y is not None:
self._hs_color = color_util.color_xy_to_hs(
float(color_x / 65535), float(color_y / 65535)
)
color_loop_active = results.get("color_loop_active")
if color_loop_active is not None:
if color_loop_active == 1:
self._effect = light.EFFECT_COLORLOOP
else:
self._effect = None
async def async_update(self):
"""Update to the latest state."""
await self.async_get_state()
async def _refresh(self, time):
"""Call async_get_state at an interval."""
await self.async_get_state()
self.async_write_ha_state()
async def _maybe_force_refresh(self, signal):
"""Force update the state if the signal contains the entity id for this entity."""
if self.entity_id in signal["entity_ids"]:
await self.async_get_state()
self.async_write_ha_state()
@STRICT_MATCH(
channel_names=CHANNEL_ON_OFF,
aux_channels={CHANNEL_COLOR, CHANNEL_LEVEL},
manufacturers="Philips",
)
class HueLight(Light):
"""Representation of a HUE light which does not report attributes."""
_REFRESH_INTERVAL = (3, 5)
@GROUP_MATCH()
class LightGroup(BaseLight, ZhaGroupEntity):
"""Representation of a light group."""
def __init__(
self, entity_ids: List[str], unique_id: str, group_id: int, zha_device, **kwargs
) -> None:
"""Initialize a light group."""
super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs)
group = self.zha_device.gateway.get_group(self._group_id)
self._on_off_channel = group.endpoint[OnOff.cluster_id]
self._level_channel = group.endpoint[LevelControl.cluster_id]
self._color_channel = group.endpoint[Color.cluster_id]
self._identify_channel = group.endpoint[Identify.cluster_id]
self._debounced_member_refresh = None
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
if self._debounced_member_refresh is None:
force_refresh_debouncer = Debouncer(
self.hass,
_LOGGER,
cooldown=3,
immediate=True,
function=self._force_member_updates,
)
self._debounced_member_refresh = force_refresh_debouncer
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
await super().async_turn_on(**kwargs)
await self._debounced_member_refresh.async_call()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
await super().async_turn_off(**kwargs)
await self._debounced_member_refresh.async_call()
async def async_update(self) -> None:
"""Query all members and determine the light group state."""
all_states = [self.hass.states.get(x) for x in self._entity_ids]
states: List[State] = list(filter(None, all_states))
on_states = [state for state in states if state.state == STATE_ON]
self._state = len(on_states) > 0
self._available = any(state.state != STATE_UNAVAILABLE for state in states)
self._brightness = helpers.reduce_attribute(on_states, ATTR_BRIGHTNESS)
self._hs_color = helpers.reduce_attribute(
on_states, ATTR_HS_COLOR, reduce=helpers.mean_tuple
)
self._white_value = helpers.reduce_attribute(on_states, ATTR_WHITE_VALUE)
self._color_temp = helpers.reduce_attribute(on_states, ATTR_COLOR_TEMP)
self._min_mireds = helpers.reduce_attribute(
states, ATTR_MIN_MIREDS, default=153, reduce=min
)
self._max_mireds = helpers.reduce_attribute(
states, ATTR_MAX_MIREDS, default=500, reduce=max
)
self._effect_list = None
all_effect_lists = list(helpers.find_state_attributes(states, ATTR_EFFECT_LIST))
if all_effect_lists:
# Merge all effects from all effect_lists with a union merge.
self._effect_list = list(set().union(*all_effect_lists))
self._effect = None
all_effects = list(helpers.find_state_attributes(on_states, ATTR_EFFECT))
if all_effects:
# Report the most common effect.
effects_count = Counter(itertools.chain(all_effects))
self._effect = effects_count.most_common(1)[0][0]
self._supported_features = 0
for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES):
# Merge supported features by emulating support for every feature
# we find.
self._supported_features |= support
# Bitwise-and the supported features with the GroupedLight's features
# so that we don't break in the future when a new feature is added.
self._supported_features &= SUPPORT_GROUP_LIGHT
async def _force_member_updates(self):
"""Force the update of member entities to ensure the states are correct for bulbs that don't report their state."""
async_dispatcher_send(
self.hass,
SIGNAL_LIGHT_GROUP_STATE_CHANGED,
{"entity_ids": self._entity_ids},
)
| 36.956155 | 123 | 0.641387 |
4a25fafcc599f1262061a69b88cd86294cb752df | 4,028 | py | Python | prody/apps/prody_apps/prody_contacts.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 210 | 2015-01-26T08:17:56.000Z | 2022-03-30T01:40:34.000Z | prody/apps/prody_apps/prody_contacts.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 555 | 2015-01-05T21:51:54.000Z | 2022-03-31T16:51:41.000Z | prody/apps/prody_apps/prody_contacts.py | grandevelia/ProDy | 7c725640a94c16543423c0756388998cb86a97ae | [
"MIT"
] | 99 | 2015-02-09T18:00:39.000Z | 2022-03-07T12:52:51.000Z | # -*- coding: utf-8 -*-
"""This module defines a routine for contact identification."""
from ..apptools import *
__all__ = ['prody_contacts']
def prody_contacts(**kwargs):
"""Identify contacts of a target structure with one or more ligands.
Contacting atoms (or extended subset of atoms, such as residues) are
outputted in PDB file format.
:arg target: target PDB identifier or filename
:arg ligand: ligand PDB identifier(s) or filename(s)
:arg select: atom selection string for target structure
:arg radius: contact radius (Å), default is ``4.0``
:arg extend: output same ``'residue'``, ``'chain'``, or ``'segment'`` along
with contacting atoms
:arg prefix: prefix for output file, default is *target* filename
:arg suffix: output filename suffix, default is *ligand* filename"""
import prody
LOGGER = prody.LOGGER
target = prody.parsePDB(kwargs['target'])
title = kwargs.get('prefix') or target.getTitle()
selstr = kwargs.get('select')
if selstr:
target = target.select(selstr)
contacts = prody.Contacts(target)
suffix = kwargs.get('suffix', '_contacts')
extend = kwargs.get('extend')
radius = float(kwargs.get('radius', 4.0))
ligands = kwargs.get('ligand')
if len(ligands) > 1:
outfn = lambda fn: title + suffix + '_' + fn + '.pdb'
else:
outfn = lambda fn: title + suffix + '.pdb'
for pdb in ligands:
ligand = prody.parsePDB(pdb)
sel = contacts(radius, ligand)
if sel:
LOGGER.info('{0} atoms from {1} contact {2}.'
.format(len(sel), pdb, str(target)))
if extend:
sel = target.select('same ' + extend + ' as sel', sel=sel)
LOGGER.info('Selection is extended to {0} atoms of the same '
'{1}(s).'.format(len(sel), extend))
pdbfn = outfn(ligand.getTitle())
LOGGER.info('Writing contacts into ' + pdbfn)
prody.writePDB(pdbfn, sel)
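# Usage sketch (hypothetical inputs; mirrors the CLI defaults wired up below):
# prody_contacts(target='1zz2', ligand=['B11.pdb'], select='protein',
#                radius=4.0, extend='residue')
# would write the contacting residues to e.g. '1zz2_contacts.pdb'.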
def addCommand(commands):
subparser = commands.add_parser('contacts',
help='identify contacts between a target and ligand(s)')
subparser.add_argument('--quiet', help="suppress info messages to stderr",
action=Quiet, nargs=0)
subparser.add_argument('--examples', action=UsageExample, nargs=0,
help='show usage examples and exit')
subparser.set_defaults(usage_example=
"""Identify contacts of a target structure with one or more ligands.
Fetch PDB structure 1zz2, save PDB files for individual ligands, and identify \
contacting residues of the target protein:
$ prody select -o B11 "resname B11" 1zz2
$ prody select -o BOG "resname BOG" 1zz2
$ prody contacts -r 4.0 -t residue -s protein 1zz2 B11.pdb BOG.pdb
""",
test_examples=[(0,1,2)]
)
subparser.add_argument('-s', '--select', dest='select', type=str,
metavar='SELSTR', help='selection string for target')
subparser.add_argument('-r', '--radius', dest='radius', type=float,
default=4.0, metavar='FLOAT',
help='contact radius (default: %(default)s)')
subparser.add_argument('-t', '--extend', dest='extend', type=str,
metavar='STR', choices=set(['residue', 'chain', 'segment']),
help=('output same residue, chain, or segment as contacting atoms'))
subparser.add_argument('-p', '--prefix', dest='prefix', type=str,
metavar='STR',
help=('output filename prefix (default: target filename)'))
subparser.add_argument('-x', '--suffix', dest='suffix', type=str,
default='_contacts', metavar='STR',
help=('output filename suffix (default: %(default)s)'))
subparser.add_argument('target',
help='target PDB identifier or filename')
subparser.add_argument('ligand', nargs='+',
help='ligand PDB identifier(s) or filename(s)')
subparser.set_defaults(func=lambda opt: prody_contacts(**opt.__dict__))
subparser.set_defaults(subparser=subparser)
| 35.646018 | 79 | 0.635303 |
4a25fbc2b2f4701edd14f859c6f644cc7fa7c24e | 1,875 | py | Python | game.py | bdupau/wordsnake | 7d8be2048412990ffa850d86bece3687dcfe24c8 | [
"Apache-2.0"
] | null | null | null | game.py | bdupau/wordsnake | 7d8be2048412990ffa850d86bece3687dcfe24c8 | [
"Apache-2.0"
] | null | null | null | game.py | bdupau/wordsnake | 7d8be2048412990ffa850d86bece3687dcfe24c8 | [
"Apache-2.0"
] | null | null | null | __author__ = 'Bastiaan'
class Game:
def __init__(self, fn_get_current_word, fn_add_word, fn_score):
self._scores_per_player = {}
self._get_current_word = fn_get_current_word
self._add_word = fn_add_word
self._score = fn_score
self._is_started = False
def game_finished(self):
return max(self._scores_per_player.values()) > 25
def start_game(self, players):
if len(self._scores_per_player) > 0:
return "Het is niet mogelijk het spel te starten. Het spel is al gestart."
if len(set(players)) < len(players):
return "Het is niet mogelijk een spel te starten. Er spelers zijn met dezelfde naam."
self._scores_per_player = { player : 0 for player in players }
self._is_started = True
return "Nieuw spel is gestart."
def add_word(self, player, word):
if not self._is_started:
return "Het is niet mogelijk een woord toe te voegen. Het spel is nog niet begonnen."
if self.game_finished():
return "Het is niet mogelijk een woord toe te voegen. Het spel is al afgelopen."
if player not in self._scores_per_player:
return "Het is niet mogelijk een woord toe te voegen. {} is geen deelnemer aan het spel.".format(player)
current_word = self._get_current_word()
if self._add_word(word):
score = self._score(current_word, word)
self._scores_per_player[player] += score
bot_text = "{0} heeft \'{1}\' aan de wordsnake toegevoegd voor {2} punten. {0} heeft nu {3} punten.".format(player, word, score, self._scores_per_player[player])
if self.game_finished():
bot_text += " {} heeft het spel gewonnen !".format(player)
return bot_text
return "Het woord \'{}\' is niet valide.".format(word) | 44.642857 | 173 | 0.639467 |
4a25fc17b7fc6ad585df47befc0bb9bfac54467f | 1,706 | py | Python | source/file_writter.py | noltron000-coursework/adventure | 744cc0b16e76dee7e1ba617c7153d90fca37718e | [
"MIT"
] | 1 | 2019-12-13T14:31:21.000Z | 2019-12-13T14:31:21.000Z | source/file_writter.py | noltron000-coursework/adventure | 744cc0b16e76dee7e1ba617c7153d90fca37718e | [
"MIT"
] | 1 | 2019-12-13T19:17:40.000Z | 2019-12-13T19:17:40.000Z | source/file_writter.py | noltron000/adventure | 744cc0b16e76dee7e1ba617c7153d90fca37718e | [
"MIT"
] | 1 | 2022-01-10T20:21:37.000Z | 2022-01-10T20:21:37.000Z | import os
class FileWritter:
def __init__(self, story):
self.story = story
self.dir = input('What folder-name do you want your output files to be in? ')
if self.dir == '':
self.dir = 'story'
if os.path.isdir(f'./{self.dir}'):
raise # ValueError('This folder already exists! Please try again.')
else:
os.mkdir(f'./{self.dir}')
os.mkdir(f'./{self.dir}/pages')
def main(self):
story = self.story
self.write_cover()
self.write_event(story.root)
def write_cover(self):
story = self.story
markdown = (
f'{story.title.title()}\n'
f'{len(story.title) * "="}\n\n'
f'{story.subtitle.title()}\n'
f'{len(story.subtitle) * "-"}\n\n'
f'> {story.synopsis}\n\n'
'---\n\n'
'### [*click here to start reading*]'
f'(./pages/{id(story.root)}.md)\n'
)
# then create the file
with open(f'./{self.dir}/START.md', 'w+') as file:
file.write(markdown)
def write_event(self, event):
# compile event path
event_path = f'./{self.dir}/pages/{id(event)}.md'
# check if the compiled markdown file exists.
if os.path.isfile(event_path):
return
# first create the list of options.
option_list = ''
for index, choice in enumerate(event.choices):
outcome = event.outcomes[index]
# compile event path
outcome_path = f'./{id(outcome)}.md'
option_list += f'1. [{choice}]({outcome_path})\n'
if option_list != '':
option_list = '\n---\n\n' + option_list
# next compile the markdown
markdown = (
f'{event.content}\n'
f'{option_list}'
)
# then create the file
with open(event_path, 'w+') as file:
file.write(markdown)
# finally repeat for child events
for outcome in event.outcomes:
self.write_event(outcome)
| 24.724638 | 79 | 0.635404 |
4a25fec3ad8ffa3d00b2180b2bacebf5e670eeb0 | 480 | py | Python | PYTHON/python-datastructures/algorithms/algorithms/arrays/missing_ranges.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | 11 | 2021-02-18T04:53:44.000Z | 2022-01-16T10:57:39.000Z | PYTHON/python-datastructures/algorithms/algorithms/arrays/missing_ranges.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | 162 | 2021-03-09T01:52:11.000Z | 2022-03-12T01:09:07.000Z | PYTHON/python-datastructures/algorithms/algorithms/arrays/missing_ranges.py | Web-Dev-Collaborative/DS-ALGO-OFFICIAL | 6d7195d33c28a0fe22f12231efffb39f4bf05c97 | [
"Apache-2.0"
] | 8 | 2021-02-18T05:12:34.000Z | 2022-03-06T19:02:14.000Z | """
Find missing ranges between low and high in the given array.
Ex) [3, 5] lo=1 hi=10 => answer: [(1, 2), (4, 4), (6, 10)]
"""
def missing_ranges(arr, lo, hi):
res = []
start = lo
for n in arr:
if n == start:
start += 1
elif n > start:
res.append((start, n - 1))
start = n + 1
if start <= hi: # after done iterating thru array,
res.append((start, hi)) # append remainder to list
return res
| 20 | 60 | 0.510417 |
4a25fefdc109991ae3c152cf1b28621469514b63 | 2,821 | py | Python | projects/scale_hyperprior_lightning/vimeo.py | tallamjr/NeuralCompression | 21d05ec0d9f8c52d8742fde36f569b4dad2842a5 | [
"MIT"
] | 233 | 2021-07-19T18:50:21.000Z | 2022-03-30T22:06:40.000Z | projects/scale_hyperprior_lightning/vimeo.py | tallamjr/NeuralCompression | 21d05ec0d9f8c52d8742fde36f569b4dad2842a5 | [
"MIT"
] | 79 | 2021-07-22T13:33:45.000Z | 2022-02-09T16:38:42.000Z | projects/scale_hyperprior_lightning/vimeo.py | tallamjr/NeuralCompression | 21d05ec0d9f8c52d8742fde36f569b4dad2842a5 | [
"MIT"
] | 21 | 2021-07-29T18:27:59.000Z | 2022-02-28T02:32:53.000Z | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Sequence, Union
from pytorch_lightning import LightningDataModule
from torch.utils.data import DataLoader
from torchvision import transforms
from neuralcompression.data import Vimeo90kSeptuplet
class Vimeo90kSeptupletLightning(LightningDataModule):
"""
PyTorch Lightning data module version of ``Vimeo90kSeptuplet``.
Args:
data_dir: root directory of Vimeo dataset.
train_batch_size: the batch size to use during training.
val_batch_size: the batch size to use during validation.
patch_size: the size of the crop to take from the original images.
num_workers: the number of parallel workers to create to load data
items (see PyTorch's Dataloader documentation for more details).
pin_memory: whether prepared items should be loaded into pinned memory
or not. This improves performance on GPUs.
"""
def __init__(
self,
data_dir: str,
train_batch_size: int = 8,
val_batch_size: int = 8,
patch_size: Union[int, Sequence[int]] = (256, 256),
num_workers: int = 0,
pin_memory: bool = False,
):
super().__init__()
self.data_dir = data_dir
self.train_batch_size = train_batch_size
self.val_batch_size = val_batch_size
self.patch_size = patch_size
self.num_workers = num_workers
self.pin_memory = pin_memory
def setup(self, stage: Optional[str] = None) -> None:
train_transforms = transforms.Compose(
[transforms.RandomCrop(self.patch_size), transforms.ToTensor()]
)
val_transforms = transforms.Compose(
[transforms.CenterCrop(self.patch_size), transforms.ToTensor()]
)
self.train_dataset = Vimeo90kSeptuplet(
self.data_dir,
pil_transform=train_transforms,
split="train",
)
self.val_dataset = Vimeo90kSeptuplet(
self.data_dir,
pil_transform=val_transforms,
split="test",
)
def train_dataloader(self) -> DataLoader:
return DataLoader(
self.train_dataset,
batch_size=self.train_batch_size,
num_workers=self.num_workers,
shuffle=True,
pin_memory=self.pin_memory,
)
def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
return DataLoader(
self.val_dataset,
batch_size=self.val_batch_size,
num_workers=self.num_workers,
shuffle=False,
pin_memory=self.pin_memory,
)
| 32.802326 | 78 | 0.649061 |
4a25ff119f06e33c1cd8e0332802c77847ff5fa3 | 3,180 | py | Python | helios/pipeViewer/pipe_view/gui/widgets/transaction_list.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 44 | 2019-12-13T06:39:13.000Z | 2022-03-29T23:09:28.000Z | helios/pipeViewer/pipe_view/gui/widgets/transaction_list.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 222 | 2020-01-14T21:58:56.000Z | 2022-03-31T20:05:12.000Z | helios/pipeViewer/pipe_view/gui/widgets/transaction_list.py | debjyoti0891/map | abdae67964420d7d36255dcbf83e4240a1ef4295 | [
"MIT"
] | 19 | 2020-01-03T19:03:22.000Z | 2022-01-09T08:36:20.000Z | import wx
from gui.font_utils import GetMonospaceFont
# # This class is a GUI list control element that shows transactions.
class TransactionList(wx.ListCtrl):
def __init__(self, parent, id, canvas, name = '', style = wx.LC_REPORT | wx.SUNKEN_BORDER):
wx.ListCtrl.__init__(self, parent = parent, id = wx.NewId(), name = name, style = style)
self.SetFont(GetMonospaceFont(canvas.GetSettings().layout_font_size))
# used for coloring
self.__canvas = canvas
# list of dictionary of properties
self.__transactions = []
# properties to show.
self.__properties = ['start', 'location', 'annotation'] # must have at least 1
# insertion point for elements at end
self.__current_new_idx = 0
self.__colorize = True
self.RefreshAll()
def GetProperties(self):
return tuple(self.__properties)
def SetProperties(self, properties):
self.__properties = properties[:]
self.RefreshAll()
def Clear(self):
self.__transactions = []
def Colorize(self, colorize):
self.__colorize = colorize
# # Destroy all graphical elements and recreate them
def RefreshAll(self):
self.ClearAll()
# make header
width = int(self.GetClientSize()[0] / len(self.__properties))
if width < 100:
width = 100
for col_idx, column in enumerate(self.__properties):
self.InsertColumn(col_idx, column)
self.SetColumnWidth(col_idx, width)
self.__current_new_idx = 0
for transaction in self.__transactions:
self.__AddGraphicalTransaction(transaction)
def RefreshTransaction(self, index):
transaction = self.__transactions[index]
for col_idx, prop in enumerate(self.__properties):
self.SetItem(index, col_idx, str(transaction.get(prop)))
annotation = transaction.get('annotation')
if self.__colorize and annotation:
color = self.__canvas.GetAutocolorColor(annotation)
else:
color = (255, 255, 255)
self.SetItemBackgroundColour(index, color)
def GetTransaction(self, index):
return self.__transactions[index]
def __AddGraphicalTransaction(self, transaction):
self.InsertItem(self.__current_new_idx, str(transaction.get(self.__properties[0])))
self.RefreshTransaction(self.__current_new_idx)
self.__current_new_idx += 1
# # Add a new element to bottom of list. New item must be dictionary of properties.
def Add(self, transaction_properties):
self.__transactions.append(transaction_properties)
self.__AddGraphicalTransaction(transaction_properties)
return self.__current_new_idx - 1
def Remove(self, index):
del self.__transactions[index]
self.DeleteItem(index)
self.__current_new_idx -= 1
# # Attempts to resize the columns based on size
# @pre All content should be added before this is called (once)
def FitColumns(self):
for idx, _ in enumerate(self.__properties):
self.SetColumnWidth(idx, wx.LIST_AUTOSIZE)
self.Layout()
| 36.976744 | 96 | 0.66478 |
4a25ff80ca81de3d80b18cf54a848c6870885e31 | 1,042 | py | Python | picam.py | hxydebug/FR | dfb54acc8eab79204ee55f7eb1cdc547566c6466 | [
"MIT"
] | 1 | 2021-12-08T03:22:46.000Z | 2021-12-08T03:22:46.000Z | picam.py | hxydebug/FR | dfb54acc8eab79204ee55f7eb1cdc547566c6466 | [
"MIT"
] | null | null | null | picam.py | hxydebug/FR | dfb54acc8eab79204ee55f7eb1cdc547566c6466 | [
"MIT"
] | null | null | null | """
Pi camera device capture class for OpenCV. This class allows you to capture a
single image from the pi camera as an OpenCV image.
"""
import io
import time
import cv2
import numpy as np
import picamera
import config
class OpenCVCapture(object):
def read(self):
"""Read a single frame from the camera and return the data as an OpenCV
image (which is a numpy array).
"""
# This code is based on the picamera example at:
# http://picamera.readthedocs.org/en/release-1.0/recipes1.html#capturing-to-an-opencv-object
# Capture a frame from the camera.
data = io.BytesIO()
with picamera.PiCamera() as camera:
camera.capture(data, format='jpeg')
data = np.fromstring(data.getvalue(), dtype=np.uint8)
# Decode the image data and return an OpenCV image.
image = cv2.imdecode(data, 1)
# Save captured image for debugging.
cv2.imwrite(config.DEBUG_IMAGE, image)
# Return the captured image data.
return image
| 31.575758 | 100 | 0.661228 |
4a25ff8b2fa182faaf4f4ce8909c9ec2e9b55ccc | 4,334 | py | Python | ppocr/utils/utility.py | chccc1994/PaddleOCR | 03e768b26ff769a8af1adfec062c472fe54bb640 | [
"Apache-2.0"
] | null | null | null | ppocr/utils/utility.py | chccc1994/PaddleOCR | 03e768b26ff769a8af1adfec062c472fe54bb640 | [
"Apache-2.0"
] | null | null | null | ppocr/utils/utility.py | chccc1994/PaddleOCR | 03e768b26ff769a8af1adfec062c472fe54bb640 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import imghdr
import cv2
import random
import numpy as np
import paddle
def print_dict(d, logger, delimiter=0):
"""
Recursively visualize a dict and
indenting acrrording by the relationship of keys.
"""
for k, v in sorted(d.items()):
if isinstance(v, dict):
logger.info("{}{} : ".format(delimiter * " ", str(k)))
print_dict(v, logger, delimiter + 4)
elif isinstance(v, list) and len(v) >= 1 and isinstance(v[0], dict):
logger.info("{}{} : ".format(delimiter * " ", str(k)))
for value in v:
print_dict(value, logger, delimiter + 4)
else:
logger.info("{}{} : {}".format(delimiter * " ", k, v))
def get_check_global_params(mode):
check_params = ['use_gpu', 'max_text_length', 'image_shape', \
'image_shape', 'character_type', 'loss_type']
if mode == "train_eval":
check_params = check_params + [ \
'train_batch_size_per_card', 'test_batch_size_per_card']
elif mode == "test":
check_params = check_params + ['test_batch_size_per_card']
return check_params
def _check_image_file(path):
img_end = {'jpg', 'bmp', 'png', 'jpeg', 'rgb', 'tif', 'tiff', 'gif'}
return any([path.lower().endswith(e) for e in img_end])
def get_image_file_list(img_file):
imgs_lists = []
if img_file is None or not os.path.exists(img_file):
raise Exception("not found any img file in {}".format(img_file))
img_end = {'jpg', 'bmp', 'png', 'jpeg', 'rgb', 'tif', 'tiff', 'gif'}
if os.path.isfile(img_file) and _check_image_file(img_file):
imgs_lists.append(img_file)
elif os.path.isdir(img_file):
for single_file in os.listdir(img_file):
file_path = os.path.join(img_file, single_file)
if os.path.isfile(file_path) and _check_image_file(file_path):
imgs_lists.append(file_path)
if len(imgs_lists) == 0:
raise Exception("not found any img file in {}".format(img_file))
imgs_lists = sorted(imgs_lists)
return imgs_lists
def check_and_read_gif(img_path):
if os.path.basename(img_path)[-3:] in ['gif', 'GIF']:
gif = cv2.VideoCapture(img_path)
ret, frame = gif.read()
if not ret:
logger = logging.getLogger('ppocr')
logger.info("Cannot read {}. This gif image maybe corrupted.")
return None, False
if len(frame.shape) == 2 or frame.shape[-1] == 1:
frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2RGB)
imgvalue = frame[:, :, ::-1]
return imgvalue, True
return None, False
def load_vqa_bio_label_maps(label_map_path):
with open(label_map_path, "r", encoding='utf-8') as fin:
lines = fin.readlines()
lines = [line.strip() for line in lines]
if "O" not in lines:
lines.insert(0, "O")
labels = []
for line in lines:
if line == "O":
labels.append("O")
else:
labels.append("B-" + line)
labels.append("I-" + line)
label2id_map = {label: idx for idx, label in enumerate(labels)}
id2label_map = {idx: label for idx, label in enumerate(labels)}
return label2id_map, id2label_map
def set_seed(seed=1024):
random.seed(seed)
np.random.seed(seed)
paddle.seed(seed)
class AverageMeter:
def __init__(self):
self.reset()
def reset(self):
"""reset"""
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
"""update"""
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
| 32.833333 | 76 | 0.617213 |
4a25ff8be9712851407ab150e1c31fa4487b4d8e | 330 | py | Python | script.py | likelion/script.tubecast | 33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450 | [
"MIT"
] | 83 | 2018-03-16T21:42:59.000Z | 2022-03-13T20:51:41.000Z | script.py | likelion/script.tubecast | 33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450 | [
"MIT"
] | 52 | 2018-03-29T01:35:10.000Z | 2022-03-01T15:48:24.000Z | script.py | likelion/script.tubecast | 33c6bd7f87dfdca3e86a0dc2239a31d3f43a0450 | [
"MIT"
] | 34 | 2018-03-28T13:58:46.000Z | 2021-12-06T11:13:48.000Z | # -*- coding: utf-8 -*-
from resources.lib.kodi.utils import get_string, yes_no
from resources.lib.tubecast.youtube.pairing import generate_pairing_code
if __name__ == "__main__":
dg = yes_no(get_string(32008),
yeslabel=get_string(32009), nolabel=get_string(32010))
if dg:
generate_pairing_code()
| 33 | 72 | 0.706061 |
4a2600ded2ff6b7b2a3d8935d83407f524008e07 | 9,897 | py | Python | web_console_v2/api/fedlearner_webconsole/k8s/models.py | duanbing/fedlearner | 5cce3c1fe09abe66879274a0ad3dc8e2f25a322d | [
"Apache-2.0"
] | null | null | null | web_console_v2/api/fedlearner_webconsole/k8s/models.py | duanbing/fedlearner | 5cce3c1fe09abe66879274a0ad3dc8e2f25a322d | [
"Apache-2.0"
] | null | null | null | web_console_v2/api/fedlearner_webconsole/k8s/models.py | duanbing/fedlearner | 5cce3c1fe09abe66879274a0ad3dc8e2f25a322d | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
from abc import ABCMeta, abstractmethod
from datetime import datetime, timezone
from enum import Enum, unique
from typing import Optional, List
# Please keep the value consistent with operator's definition
@unique
class PodType(Enum):
UNKNOWN = 'UNKNOWN'
# Parameter server
PS = 'PS'
# Master worker
MASTER = 'MASTER'
WORKER = 'WORKER'
@staticmethod
def from_value(value: str) -> 'PodType':
try:
if isinstance(value, str):
value = value.upper()
return PodType(value)
except ValueError:
logging.error(f'Unexpected value of PodType: {value}')
return PodType.UNKNOWN
@unique
class PodState(Enum):
UNKNOWN = 'UNKNOWN'
RUNNING = 'RUNNING'
SUCCEEDED = 'SUCCEEDED'
# Succeeded and resource released
SUCCEEDED_AND_FREED = 'SUCCEEDED_AND_FREED'
FAILED = 'FAILED'
# Failed and resource released
FAILED_AND_FREED = 'FAILED_AND_FREED'
PENDING = 'PENDING'
@staticmethod
def from_value(value: str) -> 'PodState':
try:
if isinstance(value, str):
value = value.upper()
return PodState(value)
except ValueError:
logging.error(f'Unexpected value of PodState: {value}')
return PodState.UNKNOWN
class MessageProvider(metaclass=ABCMeta):
@abstractmethod
def get_message(self, private: bool = False) -> Optional[str]:
pass
class ContainerState(MessageProvider):
def __init__(self, state: str,
message: Optional[str] = None,
reason: Optional[str] = None):
self.state = state
self.message = message
self.reason = reason
def get_message(self, private: bool = False) -> Optional[str]:
if private:
if self.message is not None:
return f'{self.state}:{self.message}'
if self.reason is not None:
return f'{self.state}:{self.reason}'
return None
def __eq__(self, other):
if not isinstance(other, ContainerState):
return False
return self.state == other.state and \
self.message == other.message and \
self.reason == other.reason
class PodCondition(MessageProvider):
def __init__(self, cond_type: str,
message: Optional[str] = None,
reason: Optional[str] = None):
self.cond_type = cond_type
self.message = message
self.reason = reason
def get_message(self, private: bool = False) -> Optional[str]:
if private:
if self.message is not None:
return f'{self.cond_type}:{self.message}'
if self.reason is not None:
return f'{self.cond_type}:{self.reason}'
return None
def __eq__(self, other):
if not isinstance(other, PodCondition):
return False
return self.cond_type == other.cond_type and \
self.message == other.message and \
self.reason == other.reason
class Pod(object):
def __init__(self,
name: str,
state: PodState,
pod_type: PodType,
container_states: List[ContainerState] = None,
pod_conditions: List[PodCondition] = None):
self.name = name
self.state = state or PodState.UNKNOWN
self.pod_type = pod_type
self.container_states = container_states or []
self.pod_conditions = pod_conditions or []
def __eq__(self, other):
if not isinstance(other, Pod):
return False
if len(self.container_states) != len(other.container_states):
return False
for index, state in enumerate(self.container_states):
if state != other.container_states[index]:
return False
if len(self.pod_conditions or []) != len(self.pod_conditions or []):
return False
for index, cond in enumerate(self.pod_conditions):
if cond != other.pod_conditions[index]:
return False
return self.name == other.name and \
self.state == other.state and \
self.pod_type == other.pod_type
def to_dict(self, include_private_info: bool = False):
# TODO: to reuse to_dict from db.py
messages = []
for container_state in self.container_states:
message = container_state.get_message(include_private_info)
if message is not None:
messages.append(message)
for pod_condition in self.pod_conditions:
message = pod_condition.get_message(include_private_info)
if message is not None:
messages.append(message)
return {
'name': self.name,
'pod_type': self.pod_type.name,
'state': self.state.name,
'message': ', '.join(messages)
}
@classmethod
def from_json(cls, p: dict) -> 'Pod':
"""Extracts information from original K8S pod info.
Schema ref: https://github.com/garethr/kubernetes-json-schema/blob/
master/v1.6.5-standalone/pod.json"""
container_states: List[ContainerState] = []
pod_conditions: List[PodCondition] = []
if 'containerStatuses' in p['status'] and \
isinstance(p['status']['containerStatuses'], list) and \
len(p['status']['containerStatuses']) > 0:
for state, detail in \
p['status']['containerStatuses'][0]['state'].items():
container_states.append(ContainerState(
state=state,
message=detail.get('message'),
reason=detail.get('reason')
))
if 'conditions' in p['status'] and \
isinstance(p['status']['conditions'], list):
for cond in p['status']['conditions']:
pod_conditions.append(PodCondition(
cond_type=cond['type'],
message=cond.get('message'),
reason=cond.get('reason')
))
return cls(
name=p['metadata']['name'],
pod_type=PodType.from_value(
p['metadata']['labels']['fl-replica-type']),
state=PodState.from_value(p['status']['phase']),
container_states=container_states,
pod_conditions=pod_conditions)
# Please keep the value consistent with operator's definition
@unique
class FlAppState(Enum):
UNKNOWN = 'Unknown'
NEW = 'FLStateNew'
BOOTSTRAPPED = 'FLStateBootstrapped'
SYNC_SENT = 'FLStateSyncSent'
RUNNING = 'FLStateRunning'
COMPLETED = 'FLStateComplete'
FAILING = 'FLStateFailing'
SHUTDOWN = 'FLStateShutDown'
FAILED = 'FLStateFailed'
@staticmethod
def from_value(value: str) -> 'FlAppState':
try:
return FlAppState(value)
except ValueError:
logging.error(f'Unexpected value of FlAppState: {value}')
return FlAppState.UNKNOWN
class FlApp(object):
def __init__(self,
state: FlAppState = FlAppState.UNKNOWN,
pods: Optional[List[Pod]] = None,
completed_at: Optional[int] = None):
self.state = state
self.pods = pods or []
self.completed_at = completed_at
def __eq__(self, other):
if not isinstance(other, FlApp):
return False
if len(self.pods) != len(other.pods):
return False
for index, pod in enumerate(self.pods):
if pod != other.pods[index]:
return False
return self.state == other.state and \
self.completed_at == other.completed_at
@classmethod
def from_json(cls, flapp: dict) -> 'FlApp':
if flapp is None \
or 'status' not in flapp \
or not isinstance(flapp['status'], dict):
return cls()
pods: List[Pod] = []
completed_at: Optional[int] = None
# Parses pod related info
replicas = flapp['status'].get('flReplicaStatus', {})
for pod_type in replicas:
for state in ['failed', 'succeeded']:
for pod_name in replicas[pod_type].get(state, {}):
if state == 'failed':
pod_state = PodState.FAILED_AND_FREED
else:
pod_state = PodState.SUCCEEDED_AND_FREED
pods.append(Pod(
name=pod_name,
pod_type=PodType.from_value(pod_type),
state=pod_state))
state = flapp['status'].get('appState')
if flapp['status'].get('completionTime', None):
# Completion time is a iso formatted datetime in UTC timezone
completed_at = int(datetime.strptime(
flapp['status']['completionTime'],
'%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc)
.timestamp())
return cls(state=FlAppState.from_value(state),
pods=pods,
completed_at=completed_at)
| 35.346429 | 76 | 0.582197 |
4a2600f6a0ad09c6dfc7df50333e70dbcb04fe17 | 6,115 | py | Python | shoogie/admin.py | counterjack/django-shoogie | 96f0b1178a7f08b1a980274d610b131ca1fc0f89 | [
"MIT"
] | null | null | null | shoogie/admin.py | counterjack/django-shoogie | 96f0b1178a7f08b1a980274d610b131ca1fc0f89 | [
"MIT"
] | null | null | null | shoogie/admin.py | counterjack/django-shoogie | 96f0b1178a7f08b1a980274d610b131ca1fc0f89 | [
"MIT"
] | 1 | 2021-04-03T07:31:49.000Z | 2021-04-03T07:31:49.000Z | from django.contrib import admin
from django.contrib.admin.views.main import ChangeList
try:
from django.conf.urls import url
except ImportError:
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from . import models, utils, views
def get_name(user):
return user.get_full_name() or user.username
class Truncate(object):
max_length = 50
def __init__(self, attrname, max_length=None):
self.attrname = attrname
self.__name__ = attrname
if max_length is not None:
self.max_length = max_length
def __call__(self, instance):
val = getattr(instance, self.attrname, '')
return utils.truncate(val, self.max_length)
class FasterChangeList(ChangeList):
"Defers large fields we don't use"
defer_fields = (
'post_data',
'cookie_data',
'session_data',
'technical_response',
)
# get_query_set() was renamed to get_queryset() in Django 1.6
def get_queryset(self, *args, **kwargs):
changelist = super(FasterChangeList, self)
if hasattr(changelist, 'get_queryset'):
qset = changelist.get_queryset(*args, **kwargs)
else:
qset = changelist.get_query_set(*args, **kwargs)
return qset.defer(*self.defer_fields)
get_query_set = get_queryset
class ServerErrorAdmin(admin.ModelAdmin):
list_display = (Truncate('exception_type', 40),
Truncate('exception_str', 80),
'path_link',
'error_date_format',
'user_link',
'technicalresponse_link',
'resolved',)
date_hierarchy = 'timestamp'
search_fields = ('request_path', 'exception_type', 'exception_str', 'source_file', 'source_function', 'source_text')
actions = ('get_email_list', 'resolve_servererror', 'unresolve_servererror')
exclude = ('technical_response',)
readonly_fields = (
'timestamp',
'hostname',
'request_method',
'request_path',
'query_string',
'post_data',
'cookie_data',
'session_id',
'session_data',
'user',
'exception_type',
'exception_str',
'source_file',
'source_line_num',
'source_function',
'source_text',
)
# queryset() was renamed to get_queryset() in Django 1.6
def get_queryset(self, request):
model_admin = super(ServerErrorAdmin, self)
if hasattr(model_admin, 'get_queryset'):
qset = model_admin.get_queryset(request)
else:
qset = model_admin.queryset(request)
return qset.select_related('user')
queryset = get_queryset
def get_changelist(self, request, **kwargs):
return FasterChangeList
def error_date_format(self, instance):
return instance.timestamp.strftime('%Y-%b-%d %H:%M')
error_date_format.admin_order_field = 'timestamp'
error_date_format.short_description = 'timestamp'
get_request_path = Truncate('request_path', 40)
def path_link(self, instance):
request_path = self.get_request_path(instance)
if 'GET' != instance.request_method:
if instance.request_method:
return u'%s (%s)' % (request_path, instance.request_method)
else:
return request_path
url = u'http://%s%s?%s' % (instance.hostname, instance.request_path, instance.query_string)
return u'<a href="{0}" title="{0}">{1}</a>'.format(url, request_path)
path_link.admin_order_field = 'request_path'
path_link.allow_tags = True
path_link.short_description = 'path'
def user_link(self, instance):
if not instance.user:
return u'(None)'
user = instance.user
url = reverse('admin:auth_user_change', args=(user.id,))
templ = u'<a href="{url}" title="{name}">{username}</a>'
return templ.format(url=url, username=user.username, name=get_name(user))
user_link.admin_order_field = 'user'
user_link.allow_tags = True
user_link.short_description = 'user'
def get_email_list(self, request, queryset):
emails = set()
for se in queryset.select_related('user'):
user = se.user
if user and user.email:
name = get_name(user)
emails.add('"%s" <%s>' % (name, user.email))
return HttpResponse(',\n'.join(emails), mimetype='text/plain')
get_email_list.short_description = 'Get user email addresses for selected errors'
def technicalresponse_link(self, instance):
tr_url = reverse('admin:shoogie_technicalresponse', kwargs={'pk':instance.pk})
return '<a href="%s"><b>debug</b></a>' % tr_url
technicalresponse_link.allow_tags = True
technicalresponse_link.short_description = 'Debug'
def resolve_servererror(self, request, queryset):
update_count = queryset.update(resolved=True)
plural = 's' if update_count != 1 else ''
self.message_user(request, "Marked %d error%s as resolved" % (update_count, plural))
resolve_servererror.short_description = "Mark selected errors as resolved"
def unresolve_servererror(self, request, queryset):
update_count = queryset.update(resolved=False)
plural = 's' if update_count != 1 else ''
self.message_user(request, "Marked %d error%s as not resolved" % (update_count, plural))
unresolve_servererror.short_description = "Mark selected errors as NOT resolved"
def get_urls(self):
myview = views.TechnicalResponseView.as_view()
myurls = ['',
url(r'(?P<pk>\d+)/technicalresponse/$',
self.admin_site.admin_view(myview, cacheable=True),
name='shoogie_technicalresponse',
),
]
return myurls + super(ServerErrorAdmin, self).get_urls()
admin.site.register(models.ServerError, ServerErrorAdmin)
| 38.21875 | 121 | 0.628618 |
4a2601226669c55c36c016a5a9ef915b3224757b | 3,410 | py | Python | sdk/cognitivelanguage/azure-ai-language-conversations/samples/sample_analyze_orchestration_app_qna_response.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-01-24T08:54:57.000Z | 2022-01-24T08:54:57.000Z | sdk/cognitivelanguage/azure-ai-language-conversations/samples/sample_analyze_orchestration_app_qna_response.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/cognitivelanguage/azure-ai-language-conversations/samples/sample_analyze_orchestration_app_qna_response.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
"""
FILE: sample_analyze_orchestration_app_qna_response.py
DESCRIPTION:
This sample demonstrates how to analyze user query using an orchestration project.
In this sample, orchestration project's top intent will map to a Qna project.
For more info about how to setup a CLU orchestration project, see the README.
USAGE:
python sample_analyze_orchestration_app_qna_response.py
Set the environment variables with your own values before running the sample:
1) AZURE_CONVERSATIONS_ENDPOINT - endpoint for your CLU resource.
2) AZURE_CONVERSATIONS_KEY - API key for your CLU resource.
3) AZURE_CONVERSATIONS_WORKFLOW_PROJECT_NAME - project name for your CLU orchestration project.
4) AZURE_CONVERSATIONS_WORKFLOW_DEPLOYMENT_NAME - deployment name for your CLU orchestration project.
"""
def sample_analyze_orchestration_app_qna_response():
# [START analyze_orchestration_app_qna_response]
# import libraries
import os
from azure.core.credentials import AzureKeyCredential
from azure.ai.language.conversations import ConversationAnalysisClient
from azure.ai.language.conversations.models import (
CustomConversationalTask,
ConversationAnalysisOptions,
CustomConversationTaskParameters,
TextConversationItem
)
# get secrets
clu_endpoint = os.environ["AZURE_CONVERSATIONS_ENDPOINT"]
clu_key = os.environ["AZURE_CONVERSATIONS_KEY"]
project_name = os.environ["AZURE_CONVERSATIONS_WORKFLOW_PROJECT_NAME"]
deployment_name = os.environ["AZURE_CONVERSATIONS_WORKFLOW_DEPLOYMENT_NAME"]
# analyze query
client = ConversationAnalysisClient(clu_endpoint, AzureKeyCredential(clu_key))
with client:
query = "How are you?"
result = client.analyze_conversation(
task=CustomConversationalTask(
analysis_input=ConversationAnalysisOptions(
conversation_item=TextConversationItem(
text=query
)
),
parameters=CustomConversationTaskParameters(
project_name=project_name,
deployment_name=deployment_name
)
)
)
# view result
print("query: {}".format(result.results.query))
print("project kind: {}\n".format(result.results.prediction.project_kind))
# top intent
top_intent = result.results.prediction.top_intent
print("top intent: {}".format(top_intent))
top_intent_object = result.results.prediction.intents[top_intent]
print("confidence score: {}".format(top_intent_object.confidence))
print("project kind: {}".format(top_intent_object.target_kind))
if top_intent_object.target_kind == "question_answering":
print("\nview qna result:")
qna_result = top_intent_object.result
for answer in qna_result.answers:
print("\nanswer: {}".format(answer.answer))
print("answer: {}".format(answer.confidence))
# [END analyze_orchestration_app_qna_response]
if __name__ == '__main__':
sample_analyze_orchestration_app_qna_response() | 40.117647 | 106 | 0.676833 |
4a2603845d35b4b4dc36e6ae25c9ad0bb815f71e | 867 | py | Python | var/spack/repos/builtin/packages/dnstracer/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 9 | 2018-04-18T07:51:40.000Z | 2021-09-10T03:56:57.000Z | var/spack/repos/builtin/packages/dnstracer/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 907 | 2018-04-18T11:17:57.000Z | 2022-03-31T13:20:25.000Z | var/spack/repos/builtin/packages/dnstracer/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 29 | 2018-11-05T16:14:23.000Z | 2022-02-03T16:07:09.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Dnstracer(Package):
"""Dnstracer determines where a given Domain Name Server gets
its information from, and follows the chain of DNS servers back to
the servers which know the data."""
homepage = "https://github.com/Orc/dnstracer"
git = "https://github.com/Orc/dnstracer.git"
version('master', branch='master')
phases = ['configure', 'build', 'install']
def configure(self, spec, prefix):
configure = Executable('./configure.sh')
configure('--prefix={0}'.format(prefix))
def build(self, spec, prefix):
make()
def install(self, spec, prefix):
make('install')
| 28.9 | 73 | 0.672434 |
4a260391c23291acb9ce469940ae518e2e90b45c | 2,626 | py | Python | square__heisenberg__static/square_heisenberg_linearoperator.py | ryuikaneko/quspin_example | 931ca2ea5e6bbe02ebdd6d6a22d90db24d6c760c | [
"BSD-3-Clause"
] | null | null | null | square__heisenberg__static/square_heisenberg_linearoperator.py | ryuikaneko/quspin_example | 931ca2ea5e6bbe02ebdd6d6a22d90db24d6c760c | [
"BSD-3-Clause"
] | null | null | null | square__heisenberg__static/square_heisenberg_linearoperator.py | ryuikaneko/quspin_example | 931ca2ea5e6bbe02ebdd6d6a22d90db24d6c760c | [
"BSD-3-Clause"
] | null | null | null | ## http://weinbe58.github.io/QuSpin/generated/quspin.basis.spin_basis_general.html#quspin.basis.spin_basis_general
## http://weinbe58.github.io/QuSpin/generated/quspin.operators.quantum_LinearOperator.html
from __future__ import print_function, division
#from quspin.operators import hamiltonian # operators
from quspin.operators import quantum_LinearOperator # Hamiltonians and operators
from quspin.basis import spin_basis_general # spin basis constructor
import numpy as np # general math functions
import scipy.sparse.linalg
#
###### define model parameters ######
J = 1.0 # spin-spin interaction
Lx, Ly = 4, 4 # linear dimension of 2d lattice
#Lx, Ly = 6, 6 # linear dimension of 2d lattice
N_2d = Lx*Ly # number of sites
#
###### setting up user-defined symmetry transformations for 2d lattice ######
s = np.arange(N_2d) # sites [0,1,2,....]
x = s%Lx # x positions for sites
y = s//Lx # y positions for sites
T_x = (x+1)%Lx + Lx*y # translation along x-direction
T_y = x +Lx*((y+1)%Ly) # translation along y-direction
P_x = x + Lx*(Ly-y-1) # reflection about x-axis
P_y = (Lx-x-1) + Lx*y # reflection about y-axis
Z = -(s+1) # spin inversion
#
###### setting up bases ######
basis_2d = spin_basis_general(N=N_2d,Nup=N_2d//2,S="1/2",pauli=0,kxblock=(T_x,0),kyblock=(T_y,0),pxblock=(P_x,0),pyblock=(P_y,0),zblock=(Z,0))
#basis_2d = spin_basis_general(N=N_2d,Nup=N_2d//2,S="1/2",pauli=0)
#
###### setting up hamiltonian ######
# setting up site-coupling lists
Jzzs = [[J,i,T_x[i]] for i in range(N_2d)]+[[J,i,T_y[i]] for i in range(N_2d)]
Jpms = [[0.5*J,i,T_x[i]] for i in range(N_2d)]+[[0.5*J,i,T_y[i]] for i in range(N_2d)]
Jmps = [[0.5*J,i,T_x[i]] for i in range(N_2d)]+[[0.5*J,i,T_y[i]] for i in range(N_2d)]
#
static = [["zz",Jzzs],["+-",Jpms],["-+",Jmps]]
# build hamiltonian
#H = hamiltonian(static,[],static_fmt="csr",basis=basis_2d,dtype=np.float64)
#H = quantum_LinearOperator(static,basis=basis_2d,dtype=np.float64)
no_checks = dict(check_symm=False, check_pcon=False, check_herm=False)
#H = hamiltonian(static,[],static_fmt="csr",basis=basis_2d,dtype=np.float64,**no_checks)
H = quantum_LinearOperator(static,basis=basis_2d,dtype=np.float64,**no_checks)
# diagonalise H
#ene,vec = H.eigsh(time=0.0,which="SA",k=2)
#ene,vec = H.eigsh(which="SA",k=2)
#ene = H.eigsh(which="SA",k=2,return_eigenvectors=False); ene = np.sort(ene)
## https://weinbe58.github.io/QuSpin/parallelization.html#parallel-support-in-the-operator-module-hamiltonian-quantum-operator-and-quantum-linearoperator
ene = scipy.sparse.linalg.eigsh(H,which="SA",k=2,return_eigenvectors=False); ene = np.sort(ene)
print(J,ene[0]/N_2d,ene[1]/N_2d)
| 52.52 | 153 | 0.714014 |
4a26053aff24417fd2b56efd74ae8de312b64ecf | 23,683 | py | Python | src/twisted/conch/scripts/tkconch.py | apjanke/twisted | 22f949f7ce187513f0c218b73186c8a73baa00b4 | [
"Unlicense",
"MIT"
] | 1 | 2021-01-03T01:54:14.000Z | 2021-01-03T01:54:14.000Z | src/twisted/conch/scripts/tkconch.py | zerospam/twisted | e23b5e2040a4d643bc6a43785621358569886a0d | [
"MIT",
"Unlicense"
] | null | null | null | src/twisted/conch/scripts/tkconch.py | zerospam/twisted | e23b5e2040a4d643bc6a43785621358569886a0d | [
"MIT",
"Unlicense"
] | null | null | null | # -*- test-case-name: twisted.conch.test.test_scripts -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation module for the `tkconch` command.
"""
from twisted.conch import error
from twisted.conch.ui import tkvt100
from twisted.conch.ssh import transport, userauth, connection, common, keys
from twisted.conch.ssh import session, forwarding, channel
from twisted.conch.client.default import isInKnownHosts
from twisted.internet import reactor, defer, protocol, tksupport
from twisted.python import usage, log
import base64
import getpass
import os
import signal
import struct
import sys
from typing import List, Tuple
import tkinter as Tkinter
import tkinter.filedialog as tkFileDialog
import tkinter.messagebox as tkMessageBox
class TkConchMenu(Tkinter.Frame):
def __init__(self, *args, **params):
## Standard heading: initialization
Tkinter.Frame.__init__(self, *args, **params)
self.master.title("TkConch")
self.localRemoteVar = Tkinter.StringVar()
self.localRemoteVar.set("local")
Tkinter.Label(self, anchor="w", justify="left", text="Hostname").grid(
column=1, row=1, sticky="w"
)
self.host = Tkinter.Entry(self)
self.host.grid(column=2, columnspan=2, row=1, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="Port").grid(
column=1, row=2, sticky="w"
)
self.port = Tkinter.Entry(self)
self.port.grid(column=2, columnspan=2, row=2, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="Username").grid(
column=1, row=3, sticky="w"
)
self.user = Tkinter.Entry(self)
self.user.grid(column=2, columnspan=2, row=3, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="Command").grid(
column=1, row=4, sticky="w"
)
self.command = Tkinter.Entry(self)
self.command.grid(column=2, columnspan=2, row=4, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="Identity").grid(
column=1, row=5, sticky="w"
)
self.identity = Tkinter.Entry(self)
self.identity.grid(column=2, row=5, sticky="nesw")
Tkinter.Button(self, command=self.getIdentityFile, text="Browse").grid(
column=3, row=5, sticky="nesw"
)
Tkinter.Label(self, text="Port Forwarding").grid(column=1, row=6, sticky="w")
self.forwards = Tkinter.Listbox(self, height=0, width=0)
self.forwards.grid(column=2, columnspan=2, row=6, sticky="nesw")
Tkinter.Button(self, text="Add", command=self.addForward).grid(column=1, row=7)
Tkinter.Button(self, text="Remove", command=self.removeForward).grid(
column=1, row=8
)
self.forwardPort = Tkinter.Entry(self)
self.forwardPort.grid(column=2, row=7, sticky="nesw")
Tkinter.Label(self, text="Port").grid(column=3, row=7, sticky="nesw")
self.forwardHost = Tkinter.Entry(self)
self.forwardHost.grid(column=2, row=8, sticky="nesw")
Tkinter.Label(self, text="Host").grid(column=3, row=8, sticky="nesw")
self.localForward = Tkinter.Radiobutton(
self, text="Local", variable=self.localRemoteVar, value="local"
)
self.localForward.grid(column=2, row=9)
self.remoteForward = Tkinter.Radiobutton(
self, text="Remote", variable=self.localRemoteVar, value="remote"
)
self.remoteForward.grid(column=3, row=9)
Tkinter.Label(self, text="Advanced Options").grid(
column=1, columnspan=3, row=10, sticky="nesw"
)
Tkinter.Label(self, anchor="w", justify="left", text="Cipher").grid(
column=1, row=11, sticky="w"
)
self.cipher = Tkinter.Entry(self, name="cipher")
self.cipher.grid(column=2, columnspan=2, row=11, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="MAC").grid(
column=1, row=12, sticky="w"
)
self.mac = Tkinter.Entry(self, name="mac")
self.mac.grid(column=2, columnspan=2, row=12, sticky="nesw")
Tkinter.Label(self, anchor="w", justify="left", text="Escape Char").grid(
column=1, row=13, sticky="w"
)
self.escape = Tkinter.Entry(self, name="escape")
self.escape.grid(column=2, columnspan=2, row=13, sticky="nesw")
Tkinter.Button(self, text="Connect!", command=self.doConnect).grid(
column=1, columnspan=3, row=14, sticky="nesw"
)
# Resize behavior(s)
self.grid_rowconfigure(6, weight=1, minsize=64)
self.grid_columnconfigure(2, weight=1, minsize=2)
self.master.protocol("WM_DELETE_WINDOW", sys.exit)
def getIdentityFile(self):
r = tkFileDialog.askopenfilename()
if r:
self.identity.delete(0, Tkinter.END)
self.identity.insert(Tkinter.END, r)
def addForward(self):
port = self.forwardPort.get()
self.forwardPort.delete(0, Tkinter.END)
host = self.forwardHost.get()
self.forwardHost.delete(0, Tkinter.END)
if self.localRemoteVar.get() == "local":
self.forwards.insert(Tkinter.END, "L:%s:%s" % (port, host))
else:
self.forwards.insert(Tkinter.END, "R:%s:%s" % (port, host))
def removeForward(self):
cur = self.forwards.curselection()
if cur:
self.forwards.remove(cur[0])
def doConnect(self):
finished = 1
options["host"] = self.host.get()
options["port"] = self.port.get()
options["user"] = self.user.get()
options["command"] = self.command.get()
cipher = self.cipher.get()
mac = self.mac.get()
escape = self.escape.get()
if cipher:
if cipher in SSHClientTransport.supportedCiphers:
SSHClientTransport.supportedCiphers = [cipher]
else:
tkMessageBox.showerror("TkConch", "Bad cipher.")
finished = 0
if mac:
if mac in SSHClientTransport.supportedMACs:
SSHClientTransport.supportedMACs = [mac]
elif finished:
tkMessageBox.showerror("TkConch", "Bad MAC.")
finished = 0
if escape:
if escape == "none":
options["escape"] = None
elif escape[0] == "^" and len(escape) == 2:
options["escape"] = chr(ord(escape[1]) - 64)
elif len(escape) == 1:
options["escape"] = escape
elif finished:
tkMessageBox.showerror("TkConch", "Bad escape character '%s'." % escape)
finished = 0
if self.identity.get():
options.identitys.append(self.identity.get())
for line in self.forwards.get(0, Tkinter.END):
if line[0] == "L":
options.opt_localforward(line[2:])
else:
options.opt_remoteforward(line[2:])
if "@" in options["host"]:
options["user"], options["host"] = options["host"].split("@", 1)
if (not options["host"] or not options["user"]) and finished:
tkMessageBox.showerror("TkConch", "Missing host or username.")
finished = 0
if finished:
self.master.quit()
self.master.destroy()
if options["log"]:
realout = sys.stdout
log.startLogging(sys.stderr)
sys.stdout = realout
else:
log.discardLogs()
log.deferr = handleError # HACK
if not options.identitys:
options.identitys = ["~/.ssh/id_rsa", "~/.ssh/id_dsa"]
host = options["host"]
port = int(options["port"] or 22)
log.msg((host, port))
reactor.connectTCP(host, port, SSHClientFactory())
frame.master.deiconify()
frame.master.title("%s@%s - TkConch" % (options["user"], options["host"]))
else:
self.focus()
class GeneralOptions(usage.Options):
synopsis = """Usage: tkconch [options] host [command]
"""
optParameters = [
["user", "l", None, "Log in using this user name."],
["identity", "i", "~/.ssh/identity", "Identity for public key authentication"],
["escape", "e", "~", "Set escape character; ``none'' = disable"],
["cipher", "c", None, "Select encryption algorithm."],
["macs", "m", None, "Specify MAC algorithms for protocol version 2."],
["port", "p", None, "Connect to this port. Server must be on the same port."],
[
"localforward",
"L",
None,
"listen-port:host:port Forward local port to remote address",
],
[
"remoteforward",
"R",
None,
"listen-port:host:port Forward remote port to local address",
],
]
optFlags = [
["tty", "t", "Tty; allocate a tty even if command is given."],
["notty", "T", "Do not allocate a tty."],
["version", "V", "Display version number only."],
["compress", "C", "Enable compression."],
["noshell", "N", "Do not execute a shell or command."],
["subsystem", "s", "Invoke command (mandatory) as SSH2 subsystem."],
["log", "v", "Log to stderr"],
["ansilog", "a", "Print the received data to stdout"],
]
_ciphers = transport.SSHClientTransport.supportedCiphers
_macs = transport.SSHClientTransport.supportedMACs
compData = usage.Completions(
mutuallyExclusive=[("tty", "notty")],
optActions={
"cipher": usage.CompleteList(_ciphers),
"macs": usage.CompleteList(_macs),
"localforward": usage.Completer(descr="listen-port:host:port"),
"remoteforward": usage.Completer(descr="listen-port:host:port"),
},
extraActions=[
usage.CompleteUserAtHost(),
usage.Completer(descr="command"),
usage.Completer(descr="argument", repeat=True),
],
)
identitys = [] # type: List[str]
localForwards = [] # type: List[Tuple[int, Tuple[int, int]]]
remoteForwards = [] # type: List[Tuple[int, Tuple[int, int]]]
def opt_identity(self, i):
self.identitys.append(i)
def opt_localforward(self, f):
localPort, remoteHost, remotePort = f.split(":") # doesn't do v6 yet
localPort = int(localPort)
remotePort = int(remotePort)
self.localForwards.append((localPort, (remoteHost, remotePort)))
def opt_remoteforward(self, f):
remotePort, connHost, connPort = f.split(":") # doesn't do v6 yet
remotePort = int(remotePort)
connPort = int(connPort)
self.remoteForwards.append((remotePort, (connHost, connPort)))
def opt_compress(self):
SSHClientTransport.supportedCompressions[0:1] = ["zlib"]
def parseArgs(self, *args):
if args:
self["host"] = args[0]
self["command"] = " ".join(args[1:])
else:
self["host"] = ""
self["command"] = ""
# Rest of code in "run"
options = None
menu = None
exitStatus = 0
frame = None
def deferredAskFrame(question, echo):
if frame.callback:
raise ValueError("can't ask 2 questions at once!")
d = defer.Deferred()
resp = []
def gotChar(ch, resp=resp):
if not ch:
return
if ch == "\x03": # C-c
reactor.stop()
if ch == "\r":
frame.write("\r\n")
stresp = "".join(resp)
del resp
frame.callback = None
d.callback(stresp)
return
elif 32 <= ord(ch) < 127:
resp.append(ch)
if echo:
frame.write(ch)
elif ord(ch) == 8 and resp: # BS
if echo:
frame.write("\x08 \x08")
resp.pop()
frame.callback = gotChar
frame.write(question)
frame.canvas.focus_force()
return d
def run():
global menu, options, frame
args = sys.argv[1:]
if "-l" in args: # cvs is an idiot
i = args.index("-l")
args = args[i : i + 2] + args
del args[i + 2 : i + 4]
for arg in args[:]:
try:
i = args.index(arg)
if arg[:2] == "-o" and args[i + 1][0] != "-":
args[i : i + 2] = [] # suck on it scp
except ValueError:
pass
root = Tkinter.Tk()
root.withdraw()
top = Tkinter.Toplevel()
menu = TkConchMenu(top)
menu.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=1)
options = GeneralOptions()
try:
options.parseOptions(args)
except usage.UsageError as u:
print("ERROR: %s" % u)
options.opt_help()
sys.exit(1)
for k, v in options.items():
if v and hasattr(menu, k):
getattr(menu, k).insert(Tkinter.END, v)
for (p, (rh, rp)) in options.localForwards:
menu.forwards.insert(Tkinter.END, "L:%s:%s:%s" % (p, rh, rp))
options.localForwards = []
for (p, (rh, rp)) in options.remoteForwards:
menu.forwards.insert(Tkinter.END, "R:%s:%s:%s" % (p, rh, rp))
options.remoteForwards = []
frame = tkvt100.VT100Frame(root, callback=None)
root.geometry(
"%dx%d"
% (tkvt100.fontWidth * frame.width + 3, tkvt100.fontHeight * frame.height + 3)
)
frame.pack(side=Tkinter.TOP)
tksupport.install(root)
root.withdraw()
if (options["host"] and options["user"]) or "@" in options["host"]:
menu.doConnect()
else:
top.mainloop()
reactor.run()
sys.exit(exitStatus)
def handleError():
from twisted.python import failure
global exitStatus
exitStatus = 2
log.err(failure.Failure())
reactor.stop()
raise
class SSHClientFactory(protocol.ClientFactory):
noisy = True
def stopFactory(self):
reactor.stop()
def buildProtocol(self, addr):
return SSHClientTransport()
def clientConnectionFailed(self, connector, reason):
tkMessageBox.showwarning(
"TkConch",
"Connection Failed, Reason:\n %s: %s" % (reason.type, reason.value),
)
class SSHClientTransport(transport.SSHClientTransport):
def receiveError(self, code, desc):
global exitStatus
exitStatus = (
"conch:\tRemote side disconnected with error code %i\nconch:\treason: %s"
% (code, desc)
)
def sendDisconnect(self, code, reason):
global exitStatus
exitStatus = (
"conch:\tSending disconnect with error code %i\nconch:\treason: %s"
% (code, reason)
)
transport.SSHClientTransport.sendDisconnect(self, code, reason)
def receiveDebug(self, alwaysDisplay, message, lang):
global options
if alwaysDisplay or options["log"]:
log.msg("Received Debug Message: %s" % message)
def verifyHostKey(self, pubKey, fingerprint):
# d = defer.Deferred()
# d.addCallback(lambda x:defer.succeed(1))
# d.callback(2)
# return d
goodKey = isInKnownHosts(options["host"], pubKey, {"known-hosts": None})
if goodKey == 1: # good key
return defer.succeed(1)
elif goodKey == 2: # AAHHHHH changed
return defer.fail(error.ConchError("bad host key"))
else:
if options["host"] == self.transport.getPeer().host:
host = options["host"]
khHost = options["host"]
else:
host = "%s (%s)" % (options["host"], self.transport.getPeer().host)
khHost = "%s,%s" % (options["host"], self.transport.getPeer().host)
keyType = common.getNS(pubKey)[0]
ques = """The authenticity of host '%s' can't be established.\r
%s key fingerprint is %s.""" % (
host,
{b"ssh-dss": "DSA", b"ssh-rsa": "RSA"}[keyType],
fingerprint,
)
ques += "\r\nAre you sure you want to continue connecting (yes/no)? "
return deferredAskFrame(ques, 1).addCallback(
self._cbVerifyHostKey, pubKey, khHost, keyType
)
def _cbVerifyHostKey(self, ans, pubKey, khHost, keyType):
if ans.lower() not in ("yes", "no"):
return deferredAskFrame("Please type 'yes' or 'no': ", 1).addCallback(
self._cbVerifyHostKey, pubKey, khHost, keyType
)
if ans.lower() == "no":
frame.write("Host key verification failed.\r\n")
raise error.ConchError("bad host key")
try:
frame.write(
"Warning: Permanently added '%s' (%s) to the list of "
"known hosts.\r\n"
% (khHost, {b"ssh-dss": "DSA", b"ssh-rsa": "RSA"}[keyType])
)
with open(os.path.expanduser("~/.ssh/known_hosts"), "a") as known_hosts:
encodedKey = base64.b64encode(pubKey)
known_hosts.write("\n%s %s %s" % (khHost, keyType, encodedKey))
except BaseException:
log.deferr()
raise error.ConchError
def connectionSecure(self):
if options["user"]:
user = options["user"]
else:
user = getpass.getuser()
self.requestService(SSHUserAuthClient(user, SSHConnection()))
class SSHUserAuthClient(userauth.SSHUserAuthClient):
usedFiles = [] # type: List[str]
def getPassword(self, prompt=None):
if not prompt:
prompt = "%s@%s's password: " % (self.user, options["host"])
return deferredAskFrame(prompt, 0)
def getPublicKey(self):
files = [x for x in options.identitys if x not in self.usedFiles]
if not files:
return None
file = files[0]
log.msg(file)
self.usedFiles.append(file)
file = os.path.expanduser(file)
file += ".pub"
if not os.path.exists(file):
return
try:
return keys.Key.fromFile(file).blob()
except:
return self.getPublicKey() # try again
def getPrivateKey(self):
file = os.path.expanduser(self.usedFiles[-1])
if not os.path.exists(file):
return None
try:
return defer.succeed(keys.Key.fromFile(file).keyObject)
except keys.BadKeyError as e:
if e.args[0] == "encrypted key with no password":
prompt = "Enter passphrase for key '%s': " % self.usedFiles[-1]
return deferredAskFrame(prompt, 0).addCallback(self._cbGetPrivateKey, 0)
def _cbGetPrivateKey(self, ans, count):
file = os.path.expanduser(self.usedFiles[-1])
try:
return keys.Key.fromFile(file, password=ans).keyObject
except keys.BadKeyError:
if count == 2:
raise
prompt = "Enter passphrase for key '%s': " % self.usedFiles[-1]
return deferredAskFrame(prompt, 0).addCallback(
self._cbGetPrivateKey, count + 1
)
class SSHConnection(connection.SSHConnection):
def serviceStarted(self):
if not options["noshell"]:
self.openChannel(SSHSession())
if options.localForwards:
for localPort, hostport in options.localForwards:
reactor.listenTCP(
localPort,
forwarding.SSHListenForwardingFactory(
self, hostport, forwarding.SSHListenClientForwardingChannel
),
)
if options.remoteForwards:
for remotePort, hostport in options.remoteForwards:
log.msg(
"asking for remote forwarding for %s:%s" % (remotePort, hostport)
)
data = forwarding.packGlobal_tcpip_forward(("0.0.0.0", remotePort))
self.sendGlobalRequest("tcpip-forward", data)
self.remoteForwards[remotePort] = hostport
class SSHSession(channel.SSHChannel):
name = b"session"
def channelOpen(self, foo):
# global globalSession
# globalSession = self
# turn off local echo
self.escapeMode = 1
c = session.SSHSessionClient()
if options["escape"]:
c.dataReceived = self.handleInput
else:
c.dataReceived = self.write
c.connectionLost = self.sendEOF
frame.callback = c.dataReceived
frame.canvas.focus_force()
if options["subsystem"]:
self.conn.sendRequest(self, b"subsystem", common.NS(options["command"]))
elif options["command"]:
if options["tty"]:
term = os.environ.get("TERM", "xterm")
# winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
winSize = (25, 80, 0, 0) # struct.unpack('4H', winsz)
ptyReqData = session.packRequest_pty_req(term, winSize, "")
self.conn.sendRequest(self, b"pty-req", ptyReqData)
self.conn.sendRequest(self, "exec", common.NS(options["command"]))
else:
if not options["notty"]:
term = os.environ.get("TERM", "xterm")
# winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
winSize = (25, 80, 0, 0) # struct.unpack('4H', winsz)
ptyReqData = session.packRequest_pty_req(term, winSize, "")
self.conn.sendRequest(self, b"pty-req", ptyReqData)
self.conn.sendRequest(self, b"shell", b"")
self.conn.transport.transport.setTcpNoDelay(1)
def handleInput(self, char):
# log.msg('handling %s' % repr(char))
if char in ("\n", "\r"):
self.escapeMode = 1
self.write(char)
elif self.escapeMode == 1 and char == options["escape"]:
self.escapeMode = 2
elif self.escapeMode == 2:
self.escapeMode = 1 # so we can chain escapes together
if char == ".": # disconnect
log.msg("disconnecting from escape")
reactor.stop()
return
elif char == "\x1a": # ^Z, suspend
# following line courtesy of Erwin@freenode
os.kill(os.getpid(), signal.SIGSTOP)
return
elif char == "R": # rekey connection
log.msg("rekeying connection")
self.conn.transport.sendKexInit()
return
self.write("~" + char)
else:
self.escapeMode = 0
self.write(char)
def dataReceived(self, data):
data = data.decode("utf-8")
if options["ansilog"]:
print(repr(data))
frame.write(data)
def extReceived(self, t, data):
if t == connection.EXTENDED_DATA_STDERR:
log.msg("got %s stderr data" % len(data))
sys.stderr.write(data)
sys.stderr.flush()
def eofReceived(self):
log.msg("got eof")
sys.stdin.close()
def closed(self):
log.msg("closed %s" % self)
if len(self.conn.channels) == 1: # just us left
reactor.stop()
def request_exit_status(self, data):
global exitStatus
exitStatus = int(struct.unpack(">L", data)[0])
log.msg("exit status: %s" % exitStatus)
def sendEOF(self):
self.conn.sendEOF(self)
if __name__ == "__main__":
run()
| 35.667169 | 88 | 0.564793 |
4a2605eeb99af01df307854567dadef8bd6860a5 | 5,159 | py | Python | sdk/python/pulumi_aws_native/iotcoredeviceadvisor/get_suite_definition.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 29 | 2021-09-30T19:32:07.000Z | 2022-03-22T21:06:08.000Z | sdk/python/pulumi_aws_native/iotcoredeviceadvisor/get_suite_definition.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 232 | 2021-09-30T19:26:26.000Z | 2022-03-31T23:22:06.000Z | sdk/python/pulumi_aws_native/iotcoredeviceadvisor/get_suite_definition.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 4 | 2021-11-10T19:42:01.000Z | 2022-02-05T10:15:49.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetSuiteDefinitionResult',
'AwaitableGetSuiteDefinitionResult',
'get_suite_definition',
'get_suite_definition_output',
]
@pulumi.output_type
class GetSuiteDefinitionResult:
def __init__(__self__, suite_definition_arn=None, suite_definition_configuration=None, suite_definition_id=None, suite_definition_version=None, tags=None):
if suite_definition_arn and not isinstance(suite_definition_arn, str):
raise TypeError("Expected argument 'suite_definition_arn' to be a str")
pulumi.set(__self__, "suite_definition_arn", suite_definition_arn)
if suite_definition_configuration and not isinstance(suite_definition_configuration, dict):
raise TypeError("Expected argument 'suite_definition_configuration' to be a dict")
pulumi.set(__self__, "suite_definition_configuration", suite_definition_configuration)
if suite_definition_id and not isinstance(suite_definition_id, str):
raise TypeError("Expected argument 'suite_definition_id' to be a str")
pulumi.set(__self__, "suite_definition_id", suite_definition_id)
if suite_definition_version and not isinstance(suite_definition_version, str):
raise TypeError("Expected argument 'suite_definition_version' to be a str")
pulumi.set(__self__, "suite_definition_version", suite_definition_version)
if tags and not isinstance(tags, list):
raise TypeError("Expected argument 'tags' to be a list")
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="suiteDefinitionArn")
def suite_definition_arn(self) -> Optional[str]:
"""
The Amazon Resource name for the suite definition.
"""
return pulumi.get(self, "suite_definition_arn")
@property
@pulumi.getter(name="suiteDefinitionConfiguration")
def suite_definition_configuration(self) -> Optional['outputs.SuiteDefinitionConfigurationProperties']:
return pulumi.get(self, "suite_definition_configuration")
@property
@pulumi.getter(name="suiteDefinitionId")
def suite_definition_id(self) -> Optional[str]:
"""
The unique identifier for the suite definition.
"""
return pulumi.get(self, "suite_definition_id")
@property
@pulumi.getter(name="suiteDefinitionVersion")
def suite_definition_version(self) -> Optional[str]:
"""
The suite definition version of a test suite.
"""
return pulumi.get(self, "suite_definition_version")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence['outputs.SuiteDefinitionTag']]:
"""
An array of key-value pairs to apply to this resource.
"""
return pulumi.get(self, "tags")
class AwaitableGetSuiteDefinitionResult(GetSuiteDefinitionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSuiteDefinitionResult(
suite_definition_arn=self.suite_definition_arn,
suite_definition_configuration=self.suite_definition_configuration,
suite_definition_id=self.suite_definition_id,
suite_definition_version=self.suite_definition_version,
tags=self.tags)
def get_suite_definition(suite_definition_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSuiteDefinitionResult:
"""
An example resource schema demonstrating some basic constructs and validation rules.
:param str suite_definition_id: The unique identifier for the suite definition.
"""
__args__ = dict()
__args__['suiteDefinitionId'] = suite_definition_id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws-native:iotcoredeviceadvisor:getSuiteDefinition', __args__, opts=opts, typ=GetSuiteDefinitionResult).value
return AwaitableGetSuiteDefinitionResult(
suite_definition_arn=__ret__.suite_definition_arn,
suite_definition_configuration=__ret__.suite_definition_configuration,
suite_definition_id=__ret__.suite_definition_id,
suite_definition_version=__ret__.suite_definition_version,
tags=__ret__.tags)
@_utilities.lift_output_func(get_suite_definition)
def get_suite_definition_output(suite_definition_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSuiteDefinitionResult]:
"""
An example resource schema demonstrating some basic constructs and validation rules.
:param str suite_definition_id: The unique identifier for the suite definition.
"""
...
| 41.943089 | 159 | 0.721845 |
4a2605f4d05f23ddf75b147f827fb14199d03569 | 4,627 | py | Python | rlalgos/a2c/agent.py | ludc/rlstructures | 99fa91bb4e955d31348bed007f25b41641c9fa73 | [
"MIT"
] | null | null | null | rlalgos/a2c/agent.py | ludc/rlstructures | 99fa91bb4e955d31348bed007f25b41641c9fa73 | [
"MIT"
] | null | null | null | rlalgos/a2c/agent.py | ludc/rlstructures | 99fa91bb4e955d31348bed007f25b41641c9fa73 | [
"MIT"
] | null | null | null | #
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import torch.nn as nn
import rlstructures.logging as logging
from rlstructures import DictTensor
from rlstructures import Agent
import time
class NNAgent(Agent):
    """
    Describes a discrete agent based on a model that produces a score for each
    possible action, and an estimation of the value function in the current
    state.
    """

    def __init__(self, model=None, n_actions=None):
        """
        Args:
            model (nn.Module): a module producing a tuple: (actions scores, value)
            n_actions (int): the number of possible actions
        """
        super().__init__()
        self.model = model
        self.n_actions = n_actions
        self.z_size = self.model.initial_state(1).size()[1]

    def update(self, state_dict):
        self.model.load_state_dict(state_dict)

    def __call__(self, state, observation, agent_info=None, history=None):
        """
        Executing one step of the agent
        """
        # Verify that the batch size is 1
        initial_state = observation["initial_state"]
        B = observation.n_elems()
        if agent_info is None:
            agent_info = DictTensor({"stochastic": torch.tensor([True]).repeat(B)})
        model_initial_state = self.model.initial_state(B)
        agent_state = None
        agent_step = None
        if state is None:
            assert initial_state.all()
            agent_state = model_initial_state
            agent_step = torch.zeros(B).long()
        else:
            _is = (
                initial_state.float()
                .unsqueeze(-1)
                .repeat(1, model_initial_state.size()[1])
            )
            agent_state = _is * model_initial_state + (1 - _is) * state["agent_state"]
            agent_step = (
                initial_state.float() * torch.zeros(B)
                + (1 - initial_state.float()) * state["agent_step"]
            ).long()
        score_action, value, next_state = self.model(
            agent_state, observation["frame"], observation["last_action"]
        )
        action_proba = torch.softmax(score_action, dim=1)
        dist = torch.distributions.Categorical(action_proba)
        action_sampled = dist.sample()
        action_max = action_proba.max(1)[1]
        smask = agent_info["stochastic"].float()
        action = (action_sampled * smask + (1 - smask) * action_max).long()
        new_state = DictTensor(
            {"agent_state": next_state, "agent_step": agent_step + 1}
        )
        agent_do = DictTensor(
            {"action": action, "action_probabilities": action_proba}
        )
        state = DictTensor({"agent_state": agent_state, "agent_step": agent_step})
        return state, agent_do, new_state


class MLPAgentModel(nn.Module):
    def __init__(self, n_observations, n_actions, n_hidden):
        super().__init__()
        self.linear = nn.Linear(n_observations, n_hidden)
        self.linear2 = nn.Linear(n_hidden, n_actions)
        self.linear_v = nn.Linear(n_hidden, 1)

    def initial_state(self, batch_size):
        d = next(self.parameters()).device
        return torch.zeros(batch_size, 1).to(d)

    def forward(self, state, frame, last_action):
        z = torch.tanh(self.linear(frame))
        score_actions = self.linear2(z)
        value = self.linear_v(z)
        return score_actions, value, self.initial_state(frame.size()[0]) + 1.0


class GRUAgentModel(nn.Module):
    def __init__(self, n_observations, n_actions, n_hidden):
        super().__init__()
        self.linear = nn.Linear(n_hidden, n_actions)
        self.linear_obs = nn.Linear(n_observations, n_hidden)
        self.linear_action = nn.Linear(n_actions, n_hidden)
        self.gru = nn.GRUCell(2 * n_hidden, n_hidden)
        self.linear_v = nn.Linear(n_hidden, 1)
        self.n_hidden = n_hidden
        self.n_actions = n_actions

    def initial_state(self, batch_size):
        d = next(self.parameters()).device
        return torch.zeros(batch_size, self.n_hidden).to(d)

    def forward(self, state, frame, last_action):
        frame = torch.relu(self.linear_obs(frame))
        B = frame.size()[0]
        oh = torch.zeros(B, self.n_actions).to(frame.device)
        oh[torch.arange(B).to(frame.device), last_action] = 1.0
        oh = self.linear_action(oh)
        z = self.gru(torch.cat([frame, oh], dim=1), state)
        score_actions = self.linear(z)
        value = self.linear_v(z)
        return score_actions, value, z
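if __name__ == "__main__":
    # Minimal usage sketch. The sizes below are illustrative assumptions, not
    # values taken from this repository; they only show how NNAgent is wired
    # to one of the models defined above.
    _model = GRUAgentModel(n_observations=4, n_actions=2, n_hidden=16)
    _agent = NNAgent(model=_model, n_actions=2)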
| 34.529851 | 86 | 0.625027 |
4a26064f977d8d8a2be39a6137e9775d9a18824f | 1,133 | py | Python | www/www/wsgi.py | mattvenn/cursivedata | 43e43263bef6f01698166d87bcff00b246957277 | [
"CC-BY-4.0"
] | 1 | 2015-12-31T01:46:09.000Z | 2015-12-31T01:46:09.000Z | www/www/wsgi.py | mattvenn/cursivedata | 43e43263bef6f01698166d87bcff00b246957277 | [
"CC-BY-4.0"
] | 5 | 2015-04-06T19:15:15.000Z | 2015-07-25T21:45:16.000Z | www/www/wsgi.py | mattvenn/cursivedata | 43e43263bef6f01698166d87bcff00b246957277 | [
"CC-BY-4.0"
] | null | null | null | """
WSGI config for testsite project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "www.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 39.068966 | 79 | 0.806708 |
4a260669bb4f6aac0cfacd80645cfe8f299f0e1d | 572 | py | Python | 034-factorialsum.py | johnsonZhaoxin/ProjectEuler | 74f3755d5f0e305897556b46515babe55429a834 | [
"MIT"
] | null | null | null | 034-factorialsum.py | johnsonZhaoxin/ProjectEuler | 74f3755d5f0e305897556b46515babe55429a834 | [
"MIT"
] | null | null | null | 034-factorialsum.py | johnsonZhaoxin/ProjectEuler | 74f3755d5f0e305897556b46515babe55429a834 | [
"MIT"
] | null | null | null | from functools import lru_cache
@lru_cache(maxsize=None)
def factorial(n):
    if n == 1 or n == 0:
        return 1
    else:
        return n*factorial(n-1)

def seperate_num(num):
    digits = []
    [digits.append(int(i)) for i in str(num)]
    return digits

def get_digit_factorial(num):
    digits = seperate_num(num)
    sum = 0
    for i in digits:
        sum += factorial(i)
    return sum
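# Worked example of the property searched for below:
# 145 -> 1! + 4! + 5! = 1 + 24 + 120 = 145, so 145 equals the sum of the
# factorials of its own digits (40585 is the only other such number).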
# Main program
# TODO: pin down the digit count beyond which no further solutions are possible;
# only the two numbers 145 and 40585 have this property.
for i in range(3, int(1e5)):
    digit_factorial_sum = get_digit_factorial(i)
    if digit_factorial_sum == i:
        print(i) | 22 | 48 | 0.63986 |
4a2607283fda685daf05574ba370eefd33a66826 | 459 | py | Python | cli/maestro_cli/settings.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | 21 | 2022-02-02T11:52:14.000Z | 2022-02-06T02:44:16.000Z | cli/maestro_cli/settings.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | 78 | 2022-02-02T12:13:12.000Z | 2022-03-29T08:10:42.000Z | cli/maestro_cli/settings.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | null | null | null | import os
import pathlib
from dotenv import load_dotenv
from maestro_cli.enums import LogLevel
load_dotenv()
def parse_bool(str_value):
    return str_value.lower() in ["true", "1"]
APP_NAME = "maestro_cli"
LOG_LEVEL = os.environ.get("LOG_LEVEL", LogLevel.INFO.value)
ROOT_DIRECTORY = pathlib.Path().absolute()
MAESTRO_API_HOST = os.environ.get("MAESTRO_API_HOST", "http://localhost:5000")
MAESTRO_API_TOKEN = os.environ.get("MAESTRO_API_TOKEN", "")
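# Illustrative use of the parse_bool helper defined above (example values only,
# not settings from the original project):
#   parse_bool("True") -> True, parse_bool("1") -> True, parse_bool("no") -> False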
| 20.863636 | 78 | 0.75817 |
4a26079bd9655c1e57ba0b77f14ba00682a7bdc2 | 193 | py | Python | code/backend/src/api/blueprints/__init__.py | tobiasclaas/semantic-data-lake | 16fc25a74918c9ac4d95f14bf3c15af053cee53e | [
"MIT"
] | 1 | 2022-02-23T14:32:38.000Z | 2022-02-23T14:32:38.000Z | code/backend/src/api/blueprints/__init__.py | tobiasclaas/semantic-data-lake | 16fc25a74918c9ac4d95f14bf3c15af053cee53e | [
"MIT"
] | null | null | null | code/backend/src/api/blueprints/__init__.py | tobiasclaas/semantic-data-lake | 16fc25a74918c9ac4d95f14bf3c15af053cee53e | [
"MIT"
] | null | null | null | from .user import USER_BLUEPRINT
from .auth import AUTH_BLUEPRINT
from .datamarts import DATAMARTS_BLUEPRINT
from .workspace import WORKSPACE_BLUEPRINT
from .ontology import ONTOLOGY_BLUEPRINT
| 32.166667 | 42 | 0.870466 |
4a260885931cefdd544dfb83f58a6a8fa6577a51 | 1,605 | py | Python | base_extractors.py | leondz/entity_recognition | f5ef5aafc259139b20c2d54dd54dc1f6f239b605 | [
"Apache-2.0"
] | 63 | 2015-11-11T16:03:21.000Z | 2022-02-23T08:23:59.000Z | base_extractors.py | leondz/entity_recognition | f5ef5aafc259139b20c2d54dd54dc1f6f239b605 | [
"Apache-2.0"
] | 14 | 2015-11-09T19:03:12.000Z | 2015-11-12T08:29:26.000Z | base_extractors.py | leondz/entity-recognition | f5ef5aafc259139b20c2d54dd54dc1f6f239b605 | [
"Apache-2.0"
] | 15 | 2015-11-06T12:35:30.000Z | 2020-06-28T06:47:52.000Z | #!/usr/bin/env python3
# X : list of lists of instances, each instance is a list of feature reprs
# y : list of lists of labels
def word2features(sent, i):
    word = sent[i]
    features = [
        'bias',
        'word.lower=' + word.lower(),
        'word[-3:]=' + word[-3:],
        'word[-2:]=' + word[-2:],
        'word.isupper=%s' % word.isupper(),
        'word.istitle=%s' % word.istitle(),
        'word.isdigit=%s' % word.isdigit(),
    ]
    if i > 0:
        word1 = sent[i-1]
        features.extend([
            '-1word.lower=' + word1.lower(),
            '-1word.istitle=%s' % word1.istitle(),
            '-1word.isupper=%s' % word1.isupper(),
        ])
    else:
        features.append('BOS')
    if i < len(sent)-1:
        word1 = sent[i+1]
        features.extend([
            '+1word.lower=' + word1.lower(),
            '+1word.istitle=%s' % word1.istitle(),
            '+1word.isupper=%s' % word1.isupper(),
        ])
    else:
        features.append('EOS')
    return features

# takes a list of token/label pairs; returns a list of [feature]/label pairs
def featurise(sentence, brown_cluster={}):
    sentence_repr = []
    for i in range(len(sentence)):
        features = []
        # add brown features
        word = sentence[i]
        if word in brown_cluster:
            for j in range(1, len(brown_cluster[word])+1):
                features.append('p' + str(j) + 'b' + brown_cluster[word][0:j])
        # add per-word features
        features += word2features(sentence, i)
        sentence_repr.append(features)
    return sentence_repr
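# Minimal usage sketch (hypothetical sentence, no Brown clusters supplied):
#   featurise(["John", "lives", "in", "Berlin"])
# returns one feature list per token; the first token's list starts with
# ['bias', 'word.lower=john', 'word[-3:]=ohn', ...] and contains 'BOS'.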
| 26.75 | 78 | 0.530841 |
4a2608f0806ed0157b74005f4f99da8b0d13c33f | 1,494 | py | Python | acme/agents/jax/sqil/builder_test.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | acme/agents/jax/sqil/builder_test.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | acme/agents/jax/sqil/builder_test.py | wookayin/acme | 71b2ab8577a118c103718f034fa62c5ad2c0fd97 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the SQIL iterator."""
from acme import types
from acme.agents.jax.sqil import builder
import numpy as np
import reverb
from absl.testing import absltest
class BuilderTest(absltest.TestCase):

  def test_sqil_iterator(self):
    demonstrations = [
        types.Transition(np.array([[1], [2], [3]]), (), (), (), ())
    ]
    replay = [
        reverb.ReplaySample(
            info=(),
            data=types.Transition(np.array([[4], [5], [6]]), (), (), (), ()))
    ]
    sqil_it = builder._generate_sqil_samples(iter(demonstrations), iter(replay))
    np.testing.assert_array_equal(
        next(sqil_it).data.observation, np.array([[1], [3], [5]]))
    np.testing.assert_array_equal(
        next(sqil_it).data.observation, np.array([[2], [4], [6]]))
    self.assertRaises(StopIteration, lambda: next(sqil_it))


if __name__ == '__main__':
  absltest.main()
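# Note on the expected values: as the assertions show, _generate_sqil_samples
# interleaves the two streams along the batch dimension -- the first yielded
# sample combines demonstration observations [1], [3] with replay observation
# [5], and the second combines [2] with [4], [6].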
| 33.2 | 80 | 0.685408 |
4a2609448c3d77719addc4bbbefed8f1e23e538f | 3,097 | py | Python | config/settings.py | Kimkykie/elite-tracker | 78e5da38e4eaaf0855946aeb8b0fa9cd36d8e2d3 | [
"MIT"
] | null | null | null | config/settings.py | Kimkykie/elite-tracker | 78e5da38e4eaaf0855946aeb8b0fa9cd36d8e2d3 | [
"MIT"
] | null | null | null | config/settings.py | Kimkykie/elite-tracker | 78e5da38e4eaaf0855946aeb8b0fa9cd36d8e2d3 | [
"MIT"
] | null | null | null | """
Django settings for config project.
Generated by 'django-admin startproject' using Django 1.11.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qboflb)a8mkl%yca^d+6#=47@-jw*g73&y(ly2c-wkr-!8k@&h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| 25.595041 | 91 | 0.695512 |
4a2609bcc1187b6e84d10ddc4e5b7da3e337791b | 4,664 | py | Python | feedforward_cuda.py | abhishekbhakat/pyML | 44bc9915e0b42a8cc440d6f9ec3e5d7c4635bb3e | [
"MIT"
] | null | null | null | feedforward_cuda.py | abhishekbhakat/pyML | 44bc9915e0b42a8cc440d6f9ec3e5d7c4635bb3e | [
"MIT"
] | null | null | null | feedforward_cuda.py | abhishekbhakat/pyML | 44bc9915e0b42a8cc440d6f9ec3e5d7c4635bb3e | [
"MIT"
] | null | null | null | import torch
import torchvision
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torchvision.datasets import MNIST
from torchvision.transforms import ToTensor
from torch.utils.data.dataloader import DataLoader
from torch.utils.data import random_split
class MnistModel(nn.Module):
    """Feedforward neural network with 1 hidden layer"""
    def __init__(self, in_size, hidden_size, out_size):
        super().__init__()
        # hidden layer
        self.linear1 = nn.Linear(in_size, hidden_size)
        # output layer
        self.linear2 = nn.Linear(hidden_size, out_size)

    def forward(self, xb):
        # Flatten the image tensors
        xb = xb.view(xb.size(0), -1)
        # Get intermediate outputs using hidden layer
        out = self.linear1(xb)
        # Apply activation function
        out = F.relu(out)
        # Get predictions using output layer
        out = self.linear2(out)
        return out

    def training_step(self, batch):
        images, labels = batch
        out = self(images)  # Generate predictions
        loss = F.cross_entropy(out, labels)  # Calculate loss
        return loss

    def validation_step(self, batch):
        images, labels = batch
        out = self(images)  # Generate predictions
        loss = F.cross_entropy(out, labels)  # Calculate loss
        acc = accuracy(out, labels)  # Calculate accuracy
        return {'val_loss': loss, 'val_acc': acc}

    def validation_epoch_end(self, outputs):
        batch_losses = [x['val_loss'] for x in outputs]
        epoch_loss = torch.stack(batch_losses).mean()  # Combine losses
        batch_accs = [x['val_acc'] for x in outputs]
        epoch_acc = torch.stack(batch_accs).mean()  # Combine accuracies
        return {'val_loss': epoch_loss.item(), 'val_acc': epoch_acc.item()}

    def epoch_end(self, epoch, result):
        print("Epoch [{}], val_loss: {:.4f}, val_acc: {:.4f}".format(epoch, result['val_loss'], result['val_acc']))


class DeviceDataLoader():
    """Wrap a dataloader to move data to a device"""
    def __init__(self, dl, device):
        self.dl = dl
        self.device = device

    def __iter__(self):
        """Yield a batch of data after moving it to device"""
        for b in self.dl:
            yield to_device(b, self.device)

    def __len__(self):
        """Number of batches"""
        return len(self.dl)


def get_default_device():
    """Pick GPU if available, else CPU"""
    if torch.cuda.is_available():
        return torch.device('cuda')
    else:
        return torch.device('cpu')


def to_device(data, device):
    """Move tensor(s) to chosen device"""
    if isinstance(data, (list, tuple)):
        return [to_device(x, device) for x in data]
    return data.to(device, non_blocking=True)


def accuracy(outputs, labels):
    _, preds = torch.max(outputs, dim=1)
    return torch.tensor(torch.sum(preds == labels).item() / len(preds))


def evaluate(model, val_loader):
    outputs = [model.validation_step(batch) for batch in val_loader]
    return model.validation_epoch_end(outputs)


def fit(epochs, lr, model, train_loader, val_loader, opt_func=torch.optim.SGD):
    history = []
    optimizer = opt_func(model.parameters(), lr)
    for epoch in range(epochs):
        # Training Phase
        for batch in train_loader:
            loss = model.training_step(batch)
            loss.backward()
            optimizer.step()
            optimizer.zero_grad()
        # Validation phase
        result = evaluate(model, val_loader)
        model.epoch_end(epoch, result)
        history.append(result)
    return history


if __name__ == "__main__":
    dataset = MNIST(root='data/', download=True, transform=ToTensor())
    val_size = 10000
    train_size = len(dataset) - val_size
    train_ds, val_ds = random_split(dataset, [train_size, val_size])
    len(train_ds), len(val_ds)
    batch_size = 128
    input_size = 784
    hidden_size = 32  # you can change this
    num_classes = 10
    train_loader = DataLoader(train_ds, batch_size, shuffle=True, num_workers=4, pin_memory=True)
    val_loader = DataLoader(val_ds, batch_size*2, num_workers=4, pin_memory=True)
    device = get_default_device()
    train_loader = DeviceDataLoader(train_loader, device)
    val_loader = DeviceDataLoader(val_loader, device)
    model = MnistModel(input_size, hidden_size=hidden_size, out_size=num_classes)
    to_device(model, device)
    history = [evaluate(model, val_loader)]
    history += fit(5, 0.5, model, train_loader, val_loader)
    history += fit(5, 0.1, model, train_loader, val_loader) | 36.4375 | 115 | 0.647727 |
4a2609f7f049eca3c9e481e6a1bf212f6b2164c8 | 142 | py | Python | pkg/__init__.py | bruce30262/idapkg | 5d6af9bd59c5dc886d68335119fae41491f06ea7 | [
"MIT"
] | 125 | 2019-04-04T22:54:53.000Z | 2021-12-15T02:13:12.000Z | pkg/__init__.py | bruce30262/idapkg | 5d6af9bd59c5dc886d68335119fae41491f06ea7 | [
"MIT"
] | 19 | 2019-04-02T15:56:37.000Z | 2022-03-17T09:12:52.000Z | pkg/__init__.py | Jinmo/pm | 5d6af9bd59c5dc886d68335119fae41491f06ea7 | [
"MIT"
] | 14 | 2019-05-29T17:31:08.000Z | 2021-09-26T01:34:42.000Z | __version__ = '0.1.4'
import sys
from .commands import *
# expose 'pkg' in global namespace
__builtins__[__name__] = sys.modules[__name__]
| 15.777778 | 46 | 0.739437 |
4a260a7e799b9c56ad641a198b80501941eae2c2 | 165 | py | Python | matrixscreener/__init__.py | arve0/matrixscreener | 48789576b51e7c35539b6bca9c80c130ea339425 | [
"MIT"
] | 6 | 2015-02-09T08:52:41.000Z | 2021-07-09T02:24:13.000Z | matrixscreener/__init__.py | arve0/matrixscreener | 48789576b51e7c35539b6bca9c80c130ea339425 | [
"MIT"
] | 2 | 2015-02-19T19:03:47.000Z | 2016-05-25T12:32:41.000Z | matrixscreener/__init__.py | arve0/matrixscreener | 48789576b51e7c35539b6bca9c80c130ea339425 | [
"MIT"
] | 4 | 2015-02-09T08:52:49.000Z | 2016-08-04T07:51:19.000Z | """
Interfacing with Leica LAS AF MatrixScreener.
"""
__all__ = [ 'cam', 'experiment', 'utils']
from matrixscreener import cam, experiment, utils
VERSION = '0.6.1'
| 20.625 | 49 | 0.70303 |
4a260aed1dfbee812a569601f1e6689347df9c79 | 125 | py | Python | ex029.1.py | zWillsz/exsvscode | ba507dca6de748e3c82c306731137bb5f6f0c918 | [
"MIT"
] | null | null | null | ex029.1.py | zWillsz/exsvscode | ba507dca6de748e3c82c306731137bb5f6f0c918 | [
"MIT"
] | null | null | null | ex029.1.py | zWillsz/exsvscode | ba507dca6de748e3c82c306731137bb5f6f0c918 | [
"MIT"
] | null | null | null | n = str(input('Digite seu nome todo: ')).split()
print('Seu primeiro nome é {} e seu ultimo nome é {}.'.format(n[0], n[-1]))
| 41.666667 | 75 | 0.616 |
4a260bd6dcb973e165c9e154b8b9e914d44aea31 | 497 | py | Python | tests/tools/test_rules.py | a5kin/xentica | ca08fac9f85af71c9d6d98545a33d50323f851b3 | [
"MIT"
] | 23 | 2018-02-24T23:38:54.000Z | 2022-02-16T15:20:46.000Z | tests/tools/test_rules.py | a5kin/xentica | ca08fac9f85af71c9d6d98545a33d50323f851b3 | [
"MIT"
] | 39 | 2017-11-20T21:54:30.000Z | 2020-09-01T12:43:57.000Z | tests/tools/test_rules.py | a5kin/xentica | ca08fac9f85af71c9d6d98545a33d50323f851b3 | [
"MIT"
] | 4 | 2019-04-23T03:56:44.000Z | 2021-05-14T11:12:54.000Z | """Tests for ``xentica.tools.rules`` module."""
import unittest
from xentica.tools.rules import LifeLike
class TestLifelike(unittest.TestCase):
    """Tests for Lifelike rules helpers."""

    def test_golly(self):
        """Test conversion from Golly and vice versa."""
        rule_str = "B3/S23"
        rule_int = LifeLike.golly2int(rule_str)
        self.assertEqual(rule_int, 6152)
        rule_converted = LifeLike.int2golly(rule_int)
        self.assertEqual(rule_converted, rule_str)
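# Why 6152: assuming the helper packs birth conditions into the low 9 bits and
# survival conditions into the next 9 bits, "B3/S23" encodes as
#   2**3 + 2**(9+2) + 2**(9+3) = 8 + 2048 + 4096 = 6152,
# which is the value the assertion above checks.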
| 29.235294 | 56 | 0.688129 |
4a260c66f84c6098563e847bbd8c75979483888b | 429 | py | Python | confirmation/migrations/0011_alter_confirmation_expiry_date.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | 4 | 2021-09-16T16:46:55.000Z | 2022-02-06T13:00:21.000Z | confirmation/migrations/0011_alter_confirmation_expiry_date.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | null | null | null | confirmation/migrations/0011_alter_confirmation_expiry_date.py | dumpmemory/zulip | 496273ddbc567330a0022699d6d6eb5c646e5da5 | [
"Apache-2.0"
] | 1 | 2022-02-04T05:15:12.000Z | 2022-02-04T05:15:12.000Z | # Generated by Django 3.2.9 on 2021-11-30 17:44
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ("confirmation", "0010_alter_confirmation_expiry_date"),
    ]

    operations = [
        migrations.AlterField(
            model_name="confirmation",
            name="expiry_date",
            field=models.DateTimeField(db_index=True, null=True),
        ),
    ]
| 22.578947 | 65 | 0.631702 |
4a260c67b4d050d44f02c8c51668e4f5f979459e | 6,454 | py | Python | kubernetes/client/models/v1_network_policy.py | Scalr/kubernetes-client-python | 07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38 | [
"Apache-2.0"
] | 3 | 2019-05-19T05:05:37.000Z | 2020-03-20T04:56:20.000Z | kubernetes/client/models/v1_network_policy.py | Scalr/kubernetes-client-python | 07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/models/v1_network_policy.py | Scalr/kubernetes-client-python | 07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1NetworkPolicy(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1NetworkPolicySpec'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None):
"""
V1NetworkPolicy - a model defined in Swagger
"""
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
@property
def api_version(self):
"""
Gets the api_version of this V1NetworkPolicy.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1NetworkPolicy.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1NetworkPolicy.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1NetworkPolicy.
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""
Gets the kind of this V1NetworkPolicy.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1NetworkPolicy.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1NetworkPolicy.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1NetworkPolicy.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1NetworkPolicy.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:return: The metadata of this V1NetworkPolicy.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1NetworkPolicy.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:param metadata: The metadata of this V1NetworkPolicy.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V1NetworkPolicy.
Specification of the desired behavior for this NetworkPolicy.
:return: The spec of this V1NetworkPolicy.
:rtype: V1NetworkPolicySpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V1NetworkPolicy.
Specification of the desired behavior for this NetworkPolicy.
:param spec: The spec of this V1NetworkPolicy.
:type: V1NetworkPolicySpec
"""
self._spec = spec
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1NetworkPolicy):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 30.587678 | 281 | 0.605981 |
4a260e4b9ba93bd40c7fd67a4ee6fc79ae7d65da | 5,621 | py | Python | hypertrainer/htplatform.py | mpelchat04/hypertrainer | 0a6aa3a20c222c0280101ab5a6d72e7b0afddc29 | [
"MIT"
] | 1 | 2019-07-08T13:28:47.000Z | 2019-07-08T13:28:47.000Z | hypertrainer/htplatform.py | mpelchat04/hypertrainer | 0a6aa3a20c222c0280101ab5a6d72e7b0afddc29 | [
"MIT"
] | null | null | null | hypertrainer/htplatform.py | mpelchat04/hypertrainer | 0a6aa3a20c222c0280101ab5a6d72e7b0afddc29 | [
"MIT"
] | 4 | 2019-02-27T15:59:04.000Z | 2019-09-18T19:39:30.000Z | import time
from pathlib import Path
from typing import List, Iterable, Dict
import redis.exceptions
from redis import Redis
from rq import Queue
from rq.job import Job, cancel_job as cancel_rq_job
from hypertrainer.computeplatform import ComputePlatform
from hypertrainer.computeplatformtype import ComputePlatformType
from hypertrainer.htplatform_worker import run, get_jobs_info, get_logs, ping, raise_exception, delete_job, \
cancel_job
from hypertrainer.utils import TaskStatus, get_python_env_command, config_context
ConnectionError = redis.exceptions.ConnectionError
def check_connection(redis_conn):
try:
redis_conn.ping()
except redis.exceptions.ConnectionError as e:
msg = e.args[0]
msg += '\nPlease make sure redis-server is running, and accessible.'
e.args = (msg,) + e.args[1:]
raise e
class HtPlatform(ComputePlatform):
"""The HT (HyperTrainer) Platform allows to send jobs to one or more Linux machines.
Each participating worker consumes jobs from a global queue. There can be several workers per machine.
"""
def __init__(self, same_thread=False):
with config_context() as config:
self.worker_hostnames = config['ht_platform']['worker_hostnames']
redis_conn = Redis(port=config['ht_platform']['redis_port'])
check_connection(redis_conn)
self.redis_conn = redis_conn
self.jobs_queue = Queue(name='jobs', connection=redis_conn, is_async=not same_thread)
self.worker_queues: Dict[str, Queue] = {h: Queue(name=h, connection=redis_conn, is_async=not same_thread)
for h in self.worker_hostnames}
def submit(self, task, resume=False):
output_path = Path(task.output_root) / str(task.uuid)
task.output_path = str(output_path)
python_env_command = get_python_env_command(Path(task.project_path), ComputePlatformType.HT.value)
job = self.jobs_queue.enqueue(run, job_timeout=-1, kwargs=dict(
script_file=Path(task.script_file),
output_path=output_path,
python_env_command=python_env_command,
config_dump=task.dump_config(),
resume=resume))
# At this point, we only know the rq job id. No pid since the job might have to wait.
return job.id
def fetch_logs(self, task, keys=None):
if task.hostname == '': # The job hasn't been consumed yet
return {}
rq_job = self.worker_queues[task.hostname].enqueue(get_logs, args=(task.output_path,), ttl=2, result_ttl=2)
logs = wait_for_result(rq_job)
return logs
def cancel(self, task):
cancel_rq_job(task.job_id, connection=self.redis_conn) # This ensures the job will not start
if task.hostname == '':
print(f'Cannot send cancellation for {task.uuid}: no assigned worker hostname')
else:
self.worker_queues[task.hostname].enqueue(cancel_job, args=(task.job_id,), ttl=4)
def update_tasks(self, tasks):
# TODO only check requested ids
for t in tasks:
assert t.status.is_active
if t.status != TaskStatus.Waiting: # Waiting will not be found until they are picked up
t.status = TaskStatus.Unknown # State is unknown, unless we find the task in a worker db
job_id_to_task = {t.job_id: t for t in tasks}
info_dicts = self._get_info_dict_for_each_worker()
for hostname, local_db in zip(self.worker_hostnames, info_dicts):
if local_db is None:
continue # Did not receive an answer from worker
for job_id in set(local_db.keys()).intersection(job_id_to_task.keys()):
# For each task in the intersection of (tasks to update) and (tasks in worker db)
t = job_id_to_task[job_id]
job_info = local_db[job_id]
t.status = TaskStatus(job_info['status'])
t.hostname = hostname
def delete(self, task):
if task.hostname == '':
print(f'Cannot perform worker deletion for {task.uuid}: no assigned worker hostname')
else:
self.worker_queues[task.hostname].enqueue(delete_job, args=(task.job_id, task.output_path), ttl=4)
def _get_info_dict_for_each_worker(self):
rq_jobs = [q.enqueue(get_jobs_info, ttl=2, result_ttl=2) for q in self.worker_queues.values()]
results = wait_for_results(rq_jobs, raise_exc=False)
return results
def ping_workers(self):
rq_jobs = [q.enqueue(ping, ttl=2, result_ttl=2, args=(h,)) for h, q in self.worker_queues.items()]
results = wait_for_results(rq_jobs)
return results
def raise_exception_in_worker(self, exc_type, queue_name):
self.worker_queues[queue_name].enqueue(raise_exception, ttl=2, result_ttl=2, args=(exc_type,))
def wait_for_result(rq_job: Job, interval_secs=1, tries=4, raise_exc=True):
for i in range(tries):
if rq_job.result is not None:
return rq_job.result
else:
time.sleep(interval_secs)
if raise_exc:
raise TimeoutError
return None
def wait_for_results(rq_jobs: Iterable[Job], interval_secs=1, tries=4, raise_exc=True):
assert tries >= 1
for i in range(tries):
results = [j.result for j in rq_jobs]
if any(r is None for r in results):
time.sleep(interval_secs)
else:
return results
if raise_exc:
raise TimeoutError
# noinspection PyUnboundLocalVariable
return results
| 40.438849 | 115 | 0.666785 |
4a260ebb52a6c33c681b12bffaf71925cc9bedf7 | 1,177 | py | Python | demos/disassembler_special_ins.py | IOsipov/androguard | 13df1b45e93706eb20231626b0cd6bf15390f844 | [
"Apache-2.0"
] | 24 | 2018-10-09T02:08:17.000Z | 2021-08-03T21:39:16.000Z | demos/disassembler_special_ins.py | IOsipov/androguard | 13df1b45e93706eb20231626b0cd6bf15390f844 | [
"Apache-2.0"
] | 3 | 2019-06-21T12:05:39.000Z | 2021-11-09T07:23:37.000Z | demos/disassembler_special_ins.py | IOsipov/androguard | 13df1b45e93706eb20231626b0cd6bf15390f844 | [
"Apache-2.0"
] | 4 | 2018-12-12T21:34:37.000Z | 2021-09-03T19:42:10.000Z | #!/usr/bin/env python
import sys, hashlib
PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)
from androguard.core.androgen import AndroguardS
from androguard.core.analysis import analysis
TEST = 'examples/android/TestsAndroguard/bin/classes.dex'
a = AndroguardS( TEST )
x = analysis.VMAnalysis( a.get_vm() )
for method in a.get_methods() :
g = x.get_method( method )
if method.get_code() == None :
continue
idx = 0
for i in g.basic_blocks.get() :
for ins in i.get_instructions() :
op_value = ins.get_op_value()
# packed/sparse
if op_value == 0x2b or op_value == 0x2c :
special_ins = i.get_special_ins(idx)
if special_ins != None :
print "\t %x" % idx, ins, special_ins, ins.get_name(), ins.get_output(), special_ins.get_values()
# fill
if op_value == 0x26 :
special_ins = i.get_special_ins(idx)
if special_ins != None :
print "\t %x" % idx, ins, special_ins, ins.get_name(), ins.get_output(), repr( special_ins.get_data() )
idx += ins.get_length() | 30.973684 | 123 | 0.587086 |
4a260fc343794a37486e10aae0aa364ca3f95dd4 | 184 | py | Python | Darlington/phase1/python Basic 1/day 3 solution/qtn4.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 6 | 2020-05-23T19:53:25.000Z | 2021-05-08T20:21:30.000Z | Darlington/phase1/python Basic 1/day 3 solution/qtn4.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 8 | 2020-05-14T18:53:12.000Z | 2020-07-03T00:06:20.000Z | Darlington/phase1/python Basic 1/day 3 solution/qtn4.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 39 | 2020-05-10T20:55:02.000Z | 2020-09-12T17:40:59.000Z | #differences in date of the day
import datetime
#import timedelta
from datetime import date
first = date(2014, 7, 2)
second = date(2014, 7, 11)
diff = second - first
print(diff.days) | 18.4 | 31 | 0.73913 |
4a260ff1ca3f3b1fda1e6b3071781605756d0281 | 12,122 | py | Python | lib/MotorPDE.py | youngmp/NoisyMotors | a2c860e0be3ff298a48685a48b0d29e5c53384b8 | [
"MIT"
] | null | null | null | lib/MotorPDE.py | youngmp/NoisyMotors | a2c860e0be3ff298a48685a48b0d29e5c53384b8 | [
"MIT"
] | 2 | 2021-07-14T18:22:33.000Z | 2021-07-14T18:22:50.000Z | lib/MotorPDE.py | youngmp/NoisyMotors | a2c860e0be3ff298a48685a48b0d29e5c53384b8 | [
"MIT"
] | 1 | 2021-07-28T04:26:38.000Z | 2021-07-28T04:26:38.000Z | """
TODO:
-add ground truth steady-state distribution in phi
-determine correct boudnary condition
Trying to apply upwind/downwind to our problem.
The equation I derived is ...see below
"""
import time
#import matplotlib
#import matplotlib.pyplot as plt
import numpy as np
from scipy.integrate import solve_ivp
#from scipy.interpolate import interp1d
#from cumsumb import cumsum
import lib.libMotorPDE as lib
#matplotlib.use('TkAgg')
class MotorPDE(object):
def __init__(self,**kwargs):
defaults = {'N':100,
'N2':100,
'dt':.0005,
'U':None,
'alpha':14,
'beta':126,
'zeta':1,
'A0':0,
'A':5,
'B':5.1,
'T':10,
'fn_test_option':'root_cos',
'fn_vel':50,
'store_position':False,
'ivp_method':'euler',
'source':True,
'regularized':False,
'testing_ss':False,
'init_pars':None,
'domain':None,
'irregular':False}
# define defaults if not defined in kwargs
for (prop, default) in defaults.items():
setattr(self, prop, kwargs.get(prop, default))
#print('source type',self.source)
assert(self.A <= self.B)
assert(self.A0 <= self.A)
#assert(self.U is not None)
#self.dx = (self.B-self.A0)/self.N
# center an index at z=A and build from there.
if self.irregular:
x_left = np.linspace(self.A0,self.A,self.N)
x_right = np.linspace(self.A,self.B,self.N2)
#print('xleft,xright',x_left,x_right)
self.x = np.append(x_left,x_right[1:])
self.dx = np.diff(self.x)
#self.dx = np.append(self.dx[-1],self.dx[0])
#self.dx = np.append(self.dx,self.dx[-1])
# note that A_idx is chosen just right of z=A
# this is because the mesh is finer on the right and
# easier to manage.
self.A_idx = len(x_left)
self.B_idx = len(self.x)-1
#print(self.x[self.A_idx])
else:
self.x,self.dx = np.linspace(self.A0,self.B,self.N,
endpoint=False,retstep=True)
# index of A
self.A_idx = np.argmin(np.abs(self.x-(self.A)))
# index of position B
self.B_idx = np.argmin(np.abs(self.x-self.B))
self.idx_full = np.arange(len(self.x))
# indices of all points except appropriate boundary
# [0,1,2,3,4,5,6,7] to [0,1,2,3,4,5,6]
self.idx_except_last = self.idx_full[:-1]
# [0,1,2,3,4,5,6,7] to [1,2,3,4,5,6,7]
self.idx_except_first = self.idx_full[1:]
#self.idx_A2B = self.idx_full[self.A_idx:self.B_idx]
# [0,1,2,3,4,5,6,7] to [1,2,3,4,5,6,7]
self.roll_next = np.roll(self.idx_full,-1)[:-1]
# [0,1,2,3,4,5,6,7] to [0,1,2,3,4,5,6]
self.roll_prev = np.roll(self.idx_full,1)[1:]
self.TN = int(self.T/self.dt) # time discretization
# preallocate output array for upwinding scheme here for efficiency.
self.out = np.zeros_like(self.x)
if not self.store_position and self.ivp_method == 'euler':
TN = 1
else:
TN = self.TN
self.t = np.linspace(0,self.T,TN)
self.sol = np.zeros((TN,len(self.x)))
self.U_arr = np.zeros(TN)
if self.regularized:
# regularized delta function
s = (self.A-self.A0)/self.dx
i = np.floor(s) # floor index of A
r = s-i # distance from floor in index
self.delta_idxs = np.mod(np.arange(i-2,i+1+1,1),self.N)+1
self.delta_idxs = self.delta_idxs.astype(int)
q = np.sqrt(1+4*r*(1-r))
self.ws = np.array([(3-2*r-q)/8,
(3-2*r+q)/8,
(1+2*r+q)/8,
(1+2*r-q)/8])
def boundary_left(self,U,sol):
"""
left boundary condition changes depending on sign of U
U < 0: Dirichlet or do nothing
U > 0: Dirichlet +self.alpha*(1-self.theta)/U
"""
if U < 0:
return 0
elif U > 0:
return self.alpha*(1-self.theta_n)/U
def boundary_right(self,U,sol):
"""
Right boundary condition changes depending on sign of U
U < 0: -self.alpha*(1-self.theta_n)/U
U > 0: phi_x = phi_t
"""
if U < 0:
return -self.alpha*(1-self.theta_n)/U
elif U > 0:
return sol
def run(self):
"""
decides on which integration scheme to use
based on user option (self.ivp_method)
"""
# initial condition
if self.init_pars is None:
self.init = np.zeros_like(self.x)
elif self.init_pars['type'] == 'gaussian':
self.init = lib.gauss(self.x-(self.A0+self.B)/2,
sig=self.init_pars['pars'])
self.sol[0,:] = self.init
if self.ivp_method == 'euler':
self.sol = self.run_euler()
else:
obj_integrated = solve_ivp(self.upwind,[0,self.T],
self.init,args=(self.U,),
t_eval=self.t,
method=self.ivp_method)
self.sol = obj_integrated.y.T
def run_euler(self):
#assert (self.CFL < 1), "CFL condition not met for Euler method"
self.i = 0
while self.i < (self.TN-1):
if self.U == 'dynamic':
# generate population distribution from sol
# draw from population distribution
if np.add.reduce(self.sol[self.i,:]) != 0:
xs = lib.inverse_transform_sampling(self,
self.sol[self.i,:],100)
else:
xs = np.zeros(100)
# get total force
f_up = np.add.reduce(lib.force_position(xs))*self.dx
#print(f_up)
f_down = 0
Uval = (-f_up + f_down)/(self.zeta)
# update velocity
#Uval = self.update_velocity(f_up,f_down,Uval)
#print(Uval)
else:
Uval = self.U
k_next, k_now = lib.get_time_index(self.store_position,self.i)
sol_now = self.sol[k_now,:]
self.sol[k_next,:] = sol_now + self.dt*(self.upwind(self.t[k_now],
sol_now,
Uval))
self.i += 1
return self.sol
def upwind(self,t,sol,U):
"""
Implementation of upwinding scheme to be used in Euler loop
method of lines
"""
if False:
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['text.usetex'] = False
mpl.rcParams['text.latex.preamble'] = r'\usepackage{amsmath}'
fig = plt.figure(figsize=(4, 5))
ax1 = fig.add_subplot(111)
ax1.set_title('input sol to upwind')
ax1.plot(sol)
plt.show()
plt.close()
time.sleep(2)
if callable(U):
Uval = U(t,vel=self.fn_vel,option=self.fn_test_option)
elif isinstance(U,float) or isinstance(U,int):
Uval = U
if self.ivp_method == 'euler' and self.store_position:
self.U_arr[self.i] = Uval
if Uval > 0:
# boundaries
idx_update = self.idx_except_first
if self.irregular:
dx = self.dx[0]
else:
dx = self.dx
self.out[0] = -self.beta*sol[0]-sol[0]*Uval/dx
else:
# boundaries
idx_update = self.idx_except_last
if self.irregular:
dx = self.dx[-1]
else:
dx = self.dx
self.out[-1] = -self.beta*sol[-1]+sol[-1]*Uval/dx
if Uval <= 0:
U_minus = Uval
U_plus = 0
else:
U_minus = 0
U_plus = Uval
if self.irregular:
dx = self.dx
else:
dx = self.dx
p_plus = (sol[self.roll_next]-sol[self.idx_except_last])/dx
p_minus = (sol[self.idx_except_first]-sol[self.roll_prev])/dx
#if Uval > 0:
# print('p_plus',p_plus[-5:])
#print('p_plus',p_plus[-5:])
# update derivatives
wind = p_plus*U_minus + p_minus*U_plus
#print(self.i,'plus,minus',wind,U_minus,U_plus)
self.out[idx_update] = -self.beta*(sol[idx_update]) - wind
#print(dx,self.dx,self.irregular,self.alpha*(1-self.theta_n)/dx)
#print()
#print(self.out[self.A_idx])
if self.irregular:
self.theta_n = np.add.reduce(sol[:-1]*self.dx)
else:
self.theta_n = np.add.reduce(sol)*self.dx
#assert((self.theta_n <= 1) and (self.theta_n >= 0))
#print('thetan',self.theta_n)
# update input
if (self.source == True or self.source == 'motor')\
and self.regularized == False:
if self.irregular:
dx = self.dx[self.A_idx]
else:
dx = self.dx
#print(dx,self.dx,self.irregular,self.alpha*(1-self.theta_n)/dx)
#print()
#print(self.out[self.A_idx])
self.out[self.A_idx] += self.alpha*(1-self.theta_n)/dx
#print('out@A_idx',self.out[self.A_idx])
#print(self.out[self.A_idx],dx,self.alpha,(1-self.theta_n))
elif (self.source == True or self.source == 'motor')\
and self.regularized == True:
if self.irregular:
dx = self.dx[self.delta_idxs]
else:
dx = self.dx
self.out[self.delta_idxs] += self.ws*self.alpha*(1-self.theta_n)/dx
# Gaussian source
#sig = .3
#k = self.alpha*(1-self.theta_n)/(sig*np.sqrt(2*np.pi))
#out[self.idx_full] += k*np.exp(-0.5*(self.x-self.A)**2/sig**2)
elif callable(self.source) and self.regularized == True:
if self.irregular:
dx = self.dx[self.A_idx]
else:
dx = self.dx
self.out[self.delta_idxs] += self.ws*self.source(t)/dx
elif callable(self.source) and self.regularized == False:
if self.irregular:
dx = self.dx[self.A_idx]
else:
dx = self.dx
self.out[self.A_idx] += self.source(t)/dx
elif self.source == False:
pass
else:
raise ValueError('Unrecognized source option',self.source,
self.regularized)
#elif self.source == 'regularized_custom':
# #print(self.source(t))
# out[self.delta_idxs] += self.ws*self.source(t)/self.dx
#if Uval > 0:
# print('out',self.out[-5:])
return self.out
def main():
pass
if __name__ == "__main__":
main()
| 29.857143 | 79 | 0.46824 |
4a26114321a7344825a88a254bdb94f121bdf846 | 6,200 | bzl | Python | bazel/closure_grpc_web_library.bzl | zhouqiangWang/grpc-web | a470a75000c7b0789adc38028ff8d2cf491405eb | [
"Apache-2.0"
] | null | null | null | bazel/closure_grpc_web_library.bzl | zhouqiangWang/grpc-web | a470a75000c7b0789adc38028ff8d2cf491405eb | [
"Apache-2.0"
] | null | null | null | bazel/closure_grpc_web_library.bzl | zhouqiangWang/grpc-web | a470a75000c7b0789adc38028ff8d2cf491405eb | [
"Apache-2.0"
] | null | null | null | # This rule was inspired by rules_closure`s implementation of
# |closure_proto_library|, licensed under Apache 2.
# https://github.com/bazelbuild/rules_closure/blob/3555e5ba61fdcc17157dd833eaf7d19b313b1bca/closure/protobuf/closure_proto_library.bzl
load(
"@io_bazel_rules_closure//closure/compiler:closure_js_library.bzl",
"create_closure_js_library",
)
load(
"@io_bazel_rules_closure//closure/private:defs.bzl",
"CLOSURE_JS_TOOLCHAIN_ATTRS",
"unfurl",
)
load(
"@io_bazel_rules_closure//closure/protobuf:closure_proto_library.bzl",
"closure_proto_aspect",
)
# This was borrowed from Rules Go, licensed under Apache 2.
# https://github.com/bazelbuild/rules_go/blob/67f44035d84a352cffb9465159e199066ecb814c/proto/compiler.bzl#L72
def _proto_path(proto):
path = proto.path
root = proto.root.path
ws = proto.owner.workspace_root
if path.startswith(root):
path = path[len(root):]
if path.startswith("/"):
path = path[1:]
if path.startswith(ws):
path = path[len(ws):]
if path.startswith("/"):
path = path[1:]
return path
def _proto_include_path(proto):
path = proto.path[:-len(_proto_path(proto))]
if not path:
return "."
if path.endswith("/"):
path = path[:-1]
return path
def _proto_include_paths(protos):
return depset([_proto_include_path(proto) for proto in protos])
def _generate_closure_grpc_web_src_progress_message(name):
# TODO(yannic): Add a better message?
return "Generating GRPC Web %s" % name
def _generate_closure_grpc_web_srcs(
actions,
protoc,
protoc_gen_grpc_web,
import_style,
mode,
sources,
transitive_sources):
all_sources = [src for src in sources] + [src for src in transitive_sources]
proto_include_paths = [
"-I%s" % p
for p in _proto_include_paths(
[f for f in all_sources],
)
]
grpc_web_out_common_options = ",".join([
"import_style={}".format(import_style),
"mode={}".format(mode),
])
files = []
for src in sources:
name = "{}.grpc.js".format(
".".join(src.path.split("/")[-1].split(".")[:-1]),
)
js = actions.declare_file(name)
files.append(js)
args = proto_include_paths + [
"--plugin=protoc-gen-grpc-web={}".format(protoc_gen_grpc_web.path),
"--grpc-web_out={options},out={out_file}:{path}".format(
options = grpc_web_out_common_options,
out_file = name,
path = js.path[:js.path.rfind("/")],
),
src.path,
]
actions.run(
tools = [protoc_gen_grpc_web],
inputs = all_sources,
outputs = [js],
executable = protoc,
arguments = args,
progress_message =
_generate_closure_grpc_web_src_progress_message(name),
)
return files
_error_multiple_deps = "".join([
"'deps' attribute must contain exactly one label ",
"(we didn't name it 'dep' for consistency). ",
"We may revisit this restriction later.",
])
def _closure_grpc_web_library_impl(ctx):
if len(ctx.attr.deps) > 1:
# TODO(yannic): Revisit this restriction.
fail(_error_multiple_deps, "deps")
dep = ctx.attr.deps[0]
srcs = _generate_closure_grpc_web_srcs(
actions = ctx.actions,
protoc = ctx.executable._protoc,
protoc_gen_grpc_web = ctx.executable._protoc_gen_grpc_web,
import_style = ctx.attr.import_style,
mode = ctx.attr.mode,
sources = dep[ProtoInfo].direct_sources,
transitive_sources = dep[ProtoInfo].transitive_imports,
)
deps = unfurl(ctx.attr.deps, provider = "closure_js_library")
deps += [
ctx.attr._grpc_web_abstractclientbase,
ctx.attr._grpc_web_clientreadablestream,
ctx.attr._grpc_web_error,
ctx.attr._grpc_web_grpcwebclientbase,
]
suppress = [
"misplacedTypeAnnotation",
"unusedPrivateMembers",
"reportUnknownTypes",
"strictDependencies",
"extraRequire",
]
library = create_closure_js_library(
ctx = ctx,
srcs = srcs,
deps = deps,
suppress = suppress,
lenient = False,
)
return struct(
exports = library.exports,
closure_js_library = library.closure_js_library,
# The usual suspects are exported as runfiles, in addition to raw source.
runfiles = ctx.runfiles(files = srcs),
)
closure_grpc_web_library = rule(
implementation = _closure_grpc_web_library_impl,
attrs = dict({
"deps": attr.label_list(
mandatory = True,
providers = [ProtoInfo, "closure_js_library"],
# The files generated by this aspect are required dependencies.
aspects = [closure_proto_aspect],
),
"import_style": attr.string(
default = "closure",
values = ["closure"],
),
"mode": attr.string(
default = "grpcwebtext",
values = ["grpcwebtext", "grpcweb"],
),
# internal only
# TODO(yannic): Convert to toolchain.
"_protoc": attr.label(
default = Label("@com_google_protobuf//:protoc"),
executable = True,
cfg = "host",
),
"_protoc_gen_grpc_web": attr.label(
default = Label("//javascript/net/grpc/web:protoc-gen-grpc-web"),
executable = True,
cfg = "host",
),
"_grpc_web_abstractclientbase": attr.label(
default = Label("//javascript/net/grpc/web:abstractclientbase"),
),
"_grpc_web_clientreadablestream": attr.label(
default = Label("//javascript/net/grpc/web:clientreadablestream"),
),
"_grpc_web_error": attr.label(
default = Label("//javascript/net/grpc/web:error"),
),
"_grpc_web_grpcwebclientbase": attr.label(
default = Label("//javascript/net/grpc/web:grpcwebclientbase"),
),
}, **CLOSURE_JS_TOOLCHAIN_ATTRS),
)
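# Editor-added usage sketch (not part of the original file): in a BUILD file the
# rule above would be loaded and instantiated roughly as below; the target and
# proto names are illustrative assumptions, not targets from this repository.
# The rule accepts exactly one dep, per the check in the implementation above.
#
#     load("//bazel:closure_grpc_web_library.bzl", "closure_grpc_web_library")
#
#     closure_grpc_web_library(
#         name = "echo_grpc_web",
#         deps = [":echo_proto"],
#         import_style = "closure",
#         mode = "grpcwebtext",
#     )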
| 31.313131 | 134 | 0.610484 |
4a2611610d569dba39326a12ccb6ec967cc90111 | 6,291 | py | Python | tagger-ds-temp/tagger/message_retriever.py | filchyboy/TaggerMail | 4166a49842817ad999a8fbb6262de7cb7e765a5a | [
"MIT"
] | null | null | null | tagger-ds-temp/tagger/message_retriever.py | filchyboy/TaggerMail | 4166a49842817ad999a8fbb6262de7cb7e765a5a | [
"MIT"
] | 6 | 2021-01-28T21:23:23.000Z | 2022-01-22T13:06:54.000Z | tagger-ds-temp/tagger/message_retriever.py | filchyboy/TaggerMail | 4166a49842817ad999a8fbb6262de7cb7e765a5a | [
"MIT"
] | null | null | null | # Module imports / focused on Google API
import base64
from bs4 import BeautifulSoup
import json
import re
import pandas as pd
from collections import Counter
from googleapiclient.discovery import build
from gensim.corpora import Dictionary
from gensim.models.ldamulticore import LdaMulticore
import spacy
import nltk
from spacy.lang.en.stop_words import STOP_WORDS
from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
# If modifying these scopes user will be forced to re-auth
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
previous_email_pull = '172317ca92683d33'
def user_emails(creds):
""" Pulls user emails """
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail API
emails = service.users().messages().list(userId='me').execute()
return emails
def recent_id(emails):
# Find most recent message
recent_msg_id = emails['messages'][0]['id']
return recent_msg_id
def message(service, recent_msg_id):
""" Pulls and cleans the most recent email """
# Call message content
message_content = service.users().messages().get(
userId='me', id=recent_msg_id).execute()
# Call message body
message_payload = message_content['payload']
if "parts" in message_payload:
message_body = message_content['payload']['parts']
message_body_dict = dict(message_body[0])
else:
message_body = message_content['payload']['body']['data']
message_body_dict = dict(message_body[0])
# Decode base64 encoding
decode = message_body_dict['body']['data']
decodedContents = base64.urlsafe_b64decode(decode.encode('utf-8'))
# Clean the text
text = BeautifulSoup(decodedContents, 'html.parser')
text = str(text.text)
text = re.sub(r"http\S+", "", text)
text = [text]
# Shape text
df = pd.DataFrame(text, columns=['email_body'])
# Remove whitespace
df['email_body'] = df['email_body'].str.strip().str.lower()
# Start with date
    df['email_body'].str.match(r'\d?\d/\d?\d/\d{4}').all()
    # Replace all non-overlapping matches
    df['email_body'] = df['email_body'].str.replace(
        r'[^a-zA-Z\s]', '', regex=True).str.replace(r'\s+', ' ', regex=True)
# Prepare df to return
email_body = df['email_body']
return(email_body)
def tag_recent(email_body):
""" Generates tags on the most recent email """
# Load spacy model
nlp = spacy.load('en_core_web_md')
my_stop_words = ['i', 'me', 'my', 'myself', 'we', 'our', 'ours',
'ourselves', 'you', "you're", "you've", "you'll",
"you'd", 'your', 'yours', 'yourself', 'yourselves',
'he', 'him', 'his', 'himself', 'she', "she's",
'her', 'hers', 'herself', 'it', "it's", 'its',
'itself', 'they', 'them', 'their', 'theirs',
'themselves', 'what', 'which', 'who', 'whom', 'this',
'that', "that'll", 'these', 'those', 'am', 'is',
'are', 'was', 'were', 'be', 'been', 'being', 'have',
'has', 'had', 'having', 'do', 'does', 'did', 'doing',
'a', 'an', 'the', 'and', 'but', 'if', 'or', 'because',
'as', 'until', 'while', 'of', 'at', 'by', 'for',
'with', 'about', 'against', 'between', 'into', 'through',
'during', 'before', 'after', 'above', 'below', 'to',
'from', 'up', 'down', 'in', 'out', 'on', 'off', 'over',
'under', 'again', 'further', 'then', 'once', 'here',
'there', 'when', 'where', 'why', 'how', 'all', 'any',
'both', 'each', 'few', 'more', 'most', 'other', 'some',
'such', 'no', 'nor', 'not', 'only', 'own', 'same', 'so',
'than', 'too', 'very', 's', 't', 'can', 'will', 'just',
'don', "don't", 'should', "should've", 'now', 'd', 'll',
'm', 'o', 're', 've', 'y', 'ain', 'aren', "aren't",
'couldn', "couldn't", 'didn', "didn't", 'doesn',
"doesn't", 'hadn', "hadn't", 'hasn', "hasn't", 'haven',
"haven't", 'isn', "isn't", 'ma', 'mightn', "mightn't",
'mustn', "mustn't", 'needn', "needn't", 'shan', "shan't",
'shouldn', "shouldn't", 'wasn', "wasn't", 'weren',
"weren't", 'won', "won't", 'wouldn', "wouldn't",
"unsubscribe", "st"]
STOP_WORDS = nlp.Defaults.stop_words.union(my_stop_words)
# Generate NLP model
tokens = []
for doc in nlp.pipe(email_body, batch_size=500):
doc_tokens = []
for token in doc:
if (token.is_stop is False) & (token.is_punct is False):
doc_tokens.append(token.lemma_.lower())
tokens.append(doc_tokens)
    # Topic Modelling (use the token lists directly; no DataFrame is needed here)
    id2word = Dictionary(tokens)
    corpus = [id2word.doc2bow(d) for d in tokens]
model = LdaMulticore(corpus=corpus,
id2word=id2word,
random_state=42,
num_topics=10,
passes=8,
workers=(-2))
# Generate topics from model
words = [re.findall(r'"([^"]*)"', t[1]) for t in model.print_topics()]
# Count instances of each word seeking weight of dupes
wordcount = Counter(words[0] + words[1] + words[2] + words[3] + words[4])
# Generate dataframe of results,
# drop those found less than twice,
# sort descending
tags = pd.DataFrame.from_dict(wordcount,
orient='index',
columns=['number'])
tags.drop(tags[tags.number <= 1].index, inplace=True)
tags.sort_values(by=['number'], ascending=False, inplace=True)
tags['length'] = range(len(tags))
tags = tags.T
tags_list = tags.columns
# Return sorted smart tag list
return(tags_list)
# # Recent messages list
# message_tally = []
# for _ in range(len(message['messages'])):
# message_tally.append(message['messages'][_]['id'])
# if message['messages'][_]['id'] == previous_email_pull:
# break
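# Editor-added usage sketch (not part of the original module). Assuming `creds`
# are valid Gmail API credentials for the SCOPES above (obtained elsewhere via
# an OAuth flow), the functions in this module are meant to chain like this:
#
#     service = build('gmail', 'v1', credentials=creds)
#     emails = user_emails(creds)
#     msg_id = recent_id(emails)
#     email_body = message(service, msg_id)
#     tags = tag_recent(email_body)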
| 37.005882 | 78 | 0.545382 |
4a26119341e4a8c3c36794f6c8993b9ae0af5fb8 | 21,112 | py | Python | stable_baselines/logger.py | yfletberliac/Actor-with-Variance-Estimated-Critic | 294ef39d05f31067907bb6ff9405183d4a530244 | [
"MIT"
] | 4 | 2020-10-04T02:15:47.000Z | 2020-10-13T15:24:22.000Z | stable_baselines/logger.py | yfletberliac/actor-with-variance-estimated-critic | 294ef39d05f31067907bb6ff9405183d4a530244 | [
"MIT"
] | null | null | null | stable_baselines/logger.py | yfletberliac/actor-with-variance-estimated-critic | 294ef39d05f31067907bb6ff9405183d4a530244 | [
"MIT"
] | 1 | 2021-05-06T08:55:24.000Z | 2021-05-06T08:55:24.000Z | import os
import sys
import shutil
import json
import time
import datetime
import tempfile
import warnings
from collections import defaultdict
from typing import Optional
import tensorflow as tf
from tensorflow.python import pywrap_tensorflow
from tensorflow.core.util import event_pb2
from tensorflow.python.util import compat
from stable_baselines.common.misc_util import mpi_rank_or_zero
DEBUG = 10
INFO = 20
WARN = 30
ERROR = 40
DISABLED = 50
class KVWriter(object):
"""
Key Value writer
"""
def writekvs(self, kvs):
"""
write a dictionary to file
:param kvs: (dict)
"""
raise NotImplementedError
class SeqWriter(object):
"""
sequence writer
"""
def writeseq(self, seq):
"""
write an array to file
:param seq: (list)
"""
raise NotImplementedError
class HumanOutputFormat(KVWriter, SeqWriter):
def __init__(self, filename_or_file):
"""
log to a file, in a human readable format
:param filename_or_file: (str or File) the file to write the log to
"""
if isinstance(filename_or_file, str):
self.file = open(filename_or_file, 'wt')
self.own_file = True
else:
assert hasattr(filename_or_file, 'write'), 'Expected file or str, got {}'.format(filename_or_file)
self.file = filename_or_file
self.own_file = False
def writekvs(self, kvs):
# Create strings for printing
key2str = {}
for (key, val) in sorted(kvs.items()):
if isinstance(val, float):
valstr = '%-8.3g' % (val,)
else:
valstr = str(val)
key2str[self._truncate(key)] = self._truncate(valstr)
# Find max widths
if len(key2str) == 0:
warnings.warn('Tried to write empty key-value dict')
return
else:
keywidth = max(map(len, key2str.keys()))
valwidth = max(map(len, key2str.values()))
# Write out the data
dashes = '-' * (keywidth + valwidth + 7)
lines = [dashes]
for (key, val) in sorted(key2str.items()):
lines.append('| %s%s | %s%s |' % (
key,
' ' * (keywidth - len(key)),
val,
' ' * (valwidth - len(val)),
))
lines.append(dashes)
self.file.write('\n'.join(lines) + '\n')
# Flush the output to the file
self.file.flush()
@classmethod
def _truncate(cls, string):
return string[:20] + '...' if len(string) > 23 else string
def writeseq(self, seq):
seq = list(seq)
for (i, elem) in enumerate(seq):
self.file.write(elem)
if i < len(seq) - 1: # add space unless this is the last one
self.file.write(' ')
self.file.write('\n')
self.file.flush()
def close(self):
"""
closes the file
"""
if self.own_file:
self.file.close()
class JSONOutputFormat(KVWriter):
def __init__(self, filename):
"""
log to a file, in the JSON format
:param filename: (str) the file to write the log to
"""
self.file = open(filename, 'wt')
def writekvs(self, kvs):
for key, value in sorted(kvs.items()):
if hasattr(value, 'dtype'):
if value.shape == () or len(value) == 1:
# if value is a dimensionless numpy array or of length 1, serialize as a float
kvs[key] = float(value)
else:
# otherwise, a value is a numpy array, serialize as a list or nested lists
kvs[key] = value.tolist()
self.file.write(json.dumps(kvs) + '\n')
self.file.flush()
def close(self):
"""
closes the file
"""
self.file.close()
class CSVOutputFormat(KVWriter):
def __init__(self, filename):
"""
log to a file, in a CSV format
:param filename: (str) the file to write the log to
"""
self.file = open(filename, 'w+t')
self.keys = []
self.sep = ','
def writekvs(self, kvs):
# Add our current row to the history
extra_keys = kvs.keys() - self.keys
if extra_keys:
self.keys.extend(extra_keys)
self.file.seek(0)
lines = self.file.readlines()
self.file.seek(0)
for (i, key) in enumerate(self.keys):
if i > 0:
self.file.write(',')
self.file.write(key)
self.file.write('\n')
for line in lines[1:]:
self.file.write(line[:-1])
self.file.write(self.sep * len(extra_keys))
self.file.write('\n')
for i, key in enumerate(self.keys):
if i > 0:
self.file.write(',')
value = kvs.get(key)
if value is not None:
self.file.write(str(value))
self.file.write('\n')
self.file.flush()
def close(self):
"""
closes the file
"""
self.file.close()
def summary_val(key, value):
"""
:param key: (str)
:param value: (float)
"""
kwargs = {'tag': key, 'simple_value': float(value)}
return tf.compat.v1.Summary.Value(**kwargs)
def valid_float_value(value):
"""
Returns True if the value can be successfully cast into a float
:param value: (Any) the value to check
:return: (bool)
"""
try:
float(value)
return True
except TypeError:
return False
class TensorBoardOutputFormat(KVWriter):
def __init__(self, folder):
"""
Dumps key/value pairs into TensorBoard's numeric format.
:param folder: (str) the folder to write the log to
"""
os.makedirs(folder, exist_ok=True)
self.dir = folder
self.step = 1
prefix = 'events'
path = os.path.join(os.path.abspath(folder), prefix)
self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path)) # type: pywrap_tensorflow.EventsWriter
def writekvs(self, kvs):
summary = tf.compat.v1.Summary(value=[summary_val(k, v) for k, v in kvs.items() if valid_float_value(v)])
event = event_pb2.Event(wall_time=time.time(), summary=summary)
event.step = self.step # is there any reason why you'd want to specify the step?
self.writer.WriteEvent(event)
self.writer.Flush()
self.step += 1
def close(self):
"""
closes the file
"""
if self.writer:
self.writer.Close()
self.writer = None
def make_output_format(_format, ev_dir, log_suffix=''):
"""
return a logger for the requested format
:param _format: (str) the requested format to log to ('stdout', 'log', 'json', 'csv' or 'tensorboard')
:param ev_dir: (str) the logging directory
:param log_suffix: (str) the suffix for the log file
:return: (KVWrite) the logger
"""
os.makedirs(ev_dir, exist_ok=True)
if _format == 'stdout':
return HumanOutputFormat(sys.stdout)
elif _format == 'log':
return HumanOutputFormat(os.path.join(ev_dir, 'log%s.txt' % log_suffix))
elif _format == 'json':
return JSONOutputFormat(os.path.join(ev_dir, 'progress%s.json' % log_suffix))
elif _format == 'csv':
return CSVOutputFormat(os.path.join(ev_dir, 'progress%s.csv' % log_suffix))
elif _format == 'tensorboard':
return TensorBoardOutputFormat(os.path.join(ev_dir, 'tb%s' % log_suffix))
else:
raise ValueError('Unknown format specified: %s' % (_format,))
# ================================================================
# API
# ================================================================
def logkv(key, val):
"""
Log a value of some diagnostic
Call this once for each diagnostic quantity, each iteration
If called many times, last value will be used.
:param key: (Any) save to log this key
:param val: (Any) save to log this value
"""
Logger.CURRENT.logkv(key, val)
def logkv_mean(key, val):
"""
The same as logkv(), but if called many times, values averaged.
:param key: (Any) save to log this key
:param val: (Number) save to log this value
"""
Logger.CURRENT.logkv_mean(key, val)
def logkvs(key_values):
"""
Log a dictionary of key-value pairs
:param key_values: (dict) the list of keys and values to save to log
"""
for key, value in key_values.items():
logkv(key, value)
def dumpkvs():
"""
Write all of the diagnostics from the current iteration
"""
Logger.CURRENT.dumpkvs()
def getkvs():
"""
get the key values logs
:return: (dict) the logged values
"""
return Logger.CURRENT.name2val
def log(*args, level=INFO):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
level: int. (see logger.py docs) If the global logger level is higher than
the level argument here, don't print to stdout.
:param args: (list) log the arguments
:param level: (int) the logging level (can be DEBUG=10, INFO=20, WARN=30, ERROR=40, DISABLED=50)
"""
Logger.CURRENT.log(*args, level=level)
def debug(*args):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
Using the DEBUG level.
:param args: (list) log the arguments
"""
log(*args, level=DEBUG)
def info(*args):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
Using the INFO level.
:param args: (list) log the arguments
"""
log(*args, level=INFO)
def warn(*args):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
Using the WARN level.
:param args: (list) log the arguments
"""
log(*args, level=WARN)
def error(*args):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
Using the ERROR level.
:param args: (list) log the arguments
"""
log(*args, level=ERROR)
def set_level(level):
"""
Set logging threshold on current logger.
:param level: (int) the logging level (can be DEBUG=10, INFO=20, WARN=30, ERROR=40, DISABLED=50)
"""
Logger.CURRENT.set_level(level)
def get_level():
"""
Get logging threshold on current logger.
:return: (int) the logging level (can be DEBUG=10, INFO=20, WARN=30, ERROR=40, DISABLED=50)
"""
return Logger.CURRENT.level
def get_dir():
"""
Get directory that log files are being written to.
will be None if there is no output directory (i.e., if you didn't call start)
:return: (str) the logging directory
"""
return Logger.CURRENT.get_dir()
record_tabular = logkv
dump_tabular = dumpkvs
class ProfileKV:
def __init__(self, name):
"""
Usage:
with logger.ProfileKV("interesting_scope"):
code
:param name: (str) the profiling name
"""
self.name = "wait_" + name
def __enter__(self):
self.start_time = time.time()
def __exit__(self, _type, value, traceback):
Logger.CURRENT.name2val[self.name] += time.time() - self.start_time
def profile(name):
"""
Usage:
@profile("my_func")
def my_func(): code
:param name: (str) the profiling name
:return: (function) the wrapped function
"""
def decorator_with_name(func):
def func_wrapper(*args, **kwargs):
with ProfileKV(name):
return func(*args, **kwargs)
return func_wrapper
return decorator_with_name
# ================================================================
# Backend
# ================================================================
class Logger(object):
# A logger with no output files. (See right below class definition)
# So that you can still log to the terminal without setting up any output files
DEFAULT = None # type: Optional["Logger"]
# Current logger being used by the free functions above
CURRENT = None # type: Optional["Logger"]
def __init__(self, folder, output_formats):
"""
the logger class
:param folder: (str) the logging location
:param output_formats: ([str]) the list of output format
"""
self.name2val = defaultdict(float) # values this iteration
self.name2cnt = defaultdict(int)
self.level = INFO
self.dir = folder
self.output_formats = output_formats
# Logging API, forwarded
# ----------------------------------------
def logkv(self, key, val):
"""
Log a value of some diagnostic
Call this once for each diagnostic quantity, each iteration
If called many times, last value will be used.
:param key: (Any) save to log this key
:param val: (Any) save to log this value
"""
self.name2val[key] = val
def logkv_mean(self, key, val):
"""
The same as logkv(), but if called many times, values averaged.
:param key: (Any) save to log this key
:param val: (Number) save to log this value
"""
if val is None:
self.name2val[key] = None
return
oldval, cnt = self.name2val[key], self.name2cnt[key]
self.name2val[key] = oldval * cnt / (cnt + 1) + val / (cnt + 1)
self.name2cnt[key] = cnt + 1
def dumpkvs(self):
"""
Write all of the diagnostics from the current iteration
"""
if self.level == DISABLED:
return
for fmt in self.output_formats:
if isinstance(fmt, KVWriter):
fmt.writekvs(self.name2val)
self.name2val.clear()
self.name2cnt.clear()
def log(self, *args, level=INFO):
"""
Write the sequence of args, with no separators,
to the console and output files (if you've configured an output file).
level: int. (see logger.py docs) If the global logger level is higher than
the level argument here, don't print to stdout.
:param args: (list) log the arguments
:param level: (int) the logging level (can be DEBUG=10, INFO=20, WARN=30, ERROR=40, DISABLED=50)
"""
if self.level <= level:
self._do_log(args)
# Configuration
# ----------------------------------------
def set_level(self, level):
"""
Set logging threshold on current logger.
:param level: (int) the logging level (can be DEBUG=10, INFO=20, WARN=30, ERROR=40, DISABLED=50)
"""
self.level = level
def get_dir(self):
"""
Get directory that log files are being written to.
will be None if there is no output directory (i.e., if you didn't call start)
:return: (str) the logging directory
"""
return self.dir
def close(self):
"""
closes the file
"""
for fmt in self.output_formats:
fmt.close()
# Misc
# ----------------------------------------
def _do_log(self, args):
"""
log to the requested format outputs
:param args: (list) the arguments to log
"""
for fmt in self.output_formats:
if isinstance(fmt, SeqWriter):
fmt.writeseq(map(str, args))
Logger.DEFAULT = Logger.CURRENT = Logger(folder=None, output_formats=[HumanOutputFormat(sys.stdout)])
def configure(folder=None, format_strs=None):
"""
configure the current logger
:param folder: (str) the save location (if None, $OPENAI_LOGDIR, if still None, tempdir/openai-[date & time])
:param format_strs: (list) the output logging format
(if None, $OPENAI_LOG_FORMAT, if still None, ['stdout', 'log', 'csv'])
"""
if folder is None:
folder = os.getenv('OPENAI_LOGDIR')
if folder is None:
folder = os.path.join(tempfile.gettempdir(), datetime.datetime.now().strftime("openai-%Y-%m-%d-%H-%M-%S-%f"))
assert isinstance(folder, str)
os.makedirs(folder, exist_ok=True)
rank = mpi_rank_or_zero()
log_suffix = ''
if format_strs is None:
if rank == 0:
format_strs = os.getenv('OPENAI_LOG_FORMAT', 'stdout,log,csv').split(',')
else:
log_suffix = "-rank%03i" % rank
format_strs = os.getenv('OPENAI_LOG_FORMAT_MPI', 'log').split(',')
format_strs = filter(None, format_strs)
output_formats = [make_output_format(f, folder, log_suffix) for f in format_strs]
Logger.CURRENT = Logger(folder=folder, output_formats=output_formats)
log('Logging to %s' % folder)
def reset():
"""
reset the current logger
"""
if Logger.CURRENT is not Logger.DEFAULT:
Logger.CURRENT.close()
Logger.CURRENT = Logger.DEFAULT
log('Reset logger')
class ScopedConfigure(object):
def __init__(self, folder=None, format_strs=None):
"""
Class for using context manager while logging
usage:
with ScopedConfigure(folder=None, format_strs=None):
{code}
:param folder: (str) the logging folder
:param format_strs: ([str]) the list of output logging format
"""
self.dir = folder
self.format_strs = format_strs
self.prevlogger = None
def __enter__(self):
self.prevlogger = Logger.CURRENT
configure(folder=self.dir, format_strs=self.format_strs)
def __exit__(self, *args):
Logger.CURRENT.close()
Logger.CURRENT = self.prevlogger
# ================================================================
def _demo():
"""
tests for the logger module
"""
info("hi")
debug("shouldn't appear")
set_level(DEBUG)
debug("should appear")
folder = "/tmp/testlogging"
if os.path.exists(folder):
shutil.rmtree(folder)
configure(folder=folder)
logkv("a", 3)
logkv("b", 2.5)
dumpkvs()
logkv("b", -2.5)
logkv("a", 5.5)
dumpkvs()
info("^^^ should see a = 5.5")
logkv_mean("b", -22.5)
logkv_mean("b", -44.4)
logkv("a", 5.5)
dumpkvs()
with ScopedConfigure(None, None):
info("^^^ should see b = 33.3")
with ScopedConfigure("/tmp/test-logger/", ["json"]):
logkv("b", -2.5)
dumpkvs()
reset()
logkv("a", "longasslongasslongasslongasslongasslongassvalue")
dumpkvs()
warn("hey")
error("oh")
logkvs({"test": 1})
# ================================================================
# Readers
# ================================================================
def read_json(fname):
"""
read a json file using pandas
:param fname: (str) the file path to read
:return: (pandas DataFrame) the data in the json
"""
import pandas
data = []
with open(fname, 'rt') as file_handler:
for line in file_handler:
data.append(json.loads(line))
return pandas.DataFrame(data)
def read_csv(fname):
"""
read a csv file using pandas
:param fname: (str) the file path to read
:return: (pandas DataFrame) the data in the csv
"""
import pandas
return pandas.read_csv(fname, index_col=None, comment='#')
def read_tb(path):
"""
read a tensorboard output
:param path: (str) a tensorboard file OR a directory, where we will find all TB files of the form events.
:return: (pandas DataFrame) the tensorboad data
"""
import pandas
import numpy as np
from glob import glob
# from collections import defaultdict
import tensorflow as tf
if os.path.isdir(path):
fnames = glob(os.path.join(path, "events.*"))
elif os.path.basename(path).startswith("events."):
fnames = [path]
else:
raise NotImplementedError("Expected tensorboard file or directory containing them. Got %s" % path)
tag2pairs = defaultdict(list)
maxstep = 0
for fname in fnames:
for summary in tf.compat.v1.train.summary_iterator(fname):
if summary.step > 0:
for value in summary.summary.value:
pair = (summary.step, value.simple_value)
tag2pairs[value.tag].append(pair)
maxstep = max(summary.step, maxstep)
data = np.empty((maxstep, len(tag2pairs)))
data[:] = np.nan
tags = sorted(tag2pairs.keys())
for (colidx, tag) in enumerate(tags):
pairs = tag2pairs[tag]
for (step, value) in pairs:
data[step - 1, colidx] = value
return pandas.DataFrame(data, columns=tags)
if __name__ == "__main__":
_demo()
| 28.376344 | 117 | 0.575881 |
4a26119c2990d982aee640d62ea718e71873636b | 11,486 | py | Python | airflow/secrets/local_filesystem.py | shashijangra/airflow-1 | c3e340584bf1892c4f73aa9e7495b5823dab0c40 | [
"Apache-2.0"
] | 1 | 2020-09-15T02:32:55.000Z | 2020-09-15T02:32:55.000Z | airflow/secrets/local_filesystem.py | shashijangra/airflow-1 | c3e340584bf1892c4f73aa9e7495b5823dab0c40 | [
"Apache-2.0"
] | 14 | 2019-12-03T02:54:42.000Z | 2020-02-27T16:08:10.000Z | airflow/secrets/local_filesystem.py | shashijangra/airflow-1 | c3e340584bf1892c4f73aa9e7495b5823dab0c40 | [
"Apache-2.0"
] | 1 | 2020-11-04T03:10:24.000Z | 2020-11-04T03:10:24.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Objects relating to retrieving connections and variables from local file
"""
import json
import logging
import os
import warnings
from collections import defaultdict
from inspect import signature
from json import JSONDecodeError
from typing import Any, Dict, List, Optional, Set, Tuple
import yaml
from airflow.exceptions import (
AirflowException, AirflowFileParseException, ConnectionNotUnique, FileSyntaxError,
)
from airflow.secrets.base_secrets import BaseSecretsBackend
from airflow.utils.file import COMMENT_PATTERN
from airflow.utils.log.logging_mixin import LoggingMixin
log = logging.getLogger(__name__)
def get_connection_parameter_names() -> Set[str]:
"""Returns :class:`airflow.models.connection.Connection` constructor parameters."""
from airflow.models.connection import Connection
return {k for k in signature(Connection.__init__).parameters.keys() if k != "self"}
def _parse_env_file(file_path: str) -> Tuple[Dict[str, List[str]], List[FileSyntaxError]]:
"""
    Parse a file in the ``.env`` format.
.. code-block:: text
        MY_CONN_ID=my-conn-type://my-login:my-pa%2Fssword@my-host:5432/my-schema?param1=val1&param2=val2
:param file_path: The location of the file that will be processed.
:type file_path: str
:return: Tuple with mapping of key and list of values and list of syntax errors
"""
with open(file_path) as f:
content = f.read()
secrets: Dict[str, List[str]] = defaultdict(list)
errors: List[FileSyntaxError] = []
for line_no, line in enumerate(content.splitlines(), 1):
if not line:
# Ignore empty line
continue
if COMMENT_PATTERN.match(line):
# Ignore comments
continue
var_parts: List[str] = line.split("=", 2)
if len(var_parts) != 2:
errors.append(
FileSyntaxError(
line_no=line_no,
message='Invalid line format. The line should contain at least one equal sign ("=").',
)
)
continue
key, value = var_parts
if not key:
errors.append(FileSyntaxError(line_no=line_no, message="Invalid line format. Key is empty.",))
secrets[key].append(value)
return secrets, errors
def _parse_yaml_file(file_path: str) -> Tuple[Dict[str, List[str]], List[FileSyntaxError]]:
"""
Parse a file in the YAML format.
:param file_path: The location of the file that will be processed.
:type file_path: str
:return: Tuple with mapping of key and list of values and list of syntax errors
"""
with open(file_path) as f:
content = f.read()
if not content:
return {}, [FileSyntaxError(line_no=1, message="The file is empty.")]
try:
secrets = yaml.safe_load(content)
except yaml.MarkedYAMLError as e:
return {}, [FileSyntaxError(line_no=e.problem_mark.line, message=str(e))]
if not isinstance(secrets, dict):
return {}, [FileSyntaxError(line_no=1, message="The file should contain the object.")]
return secrets, []
def _parse_json_file(file_path: str) -> Tuple[Dict[str, Any], List[FileSyntaxError]]:
"""
Parse a file in the JSON format.
:param file_path: The location of the file that will be processed.
:type file_path: str
:return: Tuple with mapping of key and list of values and list of syntax errors
"""
with open(file_path) as f:
content = f.read()
if not content:
return {}, [FileSyntaxError(line_no=1, message="The file is empty.")]
try:
secrets = json.loads(content)
except JSONDecodeError as e:
return {}, [FileSyntaxError(line_no=int(e.lineno), message=e.msg)]
if not isinstance(secrets, dict):
return {}, [FileSyntaxError(line_no=1, message="The file should contain the object.")]
return secrets, []
FILE_PARSERS = {
"env": _parse_env_file,
"json": _parse_json_file,
"yaml": _parse_yaml_file,
}
def _parse_secret_file(file_path: str) -> Dict[str, Any]:
"""
Based on the file extension format, selects a parser, and parses the file.
:param file_path: The location of the file that will be processed.
:type file_path: str
:return: Map of secret key (e.g. connection ID) and value.
"""
if not os.path.exists(file_path):
raise AirflowException(
f"File {file_path} was not found. Check the configuration of your Secrets backend."
)
log.debug("Parsing file: %s", file_path)
ext = file_path.rsplit(".", 2)[-1].lower()
if ext not in FILE_PARSERS:
raise AirflowException("Unsupported file format. The file must have the extension .env or .json")
secrets, parse_errors = FILE_PARSERS[ext](file_path)
log.debug("Parsed file: len(parse_errors)=%d, len(secrets)=%d", len(parse_errors), len(secrets))
if parse_errors:
raise AirflowFileParseException(
"Failed to load the secret file.", file_path=file_path, parse_errors=parse_errors
)
return secrets
def _create_connection(conn_id: str, value: Any):
"""
Creates a connection based on a URL or JSON object.
"""
from airflow.models.connection import Connection
if isinstance(value, str):
return Connection(conn_id=conn_id, uri=value)
if isinstance(value, dict):
connection_parameter_names = get_connection_parameter_names() | {"extra_dejson"}
current_keys = set(value.keys())
if not current_keys.issubset(connection_parameter_names):
illegal_keys = current_keys - connection_parameter_names
illegal_keys_list = ", ".join(illegal_keys)
raise AirflowException(
f"The object have illegal keys: {illegal_keys_list}. "
f"The dictionary can only contain the following keys: {connection_parameter_names}"
)
if "extra" in value and "extra_dejson" in value:
raise AirflowException(
"The extra and extra_dejson parameters are mutually exclusive. "
"Please provide only one parameter."
)
if "extra_dejson" in value:
value["extra"] = json.dumps(value["extra_dejson"])
del value["extra_dejson"]
if "conn_id" in current_keys and conn_id != value["conn_id"]:
raise AirflowException(
f"Mismatch conn_id. "
f"The dictionary key has the value: {value['conn_id']}. "
f"The item has the value: {conn_id}."
)
value["conn_id"] = conn_id
return Connection(**value)
raise AirflowException(
f"Unexpected value type: {type(value)}. The connection can only be defined using a string or object."
)
def load_variables(file_path: str) -> Dict[str, str]:
"""
Load variables from a text file.
Both ``JSON`` and ``.env`` files are supported.
:param file_path: The location of the file that will be processed.
:type file_path: str
:rtype: Dict[str, List[str]]
"""
log.debug("Loading variables from a text file")
secrets = _parse_secret_file(file_path)
invalid_keys = [key for key, values in secrets.items() if isinstance(values, list) and len(values) != 1]
if invalid_keys:
raise AirflowException(f'The "{file_path}" file contains multiple values for keys: {invalid_keys}')
variables = {key: values[0] if isinstance(values, list) else values for key, values in secrets.items()}
log.debug("Loaded %d variables: ", len(variables))
return variables
def load_connections(file_path) -> Dict[str, List[Any]]:
"""
    This function is deprecated. Please use `airflow.secrets.local_filesystem.load_connections_dict`.
"""
warnings.warn(
"This function is deprecated. Please use `airflow.secrets.local_filesystem.load_connections_dict`.",
DeprecationWarning, stacklevel=2
)
    return {k: [v] for k, v in load_connections_dict(file_path).items()}
def load_connections_dict(file_path: str) -> Dict[str, Any]:
"""
Load connection from text file.
Both ``JSON`` and ``.env`` files are supported.
:return: A dictionary where the key contains a connection ID and the value contains a list of connections.
:rtype: Dict[str, airflow.models.connection.Connection]
"""
log.debug("Loading connection")
secrets: Dict[str, Any] = _parse_secret_file(file_path)
connection_by_conn_id = {}
for key, secret_values in list(secrets.items()):
if isinstance(secret_values, list):
if len(secret_values) > 1:
raise ConnectionNotUnique(f"Found multiple values for {key} in {file_path}.")
for secret_value in secret_values:
connection_by_conn_id[key] = _create_connection(key, secret_value)
else:
connection_by_conn_id[key] = _create_connection(key, secret_values)
num_conn = len(connection_by_conn_id)
log.debug("Loaded %d connections", num_conn)
return connection_by_conn_id
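# Editor-added example (not part of the original module): a minimal JSON
# connections file that load_connections_dict() above would accept; the
# connection ids and values are illustrative assumptions.
#
#     {
#         "my_postgres": "postgres://user:pass@localhost:5432/mydb",
#         "my_http": {"conn_type": "http", "host": "example.com", "login": "user"}
#     }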
class LocalFilesystemBackend(BaseSecretsBackend, LoggingMixin):
"""
Retrieves Connection objects and Variables from local files
Both ``JSON`` and ``.env`` files are supported.
:param variables_file_path: File location with variables data.
:type variables_file_path: str
:param connections_file_path: File location with connection data.
:type connections_file_path: str
"""
def __init__(
self, variables_file_path: Optional[str] = None, connections_file_path: Optional[str] = None
):
super().__init__()
self.variables_file = variables_file_path
self.connections_file = connections_file_path
@property
def _local_variables(self) -> Dict[str, str]:
if not self.variables_file:
self.log.debug("The file for variables is not specified. Skipping")
# The user may not specify any file.
return {}
secrets = load_variables(self.variables_file)
return secrets
@property
def _local_connections(self) -> Dict[str, List[Any]]:
if not self.connections_file:
self.log.debug("The file for connection is not specified. Skipping")
# The user may not specify any file.
return {}
return load_connections_dict(self.connections_file)
def get_connections(self, conn_id: str) -> List[Any]:
if conn_id in self._local_connections:
return [self._local_connections[conn_id]]
return []
def get_variable(self, key: str) -> Optional[str]:
return self._local_variables.get(key)
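# Editor-added usage sketch (not part of the original module): one way to enable
# this backend is through the [secrets] section of airflow.cfg; the file paths
# below are illustrative assumptions, and backend_kwargs must be a single JSON
# object on one line.
#
#     [secrets]
#     backend = airflow.secrets.local_filesystem.LocalFilesystemBackend
#     backend_kwargs = {"variables_file_path": "/files/vars.json", "connections_file_path": "/files/conns.json"}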
| 35.670807 | 110 | 0.670643 |
4a2611a8d3d41b8f8cc3849b8bf1324d7cba3a07 | 3,248 | py | Python | bin/eoIngestPd.py | lsst-camera-dh/eotask-gen3 | 41e7de97c607c5a8b21c4f7164b3852e7d07359a | [
"BSD-3-Clause-LBNL"
] | null | null | null | bin/eoIngestPd.py | lsst-camera-dh/eotask-gen3 | 41e7de97c607c5a8b21c4f7164b3852e7d07359a | [
"BSD-3-Clause-LBNL"
] | 33 | 2021-04-23T17:43:34.000Z | 2022-01-17T19:15:14.000Z | bin/eoIngestPd.py | lsst-camera-dh/eotask-gen3 | 41e7de97c607c5a8b21c4f7164b3852e7d07359a | [
"BSD-3-Clause-LBNL"
] | null | null | null | #! /usr/bin/env python
import os
import argparse
from astropy.table import Table
from lsst.daf.butler import Butler, DatasetType
def getDataIdRecords(butler, filename):
tokens = os.path.splitext(os.path.basename(filename))[0].split('_')
dayObs = int(tokens[2])
seqNum = int(tokens[3])
where = "exposure.day_obs = dayobs and exposure.seq_num = seqnum"
records = list(butler.registry.queryDimensionRecords("exposure",
where=where,
bind={"dayobs": dayObs, "seqnum": seqNum},
dataId={"instrument": "LSSTCam"}))
if records:
return records[0]
raise KeyError("Failed to find exposure ID for %s %06i" % (dayObs, seqNum))
def setTableMetaData(table, dataIdRecords):
table.columns[0].name = 'Time'
table.columns[1].name = 'Current'
table.columns[0].unit = 's'
table.columns[1].unit = 'A'
table.meta['OBSTYPE'] = dataIdRecords.observation_type
table.meta['INSTRUME'] = 'LSSTCam'
table.meta['CALIBDATE'] = dataIdRecords.timespan.end.isot
table.meta['CALIB_ID'] = 'calibDate=%s' % dataIdRecords.timespan.end.isot
table.meta['PD_SCEHMA'] = 'Simple'
table.meta['PD_SCHEMA_VERSION'] = 1
table.meta['DATE'] = dataIdRecords.timespan.end.isot
    table.meta['CALIB_CREATION_DATE'] \
        = dataIdRecords.timespan.end.strftime('%Y-%m-%d')
    table.meta['CALIB_CREATION_TIME'] \
        = dataIdRecords.timespan.end.strftime('%H:%M:%S')
def ingest_pd_files(repo, pd_files, output_run='LSSTCam/photodiode/all'):
butler = Butler(repo, writeable=True, run=output_run)
datasetType = DatasetType("photodiode", ("instrument", "exposure"),
"AstropyTable",
universe=butler.registry.dimensions)
try:
butler.registry.registerDatasetType(datasetType)
except Exception:
pass
print("Found %i photodiode files" % len(pd_files))
for i, aFile in enumerate(pd_files):
        print(i, len(pd_files), os.path.basename(aFile))
try:
dataIdRecords = getDataIdRecords(butler, aFile)
except KeyError as msg:
print(msg)
continue
table = Table.read(aFile, format='ascii')
setTableMetaData(table, dataIdRecords)
# dataId = dict(instrument=dataIdRecords.instrument,
# exposure=dataIdRecords.id)
dataId = dataIdRecords.dataId
with butler.transaction():
butler.put(table, "photodiode", dataId=dataId)
print("Done!")
def main():
# argument parser
parser = argparse.ArgumentParser(prog='eoIngestPd.py')
parser.add_argument('-b', '--butler', type=str, help='Butler Repo')
parser.add_argument('--output-run', type=str,
help="The name of the run datasets should be output to",
default="LSSTCam/photodiode/all")
parser.add_argument('files', type=str, nargs='+', help='Files to import')
# unpack options
args = parser.parse_args()
ingest_pd_files(args.butler, args.files, args.output_run)
if __name__ == '__main__':
main()
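# Editor-added usage sketch (not part of the original script); the repository
# path and photodiode file name are illustrative assumptions. getDataIdRecords()
# expects day_obs and seq_num as the third and fourth '_'-separated tokens of
# the file name.
#
#     python eoIngestPd.py -b /path/to/butler_repo \
#         --output-run LSSTCam/photodiode/all \
#         Photodiode_Readings_20211001_000123.txt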
| 33.142857 | 99 | 0.615148 |
4a2612813fa6bd5a4d162fe061fbc0c123b91355 | 21,578 | py | Python | Projects/Parameter Estimation and Bayesian Statistics/_2_Exercise_2.py | aleixlopezpascual/aleixlopezpascual.github.io | 89b7449cf7f358d53a2b8f4030b88cbaf8884ef6 | [
"MIT"
] | null | null | null | Projects/Parameter Estimation and Bayesian Statistics/_2_Exercise_2.py | aleixlopezpascual/aleixlopezpascual.github.io | 89b7449cf7f358d53a2b8f4030b88cbaf8884ef6 | [
"MIT"
] | null | null | null | Projects/Parameter Estimation and Bayesian Statistics/_2_Exercise_2.py | aleixlopezpascual/aleixlopezpascual.github.io | 89b7449cf7f358d53a2b8f4030b88cbaf8884ef6 | [
"MIT"
] | null | null | null | import numpy as np
from scipy import stats, special
import matplotlib.pyplot as plt
from math import sqrt, factorial
exercise = 15
#################################################################
# exercise 2.2
if exercise == 2:
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 150, sqrt(150)), color="black")
plt.xlabel(r"$\hat{\nu}$")
plt.ylabel(r"f($\hat{\nu}$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.show()
#################################################################
# exercise 2.3
if exercise == 3:
q = 150 # estimator from the first experiment, we use it as "true" value for the following MC's
    # we now generate 10^6 MC experiments from a Poisson pdf. There is
# a module, so-called numpy.random, which implements pseudo-random number generators for various distributions.
# since we want to generate numbers Poisson distributed, we can directly use this module.
poisson_data = np.random.poisson(q, 10**6)
print(poisson_data)
print(len(poisson_data))
# the obtained values are directly N_i.
# Now we compute the estimator for each one, which in fact corresponds to q_i = N_i
# so we don't have to make any change
plt.hist(poisson_data, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 150, sqrt(150)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (150,150)$")
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(poisson_data)
variance_estimator = np.var(poisson_data, ddof=1)
print(mean_estimator, variance_estimator)
plt.xlabel(r"$\hat{\nu}$")
plt.ylabel(r"N($\hat{\nu}$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.4
if exercise == 4:
N = 150
nu = np.linspace(100,200,100000)
# posterior_pdf = ((nu**N)*np.exp(-nu))/special.gamma(N+1)
    # be careful here. This last expression overflows when nu=113 since the result is ~exp(308)
    # Remember that the max normal number with 64-bit floating point is 1.7x10^308
# so we have to redefine the calculations
posterior_pdf = ((nu**(N/2))*(np.exp(-nu))*(nu**(N/2)))/special.gamma(N+1)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|150)")
plt.plot(nu, stats.norm.pdf(nu, 150, sqrt(150)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (150,150)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|150)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.5
if exercise == 5:
N_obs=150
nu = np.random.uniform(0.0, N_obs + 10.0*sqrt(N_obs), size=10**7) # we generate nu that satisfies our prior
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count)
plt.figure(1)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 150, sqrt(150)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (150,150)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
# mean = 150.91 variance 150.56
# 151.06,151.09
# 150.95,152.57
# 151.04,151.19
    # revise exercise 2.4: find the mode and verify whether it is 150 or 151
plt.figure(2)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (151,151)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.6 (if 6 -> repeat ex 2.4. if 7 ex -> 7 repeat ex 2.5)
if exercise == 6:
N = 10
nu = np.linspace(0,50,100000)
# posterior_pdf = ((nu**N)*np.exp(-nu))/special.gamma(N+1)
    # be careful here. This last expression overflows when nu=113 since the result is ~exp(308)
    # Remember that the max normal number with 64-bit floating point is 1.7x10^308
# so we have to redefine the calculations
posterior_pdf = ((nu**(N/2))*(np.exp(-nu))*(nu**(N/2)))/special.gamma(N+1)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|10)")
plt.plot(nu, stats.norm.pdf(nu, 10, sqrt(10)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (10,10)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|10)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
if exercise == 7:
N_obs=10
nu = np.random.uniform(0.0, N_obs + 10.0*sqrt(N_obs), size=10**7) # we generate nu that satisfies our prior
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count)
plt.figure(1)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 10, sqrt(10)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (10,10)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (11,11)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.7 (if 8 -> repeat ex 2.4. if 9 ex -> 7 repeat ex 2.5)
if exercise == 8:
N = 1
nu = np.linspace(0,10,10000)
# posterior_pdf = ((nu**N)*np.exp(-nu))/special.gamma(N+1)
    # be careful here. This last expression overflows when nu=113 since the result is ~exp(308)
    # Remember that the max normal number with 64-bit floating point is 1.7x10^308
# so we have to redefine the calculations
posterior_pdf = ((nu**(N/2))*(np.exp(-nu))*(nu**(N/2)))/special.gamma(N+1)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|1)")
plt.plot(nu, stats.norm.pdf(nu, 1, sqrt(1)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (1,1)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|1)")
plt.xlim(0,5)
plt.ylim(0, 0.45)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
if exercise == 9:
N_obs=1
nu = np.random.uniform(0.0, N_obs + 10.0*sqrt(N_obs), size=10**7) # we generate nu that satisfies our prior
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count)
plt.figure(1)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 1, sqrt(1)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (1,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 0.45)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (1,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 0.45)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
#################################################################
#################################################################
# exercise 2.8
if exercise == 10:
N = 150
nu = np.linspace(100,200,100000)
posterior_pdf = ((nu**((N-1)/2))*(np.exp(-nu))*(nu**((N-1)/2)))/special.gamma(N)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|150)")
plt.plot(nu, stats.norm.pdf(nu, 150, sqrt(150)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (150,150)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|150)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
if exercise == 11:
N_obs=150
# we generate nu that satisfies our prior.
# we have used the inv transformation MC method
# remember that we always use r = U[0,1] in this method
nu = np.exp(np.random.uniform(0.0, np.log(N_obs + 10.0*sqrt(N_obs)), size=10**7))
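    # Editor-added note (not in the original): drawing log(nu) uniformly on
    # [0, log(nu_max)] is the inverse-transform sample of the 1/nu prior on
    # [1, nu_max]. With r = U[0,1] an equivalent explicit form would be:
    #     r = np.random.uniform(0.0, 1.0, size=10**7)
    #     nu = (N_obs + 10.0*sqrt(N_obs))**r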
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count, nu_i)
plt.figure(1)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 150, sqrt(150)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (150,150)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (151,151)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(100,200)
plt.ylim(0, 0.035)
plt.xticks(np.arange(100,210,10))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.6 (if 6 -> repeat ex 2.4. if 7 ex -> 7 repeat ex 2.5)
if exercise == 12:
N = 10
nu = np.linspace(0,50,100000)
# posterior_pdf = ((nu**N)*np.exp(-nu))/special.gamma(N+1)
    # be careful here. This last expression overflows when nu=113 since the result is ~exp(308)
    # Remember that the max normal number with 64-bit floating point is 1.7x10^308
# so we have to redefine the calculations
posterior_pdf = ((nu**((N-1)/2))*(np.exp(-nu))*(nu**((N-1)/2)))/special.gamma(N)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|10)")
plt.plot(nu, stats.norm.pdf(nu, 10, sqrt(10)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (10,10)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|10)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
if exercise == 13:
N_obs=10
nu = np.exp(np.random.uniform(0.0, np.log(N_obs + 10.0*sqrt(N_obs)), size=10**7)) # we generate nu that satisfies our prior
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count)
plt.figure(1)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 10, sqrt(10)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (10,10)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
plt.hist(nu_accepted, bins= 30, normed=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (11,11)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,20)
plt.ylim(0, 0.14)
plt.xticks(np.arange(0,22,2))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
#################################################################
# exercise 2.7 (exercise == 14 repeats ex 2.4 and exercise == 15 repeats ex 2.5, now with N_obs = 1)
if exercise == 14:
N = 1
nu = np.linspace(0,10,10000)
# posterior_pdf = ((nu**N)*np.exp(-nu))/special.gamma(N+1)
    # be careful here. This last expression overflows when n=113 since the result reaches exp(308)
    # Remember that the largest normal number with 64-bit floating point is about 1.7x10^308
# so we have to redefine the calculations
posterior_pdf = ((nu**((N-1)/2))*(np.exp(-nu))*(nu**((N-1)/2)))/special.gamma(N)
plt.plot(nu, posterior_pdf, color = "black", linewidth= "1", label = r"P($\nu$|1)")
plt.plot(nu, stats.norm.pdf(nu, 1, sqrt(1)), color="red", linewidth= "1",
label=r"$\mathcal{N} \ (1,1)$")
print(np.argmax(posterior_pdf))
print(nu[np.argmax(posterior_pdf)])
plt.xlabel(r"$\nu$")
plt.ylabel(r"P($\nu$|1)")
plt.xlim(0,5)
plt.ylim(0, 1)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
if exercise == 15:
N_obs=1
nu = np.exp(np.random.uniform(-100., np.log(N_obs + 10.0*sqrt(N_obs)), size=10**7)) # we generate nu that satisfies our prior
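    # Note (added for clarity): same 1/nu prior trick as above, but the lower bound of -100 in
    # log-space lets nu get arbitrarily close to 0 (down to exp(-100)), which matters here
    # because a single observed count N_obs = 1 is compatible with very small nu.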
# N = np.array([])
count = 0
nu_accepted = np.array([])
for nu_i in nu:
N = np.random.poisson(nu_i, 1) # we generate one N for each value of nu
if N == N_obs:
nu_accepted = np.append(nu_accepted, nu_i)
count += 1
print(count)
plt.figure(1)
    plt.hist(nu_accepted, bins=40, density=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 1, sqrt(1)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (1,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 0.60)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
    plt.hist(nu_accepted, bins=50, density=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (1,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 1.0)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show()
np.savetxt("nu_accepted", nu_accepted)
if exercise == 16:
nu_accepted=np.loadtxt("nu_accepted")
plt.figure(1)
    plt.hist(nu_accepted, bins=40, density=True, histtype="step", color="black")
# we compare it with the result in exercise 2.2
x_axis = np.linspace(0,300, 10000)
plt.plot(x_axis, stats.norm.pdf(x_axis, 1, sqrt(1)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (1,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 0.60)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
# we compute the estimators of the mean and variance from poisson_data
mean_estimator = np.mean(nu_accepted)
variance_estimator = np.var(nu_accepted, ddof=1)
print(mean_estimator, variance_estimator)
print("{0:.2f},{1:.2f}".format(mean_estimator, variance_estimator))
plt.figure(2)
    plt.hist(nu_accepted, bins=50, density=True, histtype="step", color="black")
plt.plot(x_axis, stats.norm.pdf(x_axis, mean_estimator, sqrt(variance_estimator)), color="red", linewidth = "1",
label= r"$\mathcal{N} \ (2,1)$")
plt.xlabel(r"$\nu$")
plt.ylabel(r"N($\nu$)")
plt.xlim(0,5)
plt.ylim(0, 1.0)
plt.xticks(np.arange(0,5.5,0.5))
plt.legend(loc="best")
leg = plt.legend()
leg.get_frame().set_edgecolor('black')
plt.show() | 34.469649 | 130 | 0.577579 |
4a2612c2df1749b8637876f31226895e87bc86e3 | 1,906 | py | Python | M.py | Hassan0072/CP_HS99 | 365ecd119e19fcf2c69ccaef42b278a1a9256741 | [
"MIT"
] | null | null | null | M.py | Hassan0072/CP_HS99 | 365ecd119e19fcf2c69ccaef42b278a1a9256741 | [
"MIT"
] | 3 | 2019-05-05T16:30:58.000Z | 2019-05-11T00:23:02.000Z | M.py | Hassan0072/CP_HS99 | 365ecd119e19fcf2c69ccaef42b278a1a9256741 | [
"MIT"
] | 1 | 2019-04-21T18:46:01.000Z | 2019-04-21T18:46:01.000Z | # server2.py
import socket
from threading import Thread
from socketserver import ThreadingMixIn
TCP_IP = 'localhost'
TCP_PORT = 9001
BUFFER_SIZE = 1024
class ClientThread(Thread):
def __init__(self,ip,port,sock):
Thread.__init__(self)
self.ip = ip
self.port = port
self.sock = sock
print (" New thread started for "+ip+":"+str(port))
def run(self):
filename='mytext.txt'
f = open(filename,'rb')
while True:
l = f.read(BUFFER_SIZE)
while (l):
self.sock.send(l)
#print('Sent ',repr(l))
l = f.read(BUFFER_SIZE)
if not l:
f.close()
self.sock.close()
break
tcpsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcpsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
tcpsock.bind((TCP_IP, TCP_PORT))
threads = []
while True:
tcpsock.listen(5)
print ("Waiting for incoming connections...")
(conn, (ip,port)) = tcpsock.accept()
print ('Got connection from ', (ip,port))
newthread = ClientThread(ip,port,conn)
newthread.start()
threads.append(newthread)
for t in threads:
t.join()
# client2.py
#!/usr/bin/env python
import socket
TCP_IP = 'localhost'
TCP_PORT = 9001
BUFFER_SIZE = 1024
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((TCP_IP, TCP_PORT))
with open('received_file', 'wb') as f:
print ('file opened')
while True:
#print('receiving data...')
data = s.recv(BUFFER_SIZE)
        print('data=%s' % data)
if not data:
f.close()
print ('file close()')
break
# write data to a file
f.write(data)
print('Successfully received the file')
s.close()
print('connection closed') | 24.435897 | 62 | 0.570829 |
4a26132a2e3e7309e9f2a865b8a0fa72267cf888 | 27,428 | py | Python | seleniumbase/fixtures/js_utils.py | hyahiaoui/SeleniumBase | 8c7fc30cadf289731767953f22eeab3c92271760 | [
"MIT"
] | null | null | null | seleniumbase/fixtures/js_utils.py | hyahiaoui/SeleniumBase | 8c7fc30cadf289731767953f22eeab3c92271760 | [
"MIT"
] | null | null | null | seleniumbase/fixtures/js_utils.py | hyahiaoui/SeleniumBase | 8c7fc30cadf289731767953f22eeab3c92271760 | [
"MIT"
] | null | null | null | """
This module contains useful Javascript utility methods for base_case.py
These helper methods SHOULD NOT be called directly from tests.
"""
import re
import requests
import time
from selenium.common.exceptions import WebDriverException
from seleniumbase import config as sb_config
from seleniumbase.common import decorators
from seleniumbase.config import settings
from seleniumbase.core import style_sheet
from seleniumbase.fixtures import constants
from seleniumbase.fixtures import shared_utils
def wait_for_ready_state_complete(driver, timeout=settings.EXTREME_TIMEOUT):
"""
The DOM (Document Object Model) has a property called "readyState".
When the value of this becomes "complete", page resources are considered
fully loaded (although AJAX and other loads might still be happening).
This method will wait until document.readyState == "complete".
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
shared_utils.check_if_time_limit_exceeded()
try:
ready_state = driver.execute_script("return document.readyState")
except WebDriverException:
# Bug fix for: [Permission denied to access property "document"]
time.sleep(0.03)
return True
if ready_state == u"complete":
time.sleep(0.01) # Better be sure everything is done loading
return True
else:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
raise Exception("Page elements never fully loaded after %s seconds!" % timeout)
def execute_async_script(driver, script, timeout=settings.EXTREME_TIMEOUT):
driver.set_script_timeout(timeout)
return driver.execute_async_script(script)
def wait_for_angularjs(driver, timeout=settings.LARGE_TIMEOUT, **kwargs):
if not settings.WAIT_FOR_ANGULARJS:
return
NG_WRAPPER = (
"%(prefix)s"
"var $elm=document.querySelector("
"'[data-ng-app],[ng-app],.ng-scope')||document;"
"if(window.angular && angular.getTestability){"
"angular.getTestability($elm).whenStable(%(handler)s)"
"}else{"
"var $inj;try{$inj=angular.element($elm).injector()||"
"angular.injector(['ng'])}catch(ex){"
"$inj=angular.injector(['ng'])};$inj.get=$inj.get||"
"$inj;$inj.get('$browser')."
"notifyWhenNoOutstandingRequests(%(handler)s)}"
"%(suffix)s"
)
def_pre = "var cb=arguments[arguments.length-1];if(window.angular){"
prefix = kwargs.pop("prefix", def_pre)
handler = kwargs.pop("handler", "function(){cb(true)}")
suffix = kwargs.pop("suffix", "}else{cb(false)}")
script = NG_WRAPPER % {"prefix": prefix, "handler": handler, "suffix": suffix}
try:
execute_async_script(driver, script, timeout=timeout)
except Exception:
time.sleep(0.05)
def is_html_inspector_activated(driver):
try:
driver.execute_script("HTMLInspector") # Fails if not defined
return True
except Exception:
return False
def is_jquery_activated(driver):
try:
driver.execute_script("jQuery('html')") # Fails if jq is not defined
return True
except Exception:
return False
def wait_for_jquery_active(driver, timeout=None):
if not timeout:
timeout = int(settings.MINI_TIMEOUT * 10.0)
else:
timeout = int(timeout * 10.0)
for x in range(timeout):
# jQuery needs a small amount of time to activate.
try:
driver.execute_script("jQuery('html')")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def raise_unable_to_load_jquery_exception(driver):
""" The most-likely reason for jQuery not loading on web pages. """
raise Exception(
"""Unable to load jQuery on "%s" due to a possible violation """
"""of the website's Content Security Policy directive. """
"""To override this policy, add "--disable-csp" on the """
"""command-line when running your tests.""" % driver.current_url
)
def activate_jquery(driver):
""" If "jQuery is not defined", use this method to activate it for use.
This happens because jQuery is not always defined on web sites. """
try:
# Let's first find out if jQuery is already defined.
driver.execute_script("jQuery('html')")
# Since that command worked, jQuery is defined. Let's return.
return
except Exception:
# jQuery is not currently defined. Let's proceed by defining it.
pass
jquery_js = constants.JQuery.MIN_JS
activate_jquery_script = (
"""var script = document.createElement('script');"""
"""script.src = "%s";document.getElementsByTagName('head')[0]"""
""".appendChild(script);""" % jquery_js
)
driver.execute_script(activate_jquery_script)
for x in range(int(settings.MINI_TIMEOUT * 10.0)):
# jQuery needs a small amount of time to activate.
try:
driver.execute_script("jQuery('html')")
return
except Exception:
time.sleep(0.1)
# Since jQuery still isn't activating, give up and raise an exception
raise_unable_to_load_jquery_exception(driver)
def are_quotes_escaped(string):
if string.count("\\'") != string.count("'") or (string.count('\\"') != string.count('"')):
return True
return False
def escape_quotes_if_needed(string):
"""
re.escape() works differently in Python 3.7.0 than earlier versions:
Python 3.6.5:
>>> import re
>>> re.escape('"')
'\\"'
Python 3.7.0:
>>> import re
>>> re.escape('"')
'"'
SeleniumBase needs quotes to be properly escaped for Javascript calls.
"""
if are_quotes_escaped(string):
if string.count("'") != string.count("\\'"):
string = string.replace("'", "\\'")
if string.count('"') != string.count('\\"'):
string = string.replace('"', '\\"')
return string
def safe_execute_script(driver, script):
""" When executing a script that contains a jQuery command,
it's important that the jQuery library has been loaded first.
This method will load jQuery if it wasn't already loaded. """
try:
driver.execute_script(script)
except Exception:
# The likely reason this fails is because: "jQuery is not defined"
activate_jquery(driver) # It's a good thing we can define it here
driver.execute_script(script)
def wait_for_css_query_selector(driver, selector, timeout=settings.SMALL_TIMEOUT):
element = None
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
try:
selector = re.escape(selector)
selector = escape_quotes_if_needed(selector)
element = driver.execute_script("""return document.querySelector('%s')""" % selector)
if element:
return element
except Exception:
element = None
if not element:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
raise Exception("Element {%s} was not present after %s seconds!" % (selector, timeout))
def highlight_with_js(driver, selector, loops, o_bs):
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)';"""
% selector
)
driver.execute_script(script)
for n in range(loops):
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(255, 0, 0, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(0, 0, 255, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(0, 255, 0, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 0, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = """document.querySelector('%s').style.boxShadow =
'%s';""" % (
selector,
o_bs,
)
driver.execute_script(script)
def highlight_with_jquery(driver, selector, loops, o_bs):
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)');"""
% selector
)
safe_execute_script(driver, script)
for n in range(loops):
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(255, 0, 0, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(0, 0, 255, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(0, 255, 0, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 0, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow', '%s');""" % (selector, o_bs)
driver.execute_script(script)
def add_css_link(driver, css_link):
script_to_add_css = """function injectCSS(css) {
var head = document.getElementsByTagName("head")[0];
var link = document.createElement("link");
link.rel = "stylesheet";
link.type = "text/css";
link.href = css;
link.crossorigin = "anonymous";
head.appendChild(link);
}
injectCSS("%s");"""
css_link = escape_quotes_if_needed(css_link)
driver.execute_script(script_to_add_css % css_link)
def add_js_link(driver, js_link):
script_to_add_js = """function injectJS(link) {
var body = document.getElementsByTagName("body")[0];
var script = document.createElement("script");
script.src = link;
script.defer;
script.type="text/javascript";
script.crossorigin = "anonymous";
script.onload = function() { null };
body.appendChild(script);
}
injectJS("%s");"""
js_link = escape_quotes_if_needed(js_link)
driver.execute_script(script_to_add_js % js_link)
def add_css_style(driver, css_style):
add_css_style_script = """function injectStyle(css) {
var head = document.getElementsByTagName("head")[0];
var style = document.createElement("style");
style.type = "text/css";
style.appendChild(document.createTextNode(css));
head.appendChild(style);
}
injectStyle("%s");"""
css_style = css_style.replace("\n", "")
css_style = escape_quotes_if_needed(css_style)
driver.execute_script(add_css_style_script % css_style)
def add_js_code_from_link(driver, js_link):
if js_link.startswith("//"):
js_link = "http:" + js_link
js_code = requests.get(js_link).text
add_js_code_script = (
"""var body = document.getElementsByTagName('body').item(0);"""
"""var script = document.createElement("script");"""
"""script.type = "text/javascript";"""
"""script.onload = function() { null };"""
"""script.appendChild(document.createTextNode("%s"));"""
"""body.appendChild(script);"""
)
js_code = js_code.replace("\n", " ")
js_code = escape_quotes_if_needed(js_code)
driver.execute_script(add_js_code_script % js_code)
def add_js_code(driver, js_code):
add_js_code_script = (
"""var body = document.getElementsByTagName('body').item(0);"""
"""var script = document.createElement("script");"""
"""script.type = "text/javascript";"""
"""script.onload = function() { null };"""
"""script.appendChild(document.createTextNode("%s"));"""
"""body.appendChild(script);"""
)
js_code = js_code.replace("\n", " ")
js_code = escape_quotes_if_needed(js_code)
driver.execute_script(add_js_code_script % js_code)
def add_meta_tag(driver, http_equiv=None, content=None):
if http_equiv is None:
http_equiv = "Content-Security-Policy"
if content is None:
content = (
"default-src *; style-src 'self' 'unsafe-inline'; "
"script-src: 'self' 'unsafe-inline' 'unsafe-eval'"
)
script_to_add_meta = """function injectMeta() {
var meta = document.createElement('meta');
meta.httpEquiv = "%s";
meta.content = "%s";
document.getElementsByTagName('head')[0].appendChild(meta);
}
injectMeta();""" % (
http_equiv,
content,
)
driver.execute_script(script_to_add_meta)
def is_jquery_confirm_activated(driver):
try:
driver.execute_script("jconfirm") # Fails if jq_confirm is not defined
return True
except Exception:
return False
def activate_jquery_confirm(driver):
jquery_js = constants.JQuery.MIN_JS
jq_confirm_css = constants.JqueryConfirm.MIN_CSS
jq_confirm_js = constants.JqueryConfirm.MIN_JS
if not is_jquery_activated(driver):
add_js_link(driver, jquery_js)
wait_for_jquery_active(driver, timeout=0.9)
add_css_link(driver, jq_confirm_css)
add_js_link(driver, jq_confirm_js)
for x in range(15):
# jQuery-Confirm needs a small amount of time to load & activate.
try:
driver.execute_script("jconfirm")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def activate_html_inspector(driver):
jquery_js = constants.JQuery.MIN_JS
html_inspector_js = constants.HtmlInspector.MIN_JS
if is_html_inspector_activated(driver):
return
if not is_jquery_activated(driver):
add_js_link(driver, jquery_js)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
wait_for_jquery_active(driver, timeout=1.5)
add_js_link(driver, html_inspector_js)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
for x in range(15):
# HTML-Inspector needs a small amount of time to load & activate.
try:
driver.execute_script("HTMLInspector")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
def activate_messenger(driver):
jquery_js = constants.JQuery.MIN_JS
messenger_css = constants.Messenger.MIN_CSS
messenger_js = constants.Messenger.MIN_JS
msgr_theme_flat_js = constants.Messenger.THEME_FLAT_JS
msgr_theme_future_js = constants.Messenger.THEME_FUTURE_JS
msgr_theme_flat_css = constants.Messenger.THEME_FLAT_CSS
msgr_theme_future_css = constants.Messenger.THEME_FUTURE_CSS
msgr_theme_block_css = constants.Messenger.THEME_BLOCK_CSS
msgr_theme_air_css = constants.Messenger.THEME_AIR_CSS
msgr_theme_ice_css = constants.Messenger.THEME_ICE_CSS
spinner_css = constants.Messenger.SPINNER_CSS
underscore_js = constants.Underscore.MIN_JS
backbone_js = constants.Backbone.MIN_JS
msg_style = (
"Messenger.options = {'maxMessages': 8, "
"extraClasses: 'messenger-fixed "
"messenger-on-bottom messenger-on-right', "
"theme: 'future'}"
)
add_js_link(driver, jquery_js)
wait_for_jquery_active(driver, timeout=0.2)
add_css_link(driver, messenger_css)
add_css_link(driver, msgr_theme_flat_css)
add_css_link(driver, msgr_theme_future_css)
add_css_link(driver, msgr_theme_block_css)
add_css_link(driver, msgr_theme_air_css)
add_css_link(driver, msgr_theme_ice_css)
add_js_link(driver, underscore_js)
add_js_link(driver, backbone_js)
add_css_link(driver, spinner_css)
add_js_link(driver, messenger_js)
add_js_link(driver, msgr_theme_flat_js)
add_js_link(driver, msgr_theme_future_js)
add_css_style(driver, style_sheet.messenger_style)
for x in range(int(settings.MINI_TIMEOUT * 10.0)):
# Messenger needs a small amount of time to load & activate.
try:
driver.execute_script(msg_style)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def set_messenger_theme(driver, theme="default", location="default", max_messages="default"):
if theme == "default":
theme = "future"
if location == "default":
location = "bottom_right"
if max_messages == "default":
max_messages = "8"
valid_themes = ["flat", "future", "block", "air", "ice"]
if theme not in valid_themes:
raise Exception("Theme: %s is not in %s!" % (theme, valid_themes))
valid_locations = [
"top_left",
"top_center",
"top_right" "bottom_left",
"bottom_center",
"bottom_right",
]
if location not in valid_locations:
raise Exception("Location: %s is not in %s!" % (location, valid_locations))
if location == "top_left":
messenger_location = "messenger-on-top messenger-on-left"
elif location == "top_center":
messenger_location = "messenger-on-top"
elif location == "top_right":
messenger_location = "messenger-on-top messenger-on-right"
elif location == "bottom_left":
messenger_location = "messenger-on-bottom messenger-on-left"
elif location == "bottom_center":
messenger_location = "messenger-on-bottom"
elif location == "bottom_right":
messenger_location = "messenger-on-bottom messenger-on-right"
msg_style = (
"Messenger.options = {'maxMessages': %s, "
"extraClasses: 'messenger-fixed %s', theme: '%s'}" % (max_messages, messenger_location, theme)
)
try:
driver.execute_script(msg_style)
except Exception:
activate_messenger(driver)
driver.execute_script(msg_style)
time.sleep(0.1)
def post_message(driver, message, msg_dur, style="info"):
""" A helper method to post a message on the screen with Messenger.
(Should only be called from post_message() in base_case.py) """
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
message = re.escape(message)
message = escape_quotes_if_needed(message)
messenger_script = (
"""Messenger().post({message: "%s", type: "%s", """
"""hideAfter: %s, hideOnNavigate: true});""" % (message, style, msg_dur)
)
try:
driver.execute_script(messenger_script)
except Exception:
activate_messenger(driver)
set_messenger_theme(driver)
try:
driver.execute_script(messenger_script)
except Exception:
time.sleep(0.2)
activate_messenger(driver)
time.sleep(0.2)
set_messenger_theme(driver)
time.sleep(0.5)
driver.execute_script(messenger_script)
def post_messenger_success_message(driver, message, msg_dur):
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
try:
theme = "future"
location = "bottom_right"
if sb_config.mobile_emulator:
theme = "block"
location = "top_center"
set_messenger_theme(driver, theme=theme, location=location)
post_message(driver, message, msg_dur, style="success")
time.sleep(msg_dur + 0.07)
except Exception:
pass
def post_messenger_error_message(driver, message, msg_dur):
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
try:
set_messenger_theme(driver, theme="block", location="top_center")
post_message(driver, message, msg_dur, style="error")
time.sleep(msg_dur + 0.07)
except Exception:
pass
def highlight_with_js_2(driver, message, selector, o_bs, msg_dur):
if selector == "html":
selector = "body"
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(205, 30, 0, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(50, 50, 128, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(50, 205, 50, 1)';"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
post_messenger_success_message(driver, message, msg_dur)
script = """document.querySelector('%s').style.boxShadow =
'%s';""" % (
selector,
o_bs,
)
driver.execute_script(script)
def highlight_with_jquery_2(driver, message, selector, o_bs, msg_dur):
if selector == "html":
selector = "body"
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)');"""
% selector
)
safe_execute_script(driver, script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(205, 30, 0, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(50, 50, 200, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
script = (
"""jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(50, 205, 50, 1)');"""
% selector
)
driver.execute_script(script)
time.sleep(0.0181)
post_messenger_success_message(driver, message, msg_dur)
script = """jQuery('%s').css('box-shadow', '%s');""" % (selector, o_bs)
driver.execute_script(script)
def scroll_to_element(driver, element):
element_location = None
try:
element_location = element.location["y"]
except Exception:
# element.location_once_scrolled_into_view # Old hack
return False
element_location = element_location - 130
if element_location < 0:
element_location = 0
scroll_script = "window.scrollTo(0, %s);" % element_location
# The old jQuery scroll_script required by=By.CSS_SELECTOR
# scroll_script = "jQuery('%s')[0].scrollIntoView()" % selector
try:
driver.execute_script(scroll_script)
return True
except Exception:
return False
def slow_scroll_to_element(driver, element, browser):
if browser == "ie":
# IE breaks on slow-scrolling. Do a fast scroll instead.
scroll_to_element(driver, element)
return
scroll_position = driver.execute_script("return window.scrollY;")
element_location = None
try:
element_location = element.location["y"]
except Exception:
element.location_once_scrolled_into_view
return
element_location = element_location - 130
if element_location < 0:
element_location = 0
distance = element_location - scroll_position
if distance != 0:
total_steps = int(abs(distance) / 50.0) + 2.0
step_value = float(distance) / total_steps
new_position = scroll_position
for y in range(int(total_steps)):
time.sleep(0.0114)
new_position += step_value
scroll_script = "window.scrollTo(0, %s);" % new_position
driver.execute_script(scroll_script)
time.sleep(0.01)
scroll_script = "window.scrollTo(0, %s);" % element_location
driver.execute_script(scroll_script)
time.sleep(0.01)
if distance > 430 or distance < -300:
# Add small recovery time for long-distance slow-scrolling
time.sleep(0.162)
@decorators.deprecated("Use re.escape() instead, which does what you want!")
def _jq_format(code):
"""
DEPRECATED - Use re.escape() instead, which performs the intended action.
Use before throwing raw code such as 'div[tab="advanced"]' into jQuery.
Selectors with quotes inside of quotes would otherwise break jQuery.
If you just want to escape quotes, there's escape_quotes_if_needed().
This is similar to "json.dumps(value)", but with one less layer of quotes.
"""
code = code.replace("\\", "\\\\").replace("\t", "\\t").replace("\n", "\\n")
code = code.replace('"', '\\"').replace("'", "\\'")
code = code.replace("\v", "\\v").replace("\a", "\\a").replace("\f", "\\f")
code = code.replace("\b", "\\b").replace(r"\u", "\\u").replace("\r", "\\r")
return code
| 34.500629 | 102 | 0.615575 |
4a26146997532734ab2746ca6f76c97f8535393c | 4,545 | py | Python | api/tests/test_run_kubeseal.py | UiP9AV6Y/kubeseal-webgui | 36f1460ff5bb74497f186d8655db018bf87bfe73 | [
"Apache-2.0"
] | null | null | null | api/tests/test_run_kubeseal.py | UiP9AV6Y/kubeseal-webgui | 36f1460ff5bb74497f186d8655db018bf87bfe73 | [
"Apache-2.0"
] | null | null | null | api/tests/test_run_kubeseal.py | UiP9AV6Y/kubeseal-webgui | 36f1460ff5bb74497f186d8655db018bf87bfe73 | [
"Apache-2.0"
] | null | null | null | import pytest
from app.kubeseal import decode_base64_string, run_kubeseal, valid_k8s_name
@pytest.mark.parametrize(
"value",
[
"abc",
"l" + "o" * 60 + "ng",
"some-1-too-check",
"ends-on-digit-1",
"1starts-with-it",
"some.dots.or_underscore",
"long-" + "a" * 248,
],
)
def test_valid_k8s_name(value):
# given a valid k8s-label-name
# when valid_k8s_name is called on the label-name
# then the label-name is returned unchanged
assert valid_k8s_name(value) == value
@pytest.mark.parametrize(
"value",
[
"",
"-something",
"too-l" + "o" * 247 + "ng",
"ähm",
"_not-valid",
"with spaces",
" not-trimmed ",
"no-special-chars-like/,#+%",
"ends-on-dash-",
"Uppercase",
"U",
"uPPer",
],
)
def test_invalid_k8s_name(value):
# given an invalid k8s-label-name
# when valid_k8s_name is called on the label-name
# then a ValueError is raised
with pytest.raises(ValueError, match="Invalid k8s name"):
valid_k8s_name(value)
def test_run_kubeseal_with_with_empty_string_namespace():
# given an empty string secretNamespace
# when run_kubeseal is called
# then raise ValueError
with pytest.raises(ValueError, match="secret_namespace was not given"):
run_kubeseal([{"key": "foo", "value": "YmFy"}], "", "secretName")
def test_run_kubeseal_with_with_none_namespace():
# given a None secretNamespace
# when run_kubeseal is called
# then raise ValueError
with pytest.raises(ValueError, match="secret_namespace was not given"):
run_kubeseal([{"key": "foo", "value": "YmFy"}], None, "secretName")
def test_run_kubeseal_with_with_empty_string_secret_name():
# given an empty string secretName
# when run_kubeseal is called
# then raise ValueError
with pytest.raises(ValueError, match="secret_name was not given"):
run_kubeseal([{"key": "foo", "value": "YmFy"}], "secretNamespace", "")
def test_run_kubeseal_with_with_none_secret_name():
# given a None secretName
# when run_kubeseal is called
# then raise ValueError
with pytest.raises(ValueError, match="secret_name was not given"):
run_kubeseal([{"key": "foo", "value": "YmFy"}], "secretNamespace", None)
def test_run_kubeseal_with_with_empty_secrets_list_but_otherwise_valid_inputs():
# given an empty list
# when run_kubeseal is called
sealed_secrets = run_kubeseal([], "secretNamespace", "secretName")
# then return empty list
assert sealed_secrets == []
@pytest.mark.container()
@pytest.mark.cluster()
def test_run_kubeseal_with_cli():
# given run test against cli with test cluster
# when run_kubeseal is called
# then return valid encrypted secret
pass
@pytest.mark.cluster()
def test_run_kubeseal_without_cli():
# given k8s cluster but no kubeseal cli
# when run_kubeseal is called
# then raise RuntimeError
with pytest.raises(RuntimeError):
run_kubeseal([{"key": "foo", "value": "YmFy"}], "secretNamespace", "secretName")
def test_run_kubeseal_with_invalid_secrets_list_but_otherwise_valid_inputs():
# given a secret list with string element
# when run_kubeseal is called
# then raise ValueError
with pytest.raises(
ValueError, match="Input of cleartext_secrets was not a list of dicts."
):
run_kubeseal(["this-should-be-a-dict-object"], "secretNamespace", "secretName")
@pytest.mark.container()
def test_run_kubeseal_without_k8s_cluster():
# given kubeseal cli but no k8s cluster
# when run_kubeseal is called
# then raise RuntimeError
with pytest.raises(RuntimeError) as error_cert_missing:
run_kubeseal([{"key": "foo", "value": "YmFy"}], "secretNamespace", "secretName")
assert "/kubeseal-webgui/cert/kubeseal-cert.pem: no such file or directory" in str(
error_cert_missing
)
@pytest.mark.parametrize(
("base64_input", "expected_output"),
[("YWJjZGVm", "abcdef"), ("w6TDtsO8", "äöü"), ("LV8jIT8kwqc=", "-_#!?$§")],
)
def test_decode_base64_string(base64_input, expected_output):
"""
Test decode_base64_string.
Given a tuple with a Base64 input string and the corresponding output string.
When calling decode_base64_string on input string.
Then return the corresponding output string.
"""
base64_encoded_string = decode_base64_string(base64_input)
assert base64_encoded_string == expected_output
| 31.5625 | 88 | 0.677668 |
4a261475bfd1809707105daaea185010bc5c754e | 782 | py | Python | Weighted_Strings.py | nabiharaza/LeetCode_Practise | eca35722ea671870c4d65a28df7b8d66184c6217 | [
"Apache-2.0"
] | null | null | null | Weighted_Strings.py | nabiharaza/LeetCode_Practise | eca35722ea671870c4d65a28df7b8d66184c6217 | [
"Apache-2.0"
] | null | null | null | Weighted_Strings.py | nabiharaza/LeetCode_Practise | eca35722ea671870c4d65a28df7b8d66184c6217 | [
"Apache-2.0"
] | null | null | null | dict_alphabets = dict()
def smallestString(weight):
ascii_value = 66
final_string = ''
sum = 1
dict_alphabets[1] = sum
for index in range(1, 26):
sum = (index + 1) * sum + sum
dict_alphabets[index + 1] = sum
ascii_value += 1
print(dict_alphabets)
while weight != 0:
largest_alphabet_index = find_largest_alphabet(weight)
final_string = chr(largest_alphabet_index + 65 - 1) + final_string
weight = weight - dict_alphabets[largest_alphabet_index]
print(final_string)
def find_largest_alphabet(weight):
for key, value in dict_alphabets.items():
if value > weight:
return previous
        else:
            previous = key
    # if the weight is at least as large as every stored value, fall back to the
    # largest alphabet index instead of falling off the end and returning None
    return previous
if __name__ == '__main__':
smallestString(4)
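    # Added note: with the weights built above (A=1, B=3, C=12, ...), this call is expected
    # to print the weight table followed by the string "AB" (3 + 1 = 4).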
| 24.4375 | 74 | 0.626598 |
4a261541c2dc52ce08ff7e3252994195ba9340dc | 986 | py | Python | python3/koans/triangle.py | Dawn0fTime/python-koans-solutions | 76a3d292e541c26313aa50d837ff73e21c10d581 | [
"MIT"
] | null | null | null | python3/koans/triangle.py | Dawn0fTime/python-koans-solutions | 76a3d292e541c26313aa50d837ff73e21c10d581 | [
"MIT"
] | null | null | null | python3/koans/triangle.py | Dawn0fTime/python-koans-solutions | 76a3d292e541c26313aa50d837ff73e21c10d581 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Triangle Project Code.
# Triangle analyzes the lengths of the sides of a triangle
# (represented by a, b and c) and returns the type of triangle.
#
# It returns:
# 'equilateral' if all sides are equal
# 'isosceles' if exactly 2 sides are equal
# 'scalene' if no sides are equal
#
# The tests for this method can be found in
# about_triangle_project.py
# and
# about_triangle_project_2.py
#
def triangle(a, b, c):
# DELETE 'PASS' AND WRITE THIS CODE
if a <= 0 or b <= 0 or c <= 0:
raise TriangleError('No sides less than zero')
if (a + b < c) or (a + c < b) or (b + c < a):
raise TriangleError('The sum of any two sides must be greater than the third')
if a == b == c:
return 'equilateral'
elif a == b or b == c or c == a:
return 'isosceles'
else:
return 'scalene'
# Error class used in part 2. No need to change this code.
class TriangleError(Exception):
pass
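# Illustrative usage (not part of the original koan solution); the expected values
# follow directly from the rules described in the header comment above.
if __name__ == '__main__':
    print(triangle(2, 2, 2))    # 'equilateral'
    print(triangle(3, 4, 4))    # 'isosceles'
    print(triangle(3, 4, 5))    # 'scalene'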
| 26.648649 | 83 | 0.633874 |
4a261860f5ba1f5e30d2ef1a5af48a102c280f62 | 4,838 | py | Python | docs/conf.py | wuchaochen/ai-flow | 811029f05a6d192f91a738b15d1eba090bfaed4b | [
"Apache-2.0"
] | null | null | null | docs/conf.py | wuchaochen/ai-flow | 811029f05a6d192f91a738b15d1eba090bfaed4b | [
"Apache-2.0"
] | null | null | null | docs/conf.py | wuchaochen/ai-flow | 811029f05a6d192f91a738b15d1eba090bfaed4b | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import sphinx_rtd_theme
import myst_parser
import subprocess
sys.path.insert(0, os.path.abspath('../ai_flow/api'))
sys.path.insert(0, os.path.abspath('../ai_flow/context'))
sys.path.insert(0, os.path.abspath('../ai_flow/ai_graph'))
sys.path.insert(0, os.path.abspath('../ai_flow/endpoint/server'))
sys.path.insert(0, os.path.abspath('../ai_flow/workflow'))
sys.path.insert(0, os.path.abspath('../ai_flow/client'))
sys.path.insert(0, os.path.abspath('../ai_flow/common'))
sys.path.insert(0, os.path.abspath('../ai_flow/meta'))
sys.path.insert(0, os.path.abspath('../ai_flow_plugins'))
# -- Project information -----------------------------------------------------
project = 'AI Flow'
author = 'flink-extended'
# The full version, including alpha/beta/rc tags
release = '1.0'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.autosectionlabel',
'myst_parser',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# Following modules will be excluded as they are not public API
exclude_rst = [
'**ai_flow.metadata_store.test.rst',
'**ai_flow.runtime.rst',
'**ai_flow.metric.service.rst',
'**ai_flow.exception.rst',
'**ai_flow.model_center.entity.rst',
'**ai_flow.metadata_store.test.sbin.rst',
'**ai_flow.translator.rst',
'**ai_flow.model_center.rst',
'**ai_flow.model_center.service.rst',
'**ai_flow.scheduler.rst',
'**ai_flow.endpoint.client.rst',
'**ai_flow.util.rst',
'**ai_flow.metadata_store.rst',
'**ai_flow.store.rst',
'**ai_flow.log.rst',
'**ai_flow.protobuf.rst',
'**ai_flow.metadata_store.utils.rst',
'**ai_flow.util.model_util.rst',
'**ai_flow.metric.rst',
'**ai_flow.metadata_store.service.rst',
'**ai_flow.util.file_util.rst',
'**ai_flow_plugins.scheduler_plugins.airflow.rst',
'**ai_flow_plugins.job_plugins.utils.rst',
'**modules.rst',
]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
# We will not show any documents for these APIs in the generated API doc
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**test**'] + exclude_rst
# Look at the first line of the docstring for function and method signatures.
autodoc_docstring_signature = True
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
source_suffix = ['.rst', '.md', '.MD']
source_parsers = {
'.md': myst_parser # noqa
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
autodoc_mock_imports = ['bs4', 'requests']
autoclass_content = 'both'
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'flinkaiflowdoc'
def autodoc_skip_member(app, what, name, obj, skip, options):
exclusions = ('__doc__', '__module__', '__dict__')
exclude = name in exclusions
# return True if (skip or exclude) else None # Can interfere with subsequent skip functions.
return True if exclude else None
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
dir=os.path.dirname(os.path.abspath(__file__))
subprocess.call(['bash', dir+'/docgen.sh']) | 35.837037 | 97 | 0.696982 |
4a2618a51286fd1113255fde9ead50419cbe2d2e | 6,663 | py | Python | vod_converter/voc.py | supernlogn/vod-converter | d39b2357f5742fe347daaddc8584292b4c983a26 | [
"MIT"
] | null | null | null | vod_converter/voc.py | supernlogn/vod-converter | d39b2357f5742fe347daaddc8584292b4c983a26 | [
"MIT"
] | null | null | null | vod_converter/voc.py | supernlogn/vod-converter | d39b2357f5742fe347daaddc8584292b4c983a26 | [
"MIT"
] | 1 | 2018-07-10T08:34:14.000Z | 2018-07-10T08:34:14.000Z | """
Ingestor and egestor for VOC formats.
http://host.robots.ox.ac.uk/pascal/VOC/voc2012/htmldoc/index.html
"""
import os
import shutil
from converter import Ingestor, Egestor
import xml.etree.ElementTree as ET
class VOCIngestor(Ingestor):
def validate(self, root):
path = "{}/VOC2012".format(root)
for subdir in ["ImageSets", "JPEGImages", "Annotations"]:
if not os.path.isdir("{}/{}".format(path, subdir)):
return False, "Expected subdirectory {} within {}".format(subdir, path)
if not os.path.isfile("{}/ImageSets/Main/trainval.txt".format(path)):
return False, "Expected main image set ImageSets/Main/trainval.txt to exist within {}".format(path)
return True, None
def ingest(self, path):
image_names = self._get_image_ids(path)
return [self._get_image_detection(path, image_name) for image_name in image_names]
def _get_image_ids(self, root):
path = "{}/VOC2012".format(root)
with open("{}/ImageSets/Main/trainval.txt".format(path)) as f:
fnames = []
for line in f.read().strip().split('\n'):
cols = line.split()
if len(cols) > 1:
score = cols[1]
if score != '1':
continue
fnames.append(cols[0])
return fnames
def _get_image_detection(self, root, image_id):
path = "{}/VOC2012".format(root)
image_path = "{}/JPEGImages/{}.jpg".format(path, image_id)
if not os.path.isfile(image_path):
raise Exception("Expected {} to exist.".format(image_path))
annotation_path = "{}/Annotations/{}.xml".format(path, image_id)
if not os.path.isfile(annotation_path):
raise Exception("Expected annotation file {} to exist.".format(annotation_path))
tree = ET.parse(annotation_path)
xml_root = tree.getroot()
size = xml_root.find('size')
segmented = xml_root.find('segmented').text == '1'
segmented_path = None
if segmented:
segmented_path = "{}/SegmentationObject/{}.png".format(path, image_id)
if not os.path.isfile(segmented_path):
raise Exception("Expected segmentation file {} to exist.".format(segmented_path))
image_width = int(size.find('width').text)
image_height = int(size.find('height').text)
return {
'image': {
'id': image_id,
'path': image_path,
'segmented_path': segmented_path,
'width': image_width,
'height': image_height
},
'detections': [self._get_detection(node) for node in xml_root.findall('object')]
}
def _get_detection(self, node):
bndbox = node.find('bndbox')
return {
'label': node.find('name').text,
'top': float(bndbox.find('ymin').text) - 1,
'left': float(bndbox.find('xmin').text) - 1,
'right': float(bndbox.find('xmax').text) - 1,
'bottom': float(bndbox.find('ymax').text) - 1,
}
class VOCEgestor(Egestor):
def expected_labels(self):
return {
'aeroplane': [],
'bicycle': [],
'bird': [],
'boat': [],
'bottle': [],
'bus': [],
'car': [],
'cat': [],
'chair': [],
'cow': [],
'diningtable': [],
'dog': [],
'horse': [],
'motorbike': [],
'person': ['pedestrian'],
'pottedplant': [],
'sheep': [],
'sofa': [],
'train': [],
'tvmonitor': []
}
def egest(self, *, image_detections, root):
image_sets_path = "{}/VOC2012/ImageSets/Main".format(root)
images_path = "{}/VOC2012/JPEGImages".format(root)
annotations_path = "{}/VOC2012/Annotations".format(root)
segmentations_path = "{}/VOC2012/SegmentationObject".format(root)
segmentations_dir_created = False
for to_create in [image_sets_path, images_path, annotations_path]:
os.makedirs(to_create, exist_ok=True)
for image_detection in image_detections:
image = image_detection['image']
image_id = image['id']
src_extension = image['path'].split('.')[-1]
shutil.copyfile(image['path'], "{}/{}.{}".format(images_path, image_id, src_extension))
with open("{}/trainval.txt".format(image_sets_path), 'a') as out_image_index_file:
out_image_index_file.write('{}\n'.format(image_id))
if image['segmented_path'] is not None:
if not segmentations_dir_created:
os.makedirs(segmentations_path)
segmentations_dir_created = True
shutil.copyfile(image['segmented_path'], "{}/{}.png".format(segmentations_path, image_id))
xml_root = ET.Element('annotation')
add_text_node(xml_root, 'filename', "{}.{}".format(image_id, src_extension))
add_text_node(xml_root, 'folder', 'VOC2012')
add_text_node(xml_root, 'segmented', int(segmentations_dir_created))
add_sub_node(xml_root, 'size', {
'depth': 3,
'width': image['width'],
'height': image['height']
})
add_sub_node(xml_root, 'source', {
'annotation': 'Dummy',
'database': 'Dummy',
'image': 'Dummy'
})
for detection in image_detection['detections']:
x_object = add_sub_node(xml_root, 'object', {
'name': detection['label'],
'difficult': 0,
'occluded': 0,
'truncated': 0,
'pose': 'Unspecified'
})
add_sub_node(x_object, 'bndbox', {
'xmin': detection['left'] + 1,
'xmax': detection['right'] + 1,
'ymin': detection['top'] + 1,
'ymax': detection['bottom'] + 1
})
ET.ElementTree(xml_root).write("{}/{}.xml".format(annotations_path, image_id))
def add_sub_node(node, name, kvs):
subnode = ET.SubElement(node, name)
for k, v in kvs.items():
add_text_node(subnode, k, v)
return subnode
def add_text_node(node, name, text):
subnode = ET.SubElement(node, name)
subnode.text = "{}".format(text)
return subnode
| 36.016216 | 115 | 0.535194 |
4a261971638981c03ec8985ddde35748bb14296a | 1,948 | py | Python | app/models/util/join_token.py | bunchiestudios/schedulr | 996ae5c4d51d5df17946f65515adf5fdb2a468ab | [
"Apache-2.0"
] | 2 | 2019-01-20T09:37:40.000Z | 2019-02-03T09:15:50.000Z | app/models/util/join_token.py | bunchiestudios/schedulr | 996ae5c4d51d5df17946f65515adf5fdb2a468ab | [
"Apache-2.0"
] | 19 | 2019-01-10T05:39:13.000Z | 2019-02-06T01:31:45.000Z | app/models/util/join_token.py | bunchiestudios/schedulr | 996ae5c4d51d5df17946f65515adf5fdb2a468ab | [
"Apache-2.0"
] | null | null | null | from typing import Optional
from app import db
from app.models import JoinToken, Team
def team_by_join_token(join_token_str: str) -> Optional[Team]:
"""
Returns the team associated with a given join token.
:param join_token_str: The join token to search over
:return: Returns a Team if the join token exists, or None otherwise
"""
session = db.get_session()
join_token = (
session.query(JoinToken)
.filter(JoinToken.token_str == join_token_str)
.one_or_none()
)
if not join_token:
return None
return join_token.team
def by_team_id(team_id: int) -> Optional[JoinToken]:
"""
Returns existing join token for a given team.
:param team_id: ID of the team to look for a join token of.
:return: Returns a JoinToken object if the team exists and has one, or
None otherwise.
"""
session = db.get_session()
team: Team = session.query(Team).filter(Team.id == team_id).one_or_none()
if not team:
return None
return team.join_tokens[0] if team.join_tokens else None
def add_to_team(team_id: int, join_token_str: str) -> Optional[JoinToken]:
"""
Adds a given join token to the team, replacing any existing join token.
:param team_id: Team to which to add the join_token
:param join_token_str: Join token string to add to the team.
:return: Returns a JoinToken object if the team exists, or None otherwise.
"""
session = db.get_session()
team: Team = session.query(Team).filter(Team.id == team_id).one_or_none()
if not team:
return None
# This gets rid of the tokens and deletes the relationship as well
if team.join_tokens:
for join_token in team.join_tokens:
session.delete(join_token)
join_token = JoinToken(token_str=join_token_str)
team.join_tokens = [join_token]
session.add(join_token)
session.commit()
return join_token
| 29.969231 | 78 | 0.682238 |
4a261a9f24616625acd1c92dd6b66c82c5c8f064 | 3,610 | py | Python | ultimate_hosts_blacklist/test_launcher/defaults/pyfunceble.py | Ultimate-Hosts-Blacklist/test-launcher | 8b120b3ee844918866d7a15d4015ee3664bfee55 | [
"MIT"
] | 2 | 2021-03-11T08:05:16.000Z | 2021-05-03T09:37:09.000Z | ultimate_hosts_blacklist/test_launcher/defaults/pyfunceble.py | Ultimate-Hosts-Blacklist/test-launcher | 8b120b3ee844918866d7a15d4015ee3664bfee55 | [
"MIT"
] | null | null | null | ultimate_hosts_blacklist/test_launcher/defaults/pyfunceble.py | Ultimate-Hosts-Blacklist/test-launcher | 8b120b3ee844918866d7a15d4015ee3664bfee55 | [
"MIT"
] | 1 | 2021-05-03T09:30:24.000Z | 2021-05-03T09:30:24.000Z | """
The test launcher of the Ultimate-Hosts-Blacklist project.
This is the module that will provides all our pyfunceble related settings or
defaults.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
License:
::
MIT License
Copyright (c) 2019, 2020, 2021 Ultimate-Hosts-Blacklist
Copyright (c) 2019, 2020, 2021 Nissar Chababy
Copyright (c) 2019, 2020, 2021 Mitchell Krog
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from PyFunceble.cli.continuous_integration.github_actions import GitHubActions
from PyFunceble.cli.continuous_integration.jenkins import Jenkins
LINKS_STABLE: dict = {
"license": {
"link": "https://raw.githubusercontent.com/funilrys/PyFunceble/master/LICENSE",
"destination": "LICENSE_PyFunceble",
},
}
LINKS_DEV: dict = {
"license": {
"link": "https://raw.githubusercontent.com/funilrys/PyFunceble/dev/LICENSE",
"destination": "LICENSE_PyFunceble",
},
}
LINKS: dict = dict(LINKS_DEV)
CONFIGURATION = {
"lookup": {
"timeout": 5.0,
"reputation": False,
"collection": True,
},
"share_logs": False,
"cli_testing": {
"whois_db": True,
"autocontinue": True,
"preload_file": False,
"cooldown_time": 0.09,
"ci": {
"active": Jenkins().guess_all_settings().authorized
or GitHubActions().guess_all_settings().authorized,
"commit_message": "[Autosave] Testing for Ultimate Hosts Blacklist",
"end_commit_message": "[Results] Testing for Ultimate Hosts Blacklist",
"max_exec_minutes": 15,
},
"file_generation": {"hosts": True, "plain": True},
"display_mode": {
"all": False,
"dots": True,
"execution_time": True,
"less": True,
"percentage": True,
"quiet": False,
"simple": False,
"status": "ALL",
},
"testing_mode": {
"availability": True,
"syntax": False,
"reputation": False,
},
"max_workers": None
if not Jenkins().guess_all_settings().authorized
and not GitHubActions().guess_all_settings().authorized
else 1,
},
"collection": {
"push": True,
"url_base": "https://collection.dead-hosts.funilrys.com",
},
"dns": {
"server": ["9.9.9.10", "149.112.112.10", "2620:fe::10"],
"protocol": "UDP",
"follow_server_order": False,
"trust_server": True,
},
}
| 32.818182 | 87 | 0.638504 |
4a261b2840aa333dea5bb87b531702f9ce7598a2 | 639 | py | Python | languages/python/networking_email1.py | Andilyn/learntosolveit | fd15345c74ef543e4e26f4691bf91cb6dac568a4 | [
"BSD-3-Clause"
] | 136 | 2015-03-06T18:11:21.000Z | 2022-03-10T22:31:40.000Z | languages/python/networking_email1.py | Andilyn/learntosolveit | fd15345c74ef543e4e26f4691bf91cb6dac568a4 | [
"BSD-3-Clause"
] | 27 | 2015-01-07T01:38:03.000Z | 2021-12-22T19:20:15.000Z | languages/python/networking_email1.py | Andilyn/learntosolveit | fd15345c74ef543e4e26f4691bf91cb6dac568a4 | [
"BSD-3-Clause"
] | 1,582 | 2015-01-01T20:37:06.000Z | 2022-03-30T12:29:24.000Z | # Import smtplib for the actual sending function
import smtplib
# Import the email modules we'll need
from email.mime.text import MIMEText
# Open a plain text file for reading. For this example, assume that
# the text file contains only ASCII characters.
fp = open('content.txt', 'rb')
# Create a text/plain message
msg = MIMEText(fp.read())
fp.close()
me = '[email protected]'
you = '[email protected]'
msg['Subject'] = 'Hello'
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('smtp.gmail.com')
s.sendmail(me, [you], msg.as_string())
s.quit()
| 25.56 | 68 | 0.716745 |
4a261bb60fd461e3a7fbdf1fb6df238bc12510ab | 785 | py | Python | monitor.py | spring520/fpeg | dac8164d793eb65e41f2a11e425f3f8a9f676864 | [
"RSA-MD"
] | null | null | null | monitor.py | spring520/fpeg | dac8164d793eb65e41f2a11e425f3f8a9f676864 | [
"RSA-MD"
] | null | null | null | monitor.py | spring520/fpeg | dac8164d793eb65e41f2a11e425f3f8a9f676864 | [
"RSA-MD"
] | null | null | null | class Monitor:
"""
Monitor of pipes in a pipeline.
  Monitor gathers the data received and sent by pipes. When the monitor is not awake, do not send data to it.
"""
def __init__(self):
self.data = []
self.logs = []
self.params = []
self.waking = False
def gather(self, name, data, log, params):
if not self.waking:
raise RuntimeError("Monitor is sleeping. Do not send message to the monitor.")
self.data[-1][name] = data
self.logs[-1][name] = log
self.params[-1][name] = params
self.sleep()
def report(self):
return self.data, self.logs
def wake(self):
self.waking = True
def sleep(self):
self.waking = False
def prepare(self):
self.data.append({})
self.logs.append({})
self.params.append({})
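# Illustrative usage sketch (not part of the original module; the pipe name and params
# below are made up): a pipeline would call prepare() once per round, wake() before
# sending, and then gather() once from each pipe.
if __name__ == "__main__":
  monitor = Monitor()
  monitor.prepare()            # start a new round of records
  monitor.wake()               # gather() refuses data unless the monitor is awake
  monitor.gather("pipe0", data=[1, 2, 3], log="ok", params={"quality": 80})
  print(monitor.report())      # -> ([{'pipe0': [1, 2, 3]}], [{'pipe0': 'ok'}])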
| 22.428571 | 108 | 0.626752 |
4a261c39361ebbbd674ab448ede0a56e30ca1731 | 2,144 | py | Python | homeassistant/components/lightwave/light.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 4 | 2021-07-11T09:11:00.000Z | 2022-02-27T14:43:50.000Z | homeassistant/components/lightwave/light.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 277 | 2021-10-04T06:39:33.000Z | 2021-12-28T22:04:17.000Z | homeassistant/components/lightwave/light.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 3 | 2022-01-02T18:49:54.000Z | 2022-01-25T02:03:54.000Z | """Support for LightwaveRF lights."""
from __future__ import annotations
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import LIGHTWAVE_LINK
MAX_BRIGHTNESS = 255
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Find and return LightWave lights."""
if not discovery_info:
return
lights = []
lwlink = hass.data[LIGHTWAVE_LINK]
for device_id, device_config in discovery_info.items():
name = device_config[CONF_NAME]
lights.append(LWRFLight(name, device_id, lwlink))
async_add_entities(lights)
class LWRFLight(LightEntity):
"""Representation of a LightWaveRF light."""
_attr_supported_features = SUPPORT_BRIGHTNESS
_attr_should_poll = False
def __init__(self, name, device_id, lwlink):
"""Initialize LWRFLight entity."""
self._attr_name = name
self._device_id = device_id
self._attr_brightness = MAX_BRIGHTNESS
self._lwlink = lwlink
async def async_turn_on(self, **kwargs):
"""Turn the LightWave light on."""
self._attr_is_on = True
if ATTR_BRIGHTNESS in kwargs:
self._attr_brightness = kwargs[ATTR_BRIGHTNESS]
if self._attr_brightness != MAX_BRIGHTNESS:
self._lwlink.turn_on_with_brightness(
self._device_id, self._attr_name, self._attr_brightness
)
else:
self._lwlink.turn_on_light(self._device_id, self._attr_name)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the LightWave light off."""
self._attr_is_on = False
self._lwlink.turn_off(self._device_id, self._attr_name)
self.async_write_ha_state()
| 29.369863 | 72 | 0.702425 |
4a261dc0a6703bcb051dddce61e369aad167aa6e | 4,812 | py | Python | neural_field.py | brianjsl/ndf_mnist | 936b6616c5aca2d555cf88c134ff92f3ff57c08b | [
"MIT"
] | 1 | 2022-03-15T04:32:40.000Z | 2022-03-15T04:32:40.000Z | neural_field.py | brianjsl/ndf_mnist | 936b6616c5aca2d555cf88c134ff92f3ff57c08b | [
"MIT"
] | null | null | null | neural_field.py | brianjsl/ndf_mnist | 936b6616c5aca2d555cf88c134ff92f3ff57c08b | [
"MIT"
] | null | null | null | import torch
from dataloader_ndf import OverlapMNISTNDF
import torchvision.transforms
from torch.utils.data import DataLoader
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
import torch.optim as optim
import time
from tqdm import tqdm
from constants import IMG_DIR
import torchvision.models as models
import copy
batch_size = 1
num_epochs = 10
learning_rate = 1e-4
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def initialize_encoder(num_classes, feature_extract, use_pretrained = True):
'''
Initializes a ResNet model with a given number of classes.
params:
    @num_classes: defines the number of output features of the new final layer
    @feature_extract: if True, freezes the pretrained backbone so that only the
    newly added layers are trained
    @use_pretrained: defines whether or not to use pretrained weights
    in training phase. Defaults to True.
    '''
    # fine-tuned model
    model_ft = models.resnet18(pretrained=use_pretrained)
    if feature_extract:
        for param in model_ft.parameters():
            param.requires_grad = False
    num_ftrs = model_ft.fc.in_features
    model_ft.fc = nn.Linear(num_ftrs, num_classes)
model_ft.conv1 = nn.Conv2d(1, 64, (7,7), (2,2), (3,3), bias = False)
model_ft.maxpool = nn.Identity()
return model_ft
class NeuralField(nn.Module):
def __init__(self, encoder):
super(NeuralField, self).__init__()
self.linear_relu_stack = nn.Sequential(
nn.Linear(130, 512),
nn.ReLU(),
nn.Linear(512, 512),
nn.ReLU(),
nn.Linear(512,1),
nn.Sigmoid()
)
self.encoder = encoder
def forward(self, x):
z = self.encoder(x[0])
coords = x[1].view(batch_size, 2)
input = torch.cat((z,coords), 1)
intensity = self.linear_relu_stack(input)
return intensity
def train_model(model, dataloaders, criterion, optimizer, num_epochs):
'''
Trains the model for given number of epochs.
params:
@ model: model to train
@ dataloaders: dictionary of dataloaders
@ criterion: loss function/criterion
@ optimizer: optimizer used
@ num_epochs: number of epochs to train for
'''
since = time.time()
#weights of best model
best_model_wts = copy.deepcopy(model.state_dict())
for epoch in range(num_epochs):
print('Epoch {}/{}'.format(epoch+31, num_epochs+30))
print('_'*10)
# Each epoch has a training and validation phase
for phase in ['train', 'val']:
if phase == 'train':
model.train()
else:
model.eval()
#keep track of losses and corrects
running_loss = 0.0
#Iterate over data
for inputs, labels in tqdm(dataloaders[phase]):
labels = labels.to(device).view(batch_size,-1)
#zero the parameter gradients
optimizer.zero_grad()
#forward and track history if train
with torch.set_grad_enabled(phase == 'train'):
outputs = model((inputs[0].to(device), inputs[1].to(device)))
loss = criterion(outputs, labels.float())
if phase == 'train':
loss.backward()
optimizer.step()
#statistics to keep track of
running_loss += loss.item()
epoch_loss = running_loss
print('{} Loss: {:.4f}'.format(phase, epoch_loss))
torch.save(model, './checkpoints/chkpt_{}.pt'.format(epoch+30))
print()
time_elapsed = time.time() - since
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
# load best model weights
model.load_state_dict(best_model_wts)
return model
if __name__ == '__main__':
encoder = initialize_encoder(128, False, True)
model = torch.load('./checkpoints/chkpt_39.pt').to(device)
print('Initializing Datasets and Dataloaders...')
data_transforms = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize([0.5],[0.5])
])
#create training and validation datasets
image_datasets = {x: OverlapMNISTNDF(IMG_DIR, data_transforms, x) for x in ['train', 'val']}
#create training and validation dataloaders
dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size = batch_size,
shuffle = True, num_workers = 2) for x in ['train', 'val']}
print("Done Initializing Data.")
    # Initialize optimizer
optimizer_ft = optim.Adam(model.parameters(), lr = learning_rate)
criterion = nn.MSELoss()
model = train_model(model, dataloaders_dict, criterion, optimizer_ft,
num_epochs=num_epochs)
torch.save(model, 'neural_field.pt')
| 32.513514 | 98 | 0.612219 |
4a261e3d6ad5c2780bf7610a256c7bcae8e89e13 | 1,156 | py | Python | scatter_squares.py | CowryGolden/matplotlib-test | 1ecaf7f2bbb3ef77f81958dc4272dd528c20a7f3 | [
"Apache-2.0"
] | null | null | null | scatter_squares.py | CowryGolden/matplotlib-test | 1ecaf7f2bbb3ef77f81958dc4272dd528c20a7f3 | [
"Apache-2.0"
] | null | null | null | scatter_squares.py | CowryGolden/matplotlib-test | 1ecaf7f2bbb3ef77f81958dc4272dd528c20a7f3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
r'''
Draw a simple scatter plot of the squares of the natural numbers.
'''
import matplotlib.pyplot as plt
# x_values = [1, 2, 3, 4, 5]
# y_values = [1, 4, 9, 16, 25]
x_values = list(range(1, 1001))
y_values = [x ** 2 for x in x_values]
# plt.scatter(x_values, y_values, s=40)  # s sets the size of the plotted points
# plt.scatter(x_values, y_values, edgecolor='none', s=40)  # scatter points default to blue with a black outline; with many points the outlines blur together, so remove them with edgecolor='none'
# plt.scatter(x_values, y_values, c='red', edgecolor='none', s=40)  # c sets the color of the data points
# plt.scatter(x_values, y_values, c=(0, 0, 0.8), edgecolor='none', s=40)  # c also accepts an RGB tuple of three floats in [0, 1] (red, green, blue); values closer to 0 give darker colors, values closer to 1 lighter ones; (0, 0, 0.8) is a light blue
plt.scatter(x_values, y_values, c=y_values, cmap=plt.cm.Blues, edgecolor='none', s=40)  # use a colormap so small values are light and large values dark, highlighting the trend in the data
# Set the chart title and label the axes
plt.title("Square Numbers", fontsize=24)
plt.xlabel("Value", fontsize=14)
plt.ylabel("Square of Value", fontsize=14)
# Set the size of the tick labels
plt.tick_params(axis='both', which='major', labelsize=14)
# Set the range of each axis
plt.axis([0, 1100, 0, 1100000])
# Save the figure
plt.savefig('images/squares_plot.png', bbox_inches='tight')
# Show the figure
plt.show() | 39.862069 | 169 | 0.704152 |
4a261ea55672a663569b72325a91eebecbb3429a | 2,594 | py | Python | bloom/ll/models/application.py | A5rocks/bloom | c5426bf1a8070187736fca8fea3a487cafb76bda | [
"Apache-2.0"
] | 7 | 2021-05-01T03:55:54.000Z | 2022-02-02T02:02:16.000Z | bloom/ll/models/application.py | A5rocks/bloom | c5426bf1a8070187736fca8fea3a487cafb76bda | [
"Apache-2.0"
] | 13 | 2021-05-05T01:10:43.000Z | 2021-09-02T00:21:52.000Z | bloom/ll/models/application.py | A5rocks/bloom | c5426bf1a8070187736fca8fea3a487cafb76bda | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import enum
import typing
import attr
from bloom.ll.models.base import UNKNOWN, Snowflake, Unknownish
from bloom.ll.models.teams import Team
# docs in this module are copied from the Discord Documentation
@attr.frozen(kw_only=True)
class Application:
#: the id of the app
id: Snowflake
#: the name of the app
name: str
#: the icon hash of the app
icon: typing.Optional[str]
#: the description of the app
description: str
#: when false only app owner can join the app's bot to guilds
bot_public: bool
#: when true the app's bot will only join upon completion of the full
#: oauth2 code grant flow
bot_require_code_grant: bool
#: if this application is a game sold on Discord, this field will be the
#: summary field for the store page of its primary sku
summary: str
#: the hex encoded key for verification in interactions and the GameSDK's
#: GetTicket
verify_key: str
#: if the application belongs to a team, this will be a list of the
#: members of that team
team: typing.Optional[Team]
#: an array of rpc origin urls, if rpc is enabled
rpc_origins: Unknownish[typing.List[str]] = UNKNOWN
#: the url of the app's terms of service
terms_of_service_url: Unknownish[str] = UNKNOWN
#: the url of the app's privacy policy
privacy_policy_url: Unknownish[str] = UNKNOWN
#: partial user object containing info on the owner of the application
owner: Unknownish[typing.Dict[str, typing.Any]] = UNKNOWN
#: if this application is a game sold on Discord, this field will be the
#: guild to which it has been linked
guild_id: Unknownish[Snowflake] = UNKNOWN
#: if this application is a game sold on Discord, this field will be the
#: id of the "Game SKU" that is created, if exists
primary_sku_id: Unknownish[Snowflake] = UNKNOWN
#: if this application is a game sold on Discord, this field will be the
#: URL slug that links to the store page
slug: Unknownish[str] = UNKNOWN
#: the application's default rich presence invite cover image hash
cover_image: Unknownish[str] = UNKNOWN
#: the application's public flags
flags: Unknownish[int] = UNKNOWN
class ApplicationFlags(enum.IntFlag):
GATEWAY_PRESENCE = 1 << 12
GATEWAY_PRESENCE_LIMITED = 1 << 13
GATEWAY_GUILD_MEMBERS = 1 << 14
GATEWAY_GUILD_MEMBERS_LIMITED = 1 << 15
VERIFICATION_PENDING_GUILD_LIMIT = 1 << 16
EMBEDDED = 1 << 17
GATEWAY_MESSAGE_CONTENT = 1 << 18
GATEWAY_MESSAGE_CONTENT_LIMITED = 1 << 19
| 37.057143 | 77 | 0.708558 |
4a26205b724fe9926e8bd6eba65649f1bf7c9268 | 381 | py | Python | edd/wsgi.py | TeselaGen/jbei-edd | 92792fb30bbd504143b2f75bf08d05b141a7ef6f | [
"BSD-3-Clause-LBNL"
] | null | null | null | edd/wsgi.py | TeselaGen/jbei-edd | 92792fb30bbd504143b2f75bf08d05b141a7ef6f | [
"BSD-3-Clause-LBNL"
] | null | null | null | edd/wsgi.py | TeselaGen/jbei-edd | 92792fb30bbd504143b2f75bf08d05b141a7ef6f | [
"BSD-3-Clause-LBNL"
] | null | null | null | """
WSGI config for edd project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "edd.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| 25.4 | 78 | 0.784777 |
4a26208847cd2086cb93fe3ed53665f0dece96d8 | 15,760 | py | Python | bokeh/util/compiler.py | kinghows/bokeh | aeb7abc1dbe2b67ce0f4422838a96fb8362c52c7 | [
"BSD-3-Clause"
] | null | null | null | bokeh/util/compiler.py | kinghows/bokeh | aeb7abc1dbe2b67ce0f4422838a96fb8362c52c7 | [
"BSD-3-Clause"
] | null | null | null | bokeh/util/compiler.py | kinghows/bokeh | aeb7abc1dbe2b67ce0f4422838a96fb8362c52c7 | [
"BSD-3-Clause"
] | null | null | null | ''' Provide functions and classes to help with various JS and CSS compilation.
'''
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
import io
import re
import os
import sys
import six
import json
import hashlib
from os.path import dirname, join, abspath, exists, isabs
from subprocess import Popen, PIPE
from ..model import Model
from ..settings import settings
from .string import snakify
_plugin_umd = \
"""\
(function(root, factory) {
// if(typeof exports === 'object' && typeof module === 'object')
// factory(require("Bokeh"));
// else if(typeof define === 'function' && define.amd)
// define(["Bokeh"], factory);
// else if(typeof exports === 'object')
// factory(require("Bokeh"));
// else
factory(root["Bokeh"]);
})(this, function(Bokeh) {
var define;
return %(content)s;
});
"""
# XXX: this is the same as bokehjs/src/js/plugin-prelude.js
_plugin_prelude = \
"""\
(function outer(modules, entry) {
if (Bokeh != null) {
return Bokeh.register_plugin(modules, {}, entry);
} else {
throw new Error("Cannot find Bokeh. You have to load it prior to loading plugins.");
}
})
"""
_plugin_template = \
"""\
%(prelude)s\
({
"custom/main": function(require, module, exports) {
var models = {
%(exports)s
};
require("base").register_models(models);
module.exports = models;
},
%(modules)s
}, "custom/main");
"""
_style_template = \
"""\
(function() {
var head = document.getElementsByTagName('head')[0];
var style = document.createElement('style');
style.type = 'text/css';
var css = %(css)s;
if (style.styleSheet) {
style.styleSheet.cssText = css;
} else {
style.appendChild(document.createTextNode(css));
}
head.appendChild(style);
}());
"""
_export_template = \
""""%(name)s": require("%(module)s").%(name)s"""
_module_template = \
""""%(module)s": function(require, module, exports) {\n%(source)s\n}"""
class AttrDict(dict):
''' Provide a dict subclass that supports access by named attributes.
'''
def __getattr__(self, key):
return self[key]
class CompilationError(RuntimeError):
''' A ``RuntimeError`` subclass for reporting JS compilation errors.
'''
def __init__(self, error):
super(CompilationError, self).__init__()
self.line = error.get("line")
self.column = error.get("column")
self.message = error.get("message")
self.text = error.get("text")
self.annotated = error.get("annotated")
def __str__(self):
return self.text
bokehjs_dir = settings.bokehjsdir()
nodejs_min_version = (6, 10, 0)
def _detect_nodejs():
if settings.nodejs_path() is not None:
nodejs_paths = [settings.nodejs_path()]
else:
nodejs_paths = ["nodejs", "node"]
for nodejs_path in nodejs_paths:
try:
proc = Popen([nodejs_path, "--version"], stdout=PIPE, stderr=PIPE)
(stdout, _) = proc.communicate()
except OSError:
continue
if proc.returncode != 0:
continue
match = re.match(r"^v(\d+)\.(\d+)\.(\d+).*$", stdout.decode("utf-8"))
if match is not None:
version = tuple(int(v) for v in match.groups())
if version >= nodejs_min_version:
return nodejs_path
# if we've reached here, no valid version was found
version = ".".join(map(str, nodejs_min_version))
raise RuntimeError('node.js v%s or higher is needed to allow compilation of custom models ' % version +
'("conda install nodejs" or follow https://nodejs.org/en/download/)')
_nodejs = None
_npmjs = None
def _nodejs_path():
global _nodejs
if _nodejs is None:
_nodejs = _detect_nodejs()
return _nodejs
def _npmjs_path():
global _npmjs
if _npmjs is None:
_npmjs = join(dirname(_nodejs_path()), "npm")
if sys.platform == "win32":
_npmjs += '.cmd'
return _npmjs
def _crlf_cr_2_lf(s):
return re.sub(r"\\r\\n|\\r|\\n", r"\\n", s)
def _run(app, argv, input=None):
proc = Popen([app] + argv, stdout=PIPE, stderr=PIPE, stdin=PIPE)
(stdout, errout) = proc.communicate(input=None if input is None else json.dumps(input).encode())
if proc.returncode != 0:
raise RuntimeError(errout)
else:
return _crlf_cr_2_lf(stdout.decode('utf-8'))
def _run_nodejs(argv, input=None):
return _run(_nodejs_path(), argv, input)
def _run_npmjs(argv, input=None):
return _run(_npmjs_path(), argv, input)
def _version(run_app):
try:
version = run_app(["--version"])
except RuntimeError:
return None
else:
return version.strip()
def nodejs_version():
return _version(_run_nodejs)
def npmjs_version():
return _version(_run_npmjs)
def nodejs_compile(code, lang="javascript", file=None):
compilejs_script = join(bokehjs_dir, "js", "compiler.js")
output = _run_nodejs([compilejs_script], dict(code=code, lang=lang, file=file))
return AttrDict(json.loads(output))
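# Hedged usage sketch (added for illustration, not part of the original module; it needs a
# working node.js install plus the bundled compiler.js). On success the returned AttrDict
# exposes .code and .deps; on failure it carries .error, matching how _compile_models and
# bundle_models below consume it. The TypeScript snippet and file name are made up.
#
#     result = nodejs_compile("export const answer: number = 42", lang="typescript", file="example.ts")
#     if "error" in result:
#         raise CompilationError(result.error)
#     print(result.code)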
class Implementation(object):
''' Base class for representing Bokeh custom model implementations.
'''
file = None
class Inline(Implementation):
''' Base class for representing Bokeh custom model implementations that may
be given as inline code in some language.
Args:
code (str) :
The source code for the implementation
file (str, optional)
A file path to a file containing the source text (default: None)
'''
def __init__(self, code, file=None):
self.code = code
self.file = file
class CoffeeScript(Inline):
''' An implementation for a Bokeh custom model in CoffeeScript.
Example:
.. code-block:: python
class MyExt(Model):
__implementation__ = CoffeeScript(""" <CoffeeScript code> """)
Note that ``CoffeeScript`` is the default implementation language for
    custom model implementations. The following is equivalent to the example above:
.. code-block:: python
class MyExt(Model):
__implementation__ == """ <some coffeescript code> """
'''
@property
def lang(self):
return "coffeescript"
class TypeScript(Inline):
''' An implementation for a Bokeh custom model in TypeScript
Example:
.. code-block:: python
class MyExt(Model):
__implementation__ = TypeScript(""" <TypeScript code> """)
'''
@property
def lang(self):
return "typescript"
class JavaScript(Inline):
''' An implementation for a Bokeh custom model in JavaScript
Example:
.. code-block:: python
class MyExt(Model):
                __implementation__ = JavaScript(""" <JavaScript code> """)
'''
@property
def lang(self):
return "javascript"
class Less(Inline):
''' An implementation of a Less CSS style sheet.
'''
@property
def lang(self):
return "less"
class FromFile(Implementation):
''' A custom model implementation read from a separate source file.
Args:
path (str) :
The path to the file containing the extension source code
'''
def __init__(self, path):
with io.open(path, encoding="utf-8") as f:
self.code = f.read()
self.file = path
@property
def lang(self):
if self.file.endswith(".coffee"):
return "coffeescript"
if self.file.endswith(".ts"):
return "typescript"
if self.file.endswith(".js"):
return "javascript"
if self.file.endswith((".css", ".less")):
return "less"
#: recognized extensions that can be compiled
exts = (".coffee", ".ts", ".js", ".css", ".less")
class CustomModel(object):
''' Represent a custom (user-defined) Bokeh model.
'''
def __init__(self, cls):
self.cls = cls
@property
def name(self):
return self.cls.__name__
@property
def full_name(self):
name = self.cls.__module__ + "." + self.name
return name.replace("__main__.", "")
@property
def file(self):
module = sys.modules[self.cls.__module__]
if hasattr(module, "__file__"):
return abspath(module.__file__)
else:
return None
@property
def path(self):
path = getattr(self.cls, "__base_path__", None)
if path is not None:
return path
elif self.file is not None:
return dirname(self.file)
else:
return os.getcwd()
@property
def implementation(self):
impl = self.cls.__implementation__
if isinstance(impl, six.string_types):
if "\n" not in impl and impl.endswith(exts):
impl = FromFile(impl if isabs(impl) else join(self.path, impl))
else:
impl = CoffeeScript(impl)
if isinstance(impl, Inline) and impl.file is None:
impl = impl.__class__(impl.code, (self.file or "<string>") + ":" + self.name)
return impl
@property
def dependencies(self):
return getattr(self.cls, "__dependencies__", {})
@property
def module(self):
return "custom/%s" % snakify(self.full_name)
def _model_cache_no_op(model, implementation):
"""Return cached compiled implementation"""
return None
_CACHING_IMPLEMENTATION = _model_cache_no_op
def get_cache_hook():
'''Returns the current cache hook used to look up the compiled
code given the CustomModel and Implementation'''
return _CACHING_IMPLEMENTATION
def set_cache_hook(hook):
'''Sets a compiled model cache hook used to look up the compiled
code given the CustomModel and Implementation'''
global _CACHING_IMPLEMENTATION
_CACHING_IMPLEMENTATION = hook
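# Hedged illustration (added; not part of the original module): one way a caller might plug
# in a cache via set_cache_hook. The only contract relied on here is the one visible in
# _compile_models below: return a previously compiled result, or None to force a fresh
# nodejs_compile. The hook and cache names are made up for this sketch.
#
#     _compiled_cache = {}
#
#     def _sha_cache_hook(model, implementation):
#         key = hashlib.sha256(implementation.code.encode('utf-8')).hexdigest()
#         return _compiled_cache.get(key)
#
#     set_cache_hook(_sha_cache_hook)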
def _get_custom_models(models):
"""Returns CustomModels for models with a custom `__implementation__`"""
custom_models = {}
for cls in models:
impl = getattr(cls, "__implementation__", None)
if impl is not None:
model = CustomModel(cls)
custom_models[model.full_name] = model
if not custom_models:
return None
return custom_models
def _compile_models(custom_models):
"""Returns the compiled implementation of supplied `models`. """
ordered_models = sorted(custom_models.values(), key=lambda model: model.full_name)
custom_impls = {}
dependencies = []
for model in ordered_models:
dependencies.extend(list(model.dependencies.items()))
if dependencies:
dependencies = sorted(dependencies, key=lambda name_version: name_version[0])
_run_npmjs(["install", "--no-progress"] + [ name + "@" + version for (name, version) in dependencies ])
for model in ordered_models:
impl = model.implementation
compiled = _CACHING_IMPLEMENTATION(model, impl)
if compiled is None:
compiled = nodejs_compile(impl.code, lang=impl.lang, file=impl.file)
if "error" in compiled:
raise CompilationError(compiled.error)
custom_impls[model.full_name] = compiled
return custom_impls
def bundle_models(models):
"""Create a bundle of `models`. """
custom_models = _get_custom_models(models)
if custom_models is None:
return
exports = []
modules = []
def read_json(name):
with io.open(join(bokehjs_dir, "js", name + ".json"), encoding="utf-8") as f:
return json.loads(f.read())
bundles = ["bokeh", "bokeh-api", "bokeh-widgets", "bokeh-tables", "bokeh-gl"]
known_modules = set(sum([ read_json(name) for name in bundles ], []))
custom_impls = _compile_models(custom_models)
extra_modules = {}
def resolve_modules(to_resolve, root):
resolved = {}
for module in to_resolve:
if module.startswith(("./", "../")):
def mkpath(module, ext=""):
return abspath(join(root, *module.split("/")) + ext)
if module.endswith(exts):
path = mkpath(module)
if not exists(path):
raise RuntimeError("no such module: %s" % module)
else:
for ext in exts:
path = mkpath(module, ext)
if exists(path):
break
else:
raise RuntimeError("no such module: %s" % module)
impl = FromFile(path)
compiled = nodejs_compile(impl.code, lang=impl.lang, file=impl.file)
if "error" in compiled:
raise CompilationError(compiled.error)
if impl.lang == "less":
code = _style_template % dict(css=json.dumps(compiled.code))
deps = []
else:
code = compiled.code
deps = compiled.deps
sig = hashlib.sha256(code.encode('utf-8')).hexdigest()
resolved[module] = sig
deps_map = resolve_deps(deps, dirname(path))
if sig not in extra_modules:
extra_modules[sig] = True
modules.append((sig, code, deps_map))
else:
raise RuntimeError("no such module: %s" % module)
return resolved
def resolve_deps(deps, root):
custom_modules = set(model.module for model in custom_models.values())
missing = set(deps) - known_modules - custom_modules
return resolve_modules(missing, root)
for model in custom_models.values():
compiled = custom_impls[model.full_name]
deps_map = resolve_deps(compiled.deps, model.path)
exports.append((model.name, model.module))
modules.append((model.module, compiled.code, deps_map))
# sort everything by module name
exports = sorted(exports, key=lambda spec: spec[1])
modules = sorted(modules, key=lambda spec: spec[0])
for i, (module, code, deps) in enumerate(modules):
for name, ref in deps.items():
code = code.replace("""require("%s")""" % name, """require("%s")""" % ref)
code = code.replace("""require('%s')""" % name, """require('%s')""" % ref)
modules[i] = (module, code)
sep = ",\n"
exports = sep.join(_export_template % dict(name=name, module=module) for (name, module) in exports)
modules = sep.join(_module_template % dict(module=module, source=code) for (module, code) in modules)
content = _plugin_template % dict(prelude=_plugin_prelude, exports=exports, modules=modules)
return _plugin_umd % dict(content=content)
def calc_cache_key():
''' Generate a key to cache a custom extension implementation with.
There is no metadata other than the Model classes, so this is the only
base to generate a cache key.
We build the model keys from the list of ``model.full_name``. This is
    not ideal, but possibly a better solution can be found later.
'''
models = Model.model_class_reverse_map.values()
custom_model_names = ""
for cls in models:
impl = getattr(cls, "__implementation__", None)
if impl is not None:
model = CustomModel(cls)
custom_model_names += model.full_name
key = hashlib.sha256(custom_model_names.encode('utf-8')).hexdigest()
return key
_bundle_cache = {}
def bundle_all_models():
key = calc_cache_key()
bundle = _bundle_cache.get(key, None)
if bundle is None:
_bundle_cache[key] = bundle = bundle_models(Model.model_class_reverse_map.values()) or ""
return bundle
| 28.602541 | 111 | 0.614467 |
4a2621943873c3202b949b62a414d552f4d74978 | 1,597 | py | Python | reinvent-2019/rhythm-cloud/lambda/Greengrass_startSong/boto3/compat.py | kienpham2000/aws-builders-fair-projects | 6c4075c0945a6318b217355a6fc663e35ffb9dba | [
"Apache-2.0"
] | 1,738 | 2017-09-21T10:59:12.000Z | 2022-03-31T21:05:46.000Z | reinvent-2019/rhythm-cloud/lambda/Greengrass_startSong/boto3/compat.py | kienpham2000/aws-builders-fair-projects | 6c4075c0945a6318b217355a6fc663e35ffb9dba | [
"Apache-2.0"
] | 427 | 2017-09-29T22:54:36.000Z | 2022-02-15T19:26:50.000Z | reinvent-2019/rhythm-cloud/lambda/Greengrass_startSong/boto3/compat.py | kienpham2000/aws-builders-fair-projects | 6c4075c0945a6318b217355a6fc663e35ffb9dba | [
"Apache-2.0"
] | 671 | 2017-09-21T08:04:01.000Z | 2022-03-29T14:30:07.000Z | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import sys
import os
import errno
import socket
from botocore.vendored import six
if six.PY3:
# In python3, socket.error is OSError, which is too general
# for what we want (i.e FileNotFoundError is a subclass of OSError).
# In py3 all the socket related errors are in a newly created
# ConnectionError
SOCKET_ERROR = ConnectionError
else:
SOCKET_ERROR = socket.error
if six.PY3:
import collections.abc as collections_abc
else:
import collections as collections_abc
if sys.platform.startswith('win'):
def rename_file(current_filename, new_filename):
try:
os.remove(new_filename)
except OSError as e:
if not e.errno == errno.ENOENT:
# We only want to a ignore trying to remove
# a file that does not exist. If it fails
# for any other reason we should be propagating
# that exception.
raise
os.rename(current_filename, new_filename)
else:
rename_file = os.rename
| 32.591837 | 73 | 0.693175 |
4a2621a1d2b14a1a8061e23944cb15c113b03b36 | 3,949 | py | Python | official/vision/gan/megengine_mimicry/metrics/fid/fid_utils.py | pepperonibo/Models | abc32993529e70cb019ab5ee2f1022ac01ba2398 | [
"Apache-2.0"
] | 294 | 2020-03-23T05:55:04.000Z | 2022-03-31T01:31:17.000Z | official/vision/gan/megengine_mimicry/metrics/fid/fid_utils.py | pepperonibo/Models | abc32993529e70cb019ab5ee2f1022ac01ba2398 | [
"Apache-2.0"
] | 83 | 2020-03-24T12:06:25.000Z | 2021-12-13T10:43:26.000Z | official/vision/gan/megengine_mimicry/metrics/fid/fid_utils.py | pepperonibo/Models | abc32993529e70cb019ab5ee2f1022ac01ba2398 | [
"Apache-2.0"
] | 103 | 2020-03-24T07:31:18.000Z | 2022-03-23T01:46:18.000Z | # Copyright (c) 2020 Kwot Sin Lee
# This code is licensed under MIT license
# (https://github.com/kwotsin/mimicry/blob/master/LICENSE)
# ------------------------------------------------------------------------------
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# This file has been modified by Megvii ("Megvii Modifications").
# All Megvii Modifications are Copyright (C) 2014-2019 Megvii Inc. All rights reserved.
# ------------------------------------------------------------------------------
"""
Helper functions for calculating FID as adopted from the official FID code:
https://github.com/kwotsin/dissertation/blob/master/eval/TTUR/fid.py
"""
import numpy as np
from scipy import linalg
from ..inception_model import inception_utils
def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
"""
Numpy implementation of the Frechet Distance.
The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
and X_2 ~ N(mu_2, C_2) is
d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
Stable version by Dougal J. Sutherland.
Args:
        mu1 (ndarray): Numpy array containing the activations of the pool_3 layer of the
            inception net (as returned by the function 'get_predictions')
            for generated samples.
        mu2 (ndarray): The sample mean over activations of the pool_3 layer, precalculated
            on a representative data set.
        sigma1 (ndarray): The covariance matrix over activations of the pool_3 layer for
            generated samples.
        sigma2 (ndarray): The covariance matrix over activations of the pool_3 layer,
            precalculated on a representative data set.
Returns:
np.float64: The Frechet Distance.
"""
if mu1.shape != mu2.shape or sigma1.shape != sigma2.shape:
raise ValueError(
"(mu1, sigma1) should have exactly the same shape as (mu2, sigma2)."
)
mu1 = np.atleast_1d(mu1)
mu2 = np.atleast_1d(mu2)
sigma1 = np.atleast_2d(sigma1)
sigma2 = np.atleast_2d(sigma2)
diff = mu1 - mu2
# Product might be almost singular
covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
if not np.isfinite(covmean).all():
print(
"WARNING: fid calculation produces singular product; adding {} to diagonal of cov estimates"
.format(eps))
offset = np.eye(sigma1.shape[0]) * eps
covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
# Numerical error might give slight imaginary component
if np.iscomplexobj(covmean):
if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
m = np.max(np.abs(covmean.imag))
raise ValueError("Imaginary component {}".format(m))
covmean = covmean.real
tr_covmean = np.trace(covmean)
return diff.dot(diff) + np.trace(sigma1) + np.trace(
sigma2) - 2 * tr_covmean
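# Hedged usage sketch (added for illustration, not in the original module): computing the
# distance from two sets of pre-computed activations. The random arrays stand in for real
# inception features; in practice the (mu, sigma) pairs come from
# calculate_activation_statistics below.
#
#     act_fake = np.random.RandomState(0).randn(256, 8)
#     act_real = np.random.RandomState(1).randn(256, 8)
#     mu1, sigma1 = act_fake.mean(axis=0), np.cov(act_fake, rowvar=False)
#     mu2, sigma2 = act_real.mean(axis=0), np.cov(act_real, rowvar=False)
#     fid = calculate_frechet_distance(mu1, sigma1, mu2, sigma2)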
def calculate_activation_statistics(images, sess, batch_size=50, verbose=True):
"""
Calculation of the statistics used by the FID.
Args:
images (ndarray): Numpy array of shape (N, H, W, 3) and values in
the range [0, 255].
sess (Session): TensorFlow session object.
batch_size (int): Batch size for inference.
verbose (bool): If True, prints out logging information.
Returns:
ndarray: Mean of inception features from samples.
ndarray: Covariance of inception features from samples.
"""
act = inception_utils.get_activations(images, sess, batch_size, verbose)
mu = np.mean(act, axis=0)
sigma = np.cov(act, rowvar=False)
return mu, sigma
| 37.609524 | 104 | 0.646493 |
4a262337fd6f86f8f696ba205e79abf77c753d9c | 910 | py | Python | ezerase/source/ataeraser.py | ebangin127/nstools | 2a0bb4e6fd3688afd74afd4c7d69eeb46f096a99 | [
"MIT"
] | 15 | 2016-02-12T14:55:53.000Z | 2021-08-17T09:44:12.000Z | ezerase/source/ataeraser.py | ebangin127/nstools | 2a0bb4e6fd3688afd74afd4c7d69eeb46f096a99 | [
"MIT"
] | 1 | 2020-10-28T12:19:56.000Z | 2020-10-28T12:19:56.000Z | ezerase/source/ataeraser.py | ebangin127/nstools | 2a0bb4e6fd3688afd74afd4c7d69eeb46f096a99 | [
"MIT"
] | 7 | 2016-08-21T23:57:47.000Z | 2022-02-14T03:26:21.000Z | import passwordSetter
import atahdparmeraser
import passwordCleaner
import defroster
import confirmView
import completeView
import eraseType
import freezeType
class ATAEraser:
def erase(self, selected, model, serial):
erasestate = eraseType.EraseType.failed
passwordcleared = False
laststate = defroster.Defroster().defrost(selected)
if selected == freezeType.FreezeType.defrosting:
return True
try:
passwordset = passwordSetter.PasswordSetter().set(selected, laststate)
confirm = confirmView.ConfirmView(passwordset, model, serial).agree()
erasestate = atahdparmeraser.ATAHDParmEraser().erase(selected, confirm)
passwordcleared = passwordCleaner.PasswordCleaner().clean(selected, erasestate)
finally:
completeView.CompleteView(erasestate, model, serial)
return passwordcleared
| 37.916667 | 91 | 0.717582 |
4a2623575dd03c724745511d6e0aa64047877828 | 2,039 | py | Python | banner/tests/__init__.py | praekelt/jmbo-banner | f8eed0f28b989a8d5d305e95f6e6661b328cad0b | [
"BSD-3-Clause"
] | null | null | null | banner/tests/__init__.py | praekelt/jmbo-banner | f8eed0f28b989a8d5d305e95f6e6661b328cad0b | [
"BSD-3-Clause"
] | 4 | 2017-10-30T15:00:42.000Z | 2017-11-09T09:11:32.000Z | banner/tests/__init__.py | praekelt/jmbo-banner | f8eed0f28b989a8d5d305e95f6e6661b328cad0b | [
"BSD-3-Clause"
] | null | null | null | # import os
#
# from django.test import TestCase as BaseTestCase
# from django.test.client import Client as BaseClient, RequestFactory
# from django.contrib.auth.models import User
# from django.template import RequestContext, loader
# from django.core.files.base import ContentFile
#
# from banner.models import ImageBanner
#
# RES_DIR = os.path.join(os.path.dirname(__file__), "res")
# IMAGE_PATH = os.path.join(RES_DIR, "image.jpg")
#
#
# def set_image(obj):
# obj.image.save(
# os.path.basename(IMAGE_PATH),
# ContentFile(open(IMAGE_PATH, "rb").read())
# )
#
#
# class TestCase(BaseTestCase):
#
# @classmethod
# def setUpClass(cls):
# cls.request = RequestFactory()
# cls.client = BaseClient()
#
# # Editor
# cls.editor, dc = User.objects.get_or_create(
# username='editor',
# email='[email protected]'
# )
# cls.editor.set_password("password")
# cls.editor.save()
#
# # Image banner
# obj, dc = ImageBanner.objects.get_or_create(
# title='ImageBanner',
# owner=cls.editor, state='published',
# )
# obj.sites = [1]
# set_image(obj)
# obj.save()
# cls.imagebanner = obj
#
# def test_imagebanner_detail(self):
# response = self.client.get(self.imagebanner.get_absolute_url())
# self.assertEqual(response.status_code, 200)
# self.failUnless(
# """<a href="" rel="nofollow"><img src="" alt="ImageBanner" \
# alt="ImageBanner" /></a>""" in response.content
# )
#
# def test_imagebanner_list_item(self):
# t = loader.get_template(
# "banner/inclusion_tags/imagebanner_list_item.html"
# )
# context = RequestContext(self.request)
# context["object"] = self.imagebanner
# html = t.render(context)
# self.failUnless(
# """<a href="" rel="nofollow"><img src="" alt="ImageBanner" \
# alt="ImageBanner" /></a>""" in html
# )
| 30.893939 | 74 | 0.594899 |
4a26238a530d846f51771f92375edca99471cda4 | 318 | py | Python | birp/__version__.py | evtn/birp | fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0 | [
"MIT"
] | 21 | 2021-10-09T07:30:12.000Z | 2022-01-04T12:15:14.000Z | birp/__version__.py | evtn/birp | fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0 | [
"MIT"
] | 1 | 2021-10-18T10:49:59.000Z | 2021-10-18T10:49:59.000Z | birp/__version__.py | evtn/birp | fe54669f296ca99eb8cc5296ab3ec4896ad8cfc0 | [
"MIT"
] | null | null | null | """
__version__.py
~~~~~~~~~~~~~~
Information about the current version of birp package.
"""
__title__ = "birp"
__description__ = "birp — большой русский питон (BIg Russian Python)"
__version__ = "0.1.8"
__author__ = "evtn"
__author_email__ = "[email protected]"
__license__ = "MIT"
__url__ = "https://github.com/evtn/birp"
| 21.2 | 69 | 0.691824 |
4a2624270e9eae9cd68d80eb4a7df6143d859eae | 342 | py | Python | fluidlab/_version.py | fluiddyn/fluidlab | 77e7ae3129788089814e38eba93802026a07fcf6 | [
"CECILL-B"
] | 4 | 2020-01-19T04:12:12.000Z | 2021-11-09T08:30:25.000Z | fluidlab/_version.py | fluiddyn/fluidlab | 77e7ae3129788089814e38eba93802026a07fcf6 | [
"CECILL-B"
] | null | null | null | fluidlab/_version.py | fluiddyn/fluidlab | 77e7ae3129788089814e38eba93802026a07fcf6 | [
"CECILL-B"
] | null | null | null | """
Module where the version is written.
It is executed in setup.py and imported in fluiddyn/__init__.py.
See:
http://en.wikipedia.org/wiki/Software_versioning
http://legacy.python.org/dev/peps/pep-0386/
'a' or 'alpha' means alpha version (internal testing),
'b' or 'beta' means beta version (external testing).
"""
__version__ = "0.1.0"
| 22.8 | 64 | 0.733918 |
4a26243c1ad4e67759aa8fdd4e35b25d250e8df8 | 668 | py | Python | beautifulsoup.py | Smartproxy/BeautifulSoup | f4a1d91cb3c018a2eb7fa5e40a84d4c36c7b6350 | [
"MIT"
] | 6 | 2019-07-09T15:30:45.000Z | 2020-12-31T18:29:41.000Z | beautifulsoup/beautifulsoup.py | Kiofo/Smartproxy | 29685fa0f09e03597bf6474d3ba3f452f5143ecf | [
"MIT"
] | null | null | null | beautifulsoup/beautifulsoup.py | Kiofo/Smartproxy | 29685fa0f09e03597bf6474d3ba3f452f5143ecf | [
"MIT"
] | 2 | 2020-11-24T17:08:19.000Z | 2021-07-15T09:52:25.000Z | import requests
from bs4 import BeautifulSoup
url = 'https://www.whatismyip.com/'
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36'}
proxies = {'https': 'https://gate.smartproxy.com:7000'} # Your desired proxy endpoint, inside the quotation marks ('')
auth = requests.auth.HTTPProxyAuth('username','password') # Your username and password for proxy authentication
soup = BeautifulSoup(requests.get(url, headers=headers, proxies=proxies, auth=auth).text, 'lxml')
info_box = soup.find('div', attrs={'class': 'card-body'})
ip = info_box.findAll('li')[0]
print(ip.get_text())
| 37.111111 | 148 | 0.730539 |
4a2624bfe03df9435e8a1fd87b0f7f0253dc0b47 | 21,390 | py | Python | cir/doc_views.py | xsunfeng/cir | 03790384c160276652fd87b614c719c6363f1802 | [
"MIT"
] | null | null | null | cir/doc_views.py | xsunfeng/cir | 03790384c160276652fd87b614c719c6363f1802 | [
"MIT"
] | null | null | null | cir/doc_views.py | xsunfeng/cir | 03790384c160276652fd87b614c719c6363f1802 | [
"MIT"
] | 1 | 2018-06-23T21:11:53.000Z | 2018-06-23T21:11:53.000Z | import json
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.utils import timezone
from cir.models import *
def api_doc(request):
response = {}
action = request.REQUEST.get('action')
forum = Forum.objects.get(id=request.session['forum_id'])
if action == 'get-categories':
context = {}
# try:
context['forum_name'] = forum.full_name
# retrieve docs in a folder
folders = EntryCategory.objects.filter(forum_id=request.session['forum_id'], category_type='doc')
context['folders'] = []
for folder in folders:
folder_info = folder.getAttr()
folder_info['docs'] = []
docs = Doc.objects.filter(folder=folder)
for doc in docs:
folder_info['docs'].append(doc.getAttr())
context['folders'].append(folder_info)
# retrieve docs not in any folder
context['root_docs'] = []
root_docs = Doc.objects.filter(forum_id=request.session['forum_id'], folder__isnull=True).order_by("order")
for doc in root_docs:
context['root_docs'].append(doc.getAttr())
# retrieve aggregated tags
tags_count = {}
context['tags'] = []
all_sections = DocSection.objects.filter(forum_id=request.session['forum_id'])
for section in all_sections:
for tag in Tag.objects.filter(context=section):
if tag.content in tags_count:
tags_count[tag.content] += 1
else:
tags_count[tag.content] = 1
tags = sorted(tags_count, key=tags_count.get, reverse=True)
for tag in tags:
context['tags'].append({'content': tag, 'count': tags_count[tag]})
response['html'] = render_to_string("doc/doc-category.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
# except:
# return HttpResponse('Unknown error.', status=403)
if action == 'get-document':
doc_id = request.REQUEST.get('doc_id')
try:
doc = Doc.objects.get(id=doc_id)
context = {}
context['forum_phase'] = forum.phase
context['title'] = doc.title
context['sections'] = []
ordered_sections = doc.sections.filter(order__isnull=False).order_by('order')
for section in ordered_sections:
context['sections'].append(section.getAttr(forum))
unordered_sections = doc.sections.filter(order__isnull=True).order_by('updated_at')
for section in unordered_sections:
context['sections'].append(section.getAttr(forum))
response['html'] = render_to_string("doc/doc-content.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
except:
return HttpResponse('The document does not exist.', status=403)
def api_highlight(request):
response = {}
action = request.REQUEST.get('action')
now = timezone.now()
if action == 'create':
nugget_status = request.REQUEST.get('nugget_status')
if (nugget_status == 'new'):
if not request.user.is_authenticated():
return HttpResponse("Please log in first.", status=403)
content = request.REQUEST.get('content')
content_type = request.REQUEST.get('type')
start = request.REQUEST.get('start')
end = request.REQUEST.get('end')
context_id = request.REQUEST.get('contextId')
text = request.REQUEST.get('text')
# create highlight object
context = Entry.objects.get(id=context_id)
hl_type = request.REQUEST.get('hl_type')
if (hl_type == "question"):
highlight = Highlight(start_pos=start, end_pos=end, context=context, author=request.user, text=text, created_at = now, is_nugget = False)
else: # real nugget
highlight = Highlight(start_pos=start, end_pos=end, context=context, author=request.user, text=text, created_at = now, is_nugget = True)
if (request.REQUEST.get('theme_id')):
theme_id = request.REQUEST.get('theme_id')
highlight.theme = ClaimTheme.objects.get(id = theme_id)
highlight.save()
response['highlight_id'] = highlight.id
# then create the content
if 'actual_user_id' in request.session:
actual_author = User.objects.get(id=request.session['actual_user_id'])
else:
actual_author = None
if content_type == 'comment':
if actual_author:
Post.objects.create(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user,
content=content, created_at=now, updated_at=now, highlight=highlight, content_type='comment')
else:
Post.objects.create(forum_id=request.session['forum_id'], author=request.user, content=content,
created_at=now, updated_at=now, highlight=highlight, content_type='comment')
elif content_type == 'question':
if actual_author:
Post.objects.create(forum_id=request.session['forum_id'], author=actual_author, delegator=request.user,
content=content, created_at=now, updated_at=now, highlight=highlight, content_type='question')
else:
Post.objects.create(forum_id=request.session['forum_id'], author=request.user, content=content,
created_at=now, updated_at=now, highlight=highlight, content_type='question')
elif content_type == 'claim':
print ""
# 2016/11/30: now a nugget is directly converted to a claim
category = request.REQUEST.get('category')
now = timezone.now()
newClaim = Claim(forum_id=request.session['forum_id'], author=request.user, created_at=now, updated_at=now, content=content, claim_category=category)
newClaim.save()
claim_version = ClaimVersion(forum_id=request.session['forum_id'], author=request.user, content=text, created_at=now, updated_at=now, claim=newClaim, is_adopted=True)
claim_version.save()
newHighlightClaim = HighlightClaim(claim_id=newClaim.id, highlight_id=highlight.id)
newHighlightClaim.save()
claim = newClaim
else: # if (nugget_status == 'exist'):
nugget_id = request.REQUEST['nugget_id'].split(" ")[0]
claim = HighlightClaim.objects.filter(highlight_id= Highlight.objects.get(id=nugget_id))[0].claim
slot = Claim.objects.get(id=request.REQUEST['slot_id'])
if not ClaimReference.objects.filter(refer_type='stmt', from_claim=claim, to_claim=slot).exists():
ClaimReference.objects.create(refer_type='stmt', from_claim=claim, to_claim=slot)
if 'actual_user_id' in request.session:
actual_author = User.objects.get(id=request.session['actual_user_id'])
SlotAssignment.objects.create(forum_id=request.session['forum_id'], user=actual_author, delegator=request.user,
entry=claim, created_at=now, slot=slot, event_type='add')
else:
SlotAssignment.objects.create(forum_id=request.session['forum_id'], user=request.user,
entry=claim, created_at=now, slot=slot, event_type='add')
response["slot_id"] = slot.id
response["slot_order"] = slot.stmt_order
return HttpResponse(json.dumps(response), mimetype='application/json')
if action == 'load-doc':
doc_id = request.REQUEST.get('doc_id')
doc = Doc.objects.get(id=doc_id)
response['highlights'] = []
mytags = set()
alltags = set()
for section in doc.sections.all():
highlights = section.highlights.all()
for highlight in highlights:
highlight_info = highlight.getAttr()
response['highlights'].append(highlight_info)
if highlight_info['type'] == 'tag':
if highlight_info['author_id'] == request.user.id:
mytags.add(highlight_info['content'])
alltags.add(highlight_info['content'])
response['html'] = render_to_string('doc/doc-tag-area.html', {'mytags': mytags, 'alltags': alltags})
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_annotation(request):
response = {}
action = request.REQUEST.get('action')
forum = Forum.objects.get(id=request.session['forum_id'])
if action == 'load-thread':
context = {}
highlight_id = request.REQUEST.get('highlight_id')
highlight = Highlight.objects.get(id=highlight_id)
if (highlight.is_nugget):
context['theme'] = highlight.theme
context['author'] = highlight.author
context['forum_phase'] = forum.phase
context['source'] = 'highlight'
context['entries'] = []
posts = highlight.posts_of_highlight.all()
for post in posts:
for comment in post.getTree(exclude_root=False):
context['entries'].append(comment.getAttr(forum))
claims = highlight.claim_set.all()
for claim in claims:
context['entries'].append(claim.getAttr(forum))
context['entries'] = sorted(context['entries'], key=lambda en: en['created_at_full'], reverse=True)
response['html'] = render_to_string("feed/activity-feed-doc.html", context)
return HttpResponse(json.dumps(response), mimetype='application/json')
if action == 'create':
if not request.user.is_authenticated():
return HttpResponse("Please log in first.", status=403)
now = timezone.now()
newPost = Post(forum_id=request.session['forum_id'], content_type='comment', created_at=now, updated_at=now)
if 'actual_user_id' in request.session:
newPost.author = User.objects.get(id=request.session['actual_user_id'])
newPost.delegator = request.user
else:
newPost.author = request.user
newPost.content = request.REQUEST.get('content')
# depending on whether this post is a reply, decide who are to be notified
request_action = request.REQUEST.get('request_action', 'false')
request_action_bool = False
if request_action == 'true':
request_action_bool = True
reply_type = request.REQUEST.get('reply_type')
if reply_type: # replying another post, or event
reply_id = request.REQUEST.get('reply_id')
if request_action_bool:
message_type = 'reply-action'
else:
message_type = 'reply'
if reply_type == 'event':
event = Event.objects.get(id=reply_id)
newPost.target_event = event
receiver = event.user
elif reply_type == 'entry':
entry = Entry.objects.get(id=reply_id)
newPost.target_entry = entry
receiver = entry.author
newPost.save()
Message.objects.create(
forum=forum,
sender=newPost.author,
receiver=receiver,
content=newPost.content,
created_at=now,
content_type=message_type,
target_entry=newPost
)
else: # targeting at a highlight or a claim
source = request.REQUEST.get('type')
if source == 'highlight':
highlight = Highlight.objects.get(id=request.REQUEST.get('highlight_id'))
newPost.highlight = highlight
elif source == 'claim':
slot = Claim.objects.get(id=request.REQUEST.get('slot_id'))
newPost.target_entry = slot
collective = request.REQUEST.get('collective', 'false')
if collective == 'true':
newPost.collective = True
else:
newPost.collective = False
newPost.save()
if not reply_type: # notify everyone
message_type = 'post'
if request_action_bool:
message_type = 'post-action'
for panelist in forum.members.filter(role='panelist'):
Message.objects.create(
forum=forum,
sender=newPost.author,
receiver=panelist.user,
content=newPost.content,
created_at=now,
content_type=message_type,
target_entry=newPost
)
return HttpResponse(json.dumps(response), mimetype='application/json')
if action == 'delete':
entry_id = request.REQUEST.get('entry_id')
now = timezone.now()
post = Post.objects.get(id=entry_id)
post.is_deleted = True
post.updated_at = now
post.save()
return HttpResponse(json.dumps(response), mimetype='application/json')
def api_qa(request):
response = {}
forum = Forum.objects.get(id=request.session['forum_id'])
action = request.REQUEST.get('action')
if action == 'load-thread':
# given a question, load its discussions
question = ClaimComment.objects.get(id=request.REQUEST.get('question_id'))
context = {
'entries': [],
'source': 'qa'
}
comments = question.get_descendants(include_self=True)
context["comments"] = question.get_descendants(include_self=True)
response['html'] = render_to_string("qa/question-expand.html", context)
if action == 'raise-question':
author = request.user
forum = Forum.objects.get(id=request.session['forum_id'])
comment_type = "question"
text = request.REQUEST.get('text')
created_at = timezone.now()
nugget_id = request.REQUEST.get('nugget_id')
if (nugget_id == ""):
newClaimComment = ClaimComment(author = author, text = text, created_at = created_at, comment_type = comment_type, forum = forum)
else:
newClaimComment = ClaimComment(author = author, text = text, created_at = created_at, comment_type = comment_type, forum = forum, nugget_id = nugget_id)
newClaimComment.save()
if action == 'get-question-list' or action == 'raise-question':
author = request.user
context = {
'questions': []
}
questions = ClaimComment.objects.filter(forum = forum, comment_type='question', parent__isnull = True)
context['questions'] = context['questions']
for question in questions:
entry = {}
entry["reply_count"] = question.get_descendant_count()
if (entry["reply_count"] > 0):
entry["last_reply_pretty"] = utils.pretty_date(question.get_descendants(include_self=True).order_by("-created_at")[0].created_at)
entry["text"] = question.text
entry["id"] = question.id
entry["is_answered"] = question.is_answered
entry["created_at"] = question.created_at
entry['comments'] = question.get_descendants(include_self=True)
entry["entry_type"] = "claim_" + str(question.comment_type)
entry["author_name"] = question.author.first_name + " " + question.author.last_name
entry["author_id"] = question.author.id
entry["is_author"] = (question.author == request.user)
            if (Role.objects.filter(user_id = request.user.id, forum =forum).count() > 0):
entry["is_facilitator"] = (Role.objects.get(user = request.user, forum =forum).role) == "facilitator"
entry["author_role"] = Role.objects.get(user = question.author, forum =forum).role
entry["author_intro"] = UserInfo.objects.get(user = question.author).description
entry["created_at_pretty"] = utils.pretty_date(question.created_at)
# vote for importance
entry["vote_count"] = ClaimQuestionVote.objects.filter(question_id = question.id).count()
if (entry["vote_count"] > 0):
tmp = []
for vote in ClaimQuestionVote.objects.filter(question_id = question.id):
tmp.append(vote.voter.last_name + " " + vote.voter.first_name)
entry["voted_authors"] = ", ".join(tmp)
# vote for experts
entry["expert_vote_count"] = QuestionNeedExpertVote.objects.filter(question_id = question.id).count()
entry["has_facilitator_vote"] = False
if (entry["expert_vote_count"] > 0):
tmp = []
for vote in QuestionNeedExpertVote.objects.filter(question_id = question.id):
tmp.append(vote.voter.last_name + " " + vote.voter.first_name)
if (Role.objects.get(user = vote.voter, forum =forum).role == "facilitator"):
entry["has_facilitator_vote"] = True
entry["expert_voted_authors"] = ", ".join(tmp)
if (ClaimQuestionVote.objects.filter(voter_id = author.id, question_id = question.id).count() > 0):
entry["voted"] = True
if (QuestionNeedExpertVote.objects.filter(voter_id = author.id, question_id = question.id).count() > 0):
entry["expert_voted"] = True
try:
claimVersion = ClaimVersion.objects.filter(claim = question.claim, is_adopted = True).order_by("-created_at")[0]
entry['claim_id'] = question.claim.id
entry['claim_content'] = claimVersion.content
entry['claim_author'] = question.claim.author.first_name + " " + question.claim.author.last_name
entry['claim_created_at'] = utils.pretty_date(question.claim.created_at)
except:
pass
if (question.nugget):
docsection = DocSection.objects.get(id=question.nugget.context.id)
entry['doc_id'] = docsection.doc.id
entry['nugget_id'] = question.nugget.id
context['questions'].append(entry)
context['questions'] = sorted(context['questions'], key=lambda en: (en['is_answered'], en['created_at']), reverse=True)
response['html'] = render_to_string('qa/question-list.html', context)
return HttpResponse(json.dumps(response), mimetype='application/json')
# def api_qa_backup(request):
# response = {}
# forum = Forum.objects.get(id=request.session['forum_id'])
# action = request.REQUEST.get('action')
# if action == 'load-thread':
# # given a question, load its discussions
# question = Post.objects.get(id=request.REQUEST.get('question_id'))
# context = {
# 'entries': [],
# 'source': 'qa'
# }
# for post in question.getTree(exclude_root=True): # don't include root
# context['entries'].append(post.getAttr(forum))
# context['entries'] = sorted(context['entries'], key=lambda en: en['created_at_full'], reverse=True)
# response['html'] = render_to_string("feed/activity-feed-doc.html", context)
# if action == 'raise-question':
# now = timezone.now()
# content = request.REQUEST.get('content')
# Post.objects.create(forum_id=request.session['forum_id'], author=request.user, content=content,
# created_at=now, updated_at=now, content_type='question')
# if action == 'get-all-questions' or action == 'raise-question':
# context = {
# 'questions': []
# }
# questions = Post.objects.filter(forum=forum, content_type='question', is_deleted=False)
# for question in questions:
# question_info = question.getAttr(forum)
# all_replies = question.getTree(exclude_root=True)
# question_info['treesize'] = len(all_replies)
# if question_info['treesize'] > 0:
# last_reply = sorted(all_replies, key=lambda en: en.created_at, reverse=True)[0]
# last_reply_info = last_reply.getAttr(forum)
# question_info['last_reply'] = last_reply_info['updated_at']
# question_info['last_reply_full'] = last_reply_info['updated_at_full']
# else:
# question_info['last_reply_full'] = question_info['created_at_full']
# try:
# docsection = DocSection.objects.get(id=question.highlight.context.id)
# question_info['doc_name'] = docsection.doc.title
# question_info['doc_id'] = docsection.doc.id
# question_info['highlight_id'] = question.highlight.id
# except:
# pass
# context['questions'].append(question_info)
# context['questions'] = sorted(context['questions'], key=lambda en: (en['last_reply_full'], en['created_at_full']), reverse=True)
# response['html'] = render_to_string('doc/qa-panel.html', context)
# return HttpResponse(json.dumps(response), mimetype='application/json') | 52.814815 | 178 | 0.60561 |
4a2624eb6f811e1664aa8d54e894814659c3cabf | 819 | py | Python | tests/test_requests.py | libero/search | f13c7fe2aa5f3cd1e2f62234995788bed7147b91 | [
"MIT"
] | null | null | null | tests/test_requests.py | libero/search | f13c7fe2aa5f3cd1e2f62234995788bed7147b91 | [
"MIT"
] | 14 | 2019-01-31T08:34:30.000Z | 2019-11-21T10:06:13.000Z | tests/test_requests.py | libero/search | f13c7fe2aa5f3cd1e2f62234995788bed7147b91 | [
"MIT"
] | 3 | 2019-01-30T10:49:01.000Z | 2019-06-11T14:42:03.000Z | import logging
from search.requests import request_data
def test_request_data_returns_response(requests_mock):
url = 'http://some-url.com'
response_text = 'response received'
requests_mock.get(url, text=response_text)
response = request_data(url)
assert response.status_code == 200
assert response.text == response_text
def test_request_data_logs_errors_and_returns_none(requests_mock, caplog):
caplog.set_level(logging.ERROR, logger='search')
url = 'http://some-url.com'
reason = 'Not Found'
status_code = 404
requests_mock.get(url, reason=reason, status_code=status_code)
response = request_data(url)
assert response is None
expected_log = f'{reason} {status_code}: Unable to retrieve content from {url}.'
assert caplog.messages[-1] == expected_log
| 29.25 | 84 | 0.737485 |
4a2628394093227b6adb7e6d3afbed3ad99adbb2 | 1,291 | py | Python | ccontrol/model.py | ThomasLecat/udacity-continuous-control | fa63aef00abc4446c860160ef17ef28c32ec71d8 | [
"MIT"
] | null | null | null | ccontrol/model.py | ThomasLecat/udacity-continuous-control | fa63aef00abc4446c860160ef17ef28c32ec71d8 | [
"MIT"
] | null | null | null | ccontrol/model.py | ThomasLecat/udacity-continuous-control | fa63aef00abc4446c860160ef17ef28c32ec71d8 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
class Actor(nn.Module):
"""MultiLayer perceptron. Approximates \argmax_a{Q(s,a)}"""
def __init__(self, state_size, num_actions):
super().__init__()
self.fc1 = nn.Linear(state_size, 128)
self.fc2 = nn.Linear(128, 128)
self.fc3 = nn.Linear(128, 64)
self.fc4 = nn.Linear(64, num_actions)
self.batch_norm = nn.BatchNorm1d(128)
def forward(self, observations):
x = F.relu(self.batch_norm(self.fc1(observations)))
x = F.relu(self.fc2(x))
x = F.relu(self.fc3(x))
        return torch.tanh(self.fc4(x))  # torch.tanh: F.tanh is deprecated
class Critic(nn.Module):
"""Q-network model"""
def __init__(self, state_size, action_size):
super(Critic, self).__init__()
self.fc1 = nn.Linear(state_size, 128)
self.fc2 = nn.Linear(128 + action_size, 128)
self.fc3 = nn.Linear(128, 64)
self.fc4 = nn.Linear(64, 1)
self.batch_norm = nn.BatchNorm1d(128)
def forward(self, observations, actions):
"""Get Q(s,a) for all agents."""
x = F.relu(self.batch_norm(self.fc1(observations)))
x = torch.cat((x, actions), dim=1)
x = F.relu(self.fc2(x))
x = F.relu(self.fc3(x))
return self.fc4(x)
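# --- Illustrative usage sketch (not part of the original module) ---
# Shows the tensor shapes the two networks expect. state_size=33 and
# num_actions=4 are assumed values (Reacher-like task), not taken from this file.
if __name__ == "__main__":
    state_size, num_actions = 33, 4
    actor = Actor(state_size, num_actions)
    critic = Critic(state_size, num_actions)
    states = torch.randn(8, state_size)   # batch of 8 observations
    actions = actor(states)               # -> (8, num_actions), squashed to [-1, 1]
    q_values = critic(states, actions)    # -> (8, 1)
    print(actions.shape, q_values.shape)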
| 29.340909 | 63 | 0.60031 |
4a262850658056b2f99c0bd0225c1f7e5593496e | 188 | py | Python | Radius.py | Dhiviani/python-program | 30f1a84ff8888987ecb7fb5e25543305a9f93984 | [
"MIT"
] | null | null | null | Radius.py | Dhiviani/python-program | 30f1a84ff8888987ecb7fb5e25543305a9f93984 | [
"MIT"
] | null | null | null | Radius.py | Dhiviani/python-program | 30f1a84ff8888987ecb7fb5e25543305a9f93984 | [
"MIT"
] | null | null | null | #area of circle
r = 1.1
A = 3.14 * r ** 2
print("Area of circle:", A)
# file extension
File = input("enter the file:")
Extension = File.split(".")
print("Extension of file:", repr(Extension[-1]))
| 18.8 | 48 | 0.664894 |
4a262934201bb00c171ab64c91ae79223b4f5314 | 2,210 | py | Python | training/misc.py | hedscan/stylegan2-ada | d6142988f6301cd7089833c4793e83a42ea03682 | [
"BSD-Source-Code"
] | null | null | null | training/misc.py | hedscan/stylegan2-ada | d6142988f6301cd7089833c4793e83a42ea03682 | [
"BSD-Source-Code"
] | null | null | null | training/misc.py | hedscan/stylegan2-ada | d6142988f6301cd7089833c4793e83a42ea03682 | [
"BSD-Source-Code"
] | null | null | null | import glob
import os
import re
from pathlib import Path
def get_parent_dir(run_dir):
out_dir = Path(run_dir).parent
return out_dir
def locate_latest_pkl(out_dir):
all_pickle_names = sorted(glob.glob(os.path.join(out_dir, '0*', 'network-*.pkl')))
try:
latest_pickle_name = all_pickle_names[-1]
except IndexError:
latest_pickle_name = None
return latest_pickle_name
def parse_kimg_from_network_name(network_pickle_name):
if network_pickle_name is not None:
resume_run_id = os.path.basename(os.path.dirname(network_pickle_name))
RE_KIMG = re.compile(r'network-snapshot-(\d+).pkl')
try:
kimg = int(RE_KIMG.match(os.path.basename(network_pickle_name)).group(1))
except AttributeError:
kimg = 0.0
else:
kimg = 0.0
return float(kimg)
def locate_latest_log_file(out_dir):
all_runs = sorted(glob.glob(os.path.join(out_dir, '0*')))
try:
last_run = all_runs[-2] # last dir is the current run
except IndexError:
raise ValueError("Could not find dir for last run")
try:
latest_log_file = glob.glob(os.path.join(last_run, 'log.txt'))[0]
except IndexError:
raise ValueError("log.txt from last run not found")
return latest_log_file
def parse_resume_augment_val_from_log_file(logfile, kimg):
# Open log file
with open(logfile) as f:
# read training tick summaries from log file into list
ticklines = [line.rstrip('\n') for line in f if 'tick' in line]
if not ticklines:
raise ValueError(
f"{os.path.basename(logfile)} does not contain training ticks"
" did you specify the right out_dir?"
)
# Create a dict of param: value pairs per tick
ticks = [
{i.split(maxsplit=1)[0]: i.split(maxsplit=1)[1].strip()
for i in splitline}
# Regex splits on whitespace followed by char a-z
for splitline in [re.split(r'\s(?=[a-z])', line) for line in ticklines]]
# Get actual tick we are resuming from (not necessarily last)
resume_tick = next(tick for tick in ticks if float(tick['kimg']) == kimg)
return float(resume_tick['augment'])
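# --- Illustrative usage sketch (not part of the original module) ---
# Typical resume flow; "results/00002-train" is a made-up example run directory.
if __name__ == "__main__":
    out_dir = get_parent_dir("results/00002-train")
    latest_pkl = locate_latest_pkl(out_dir)
    resume_kimg = parse_kimg_from_network_name(latest_pkl)
    if latest_pkl is not None:
        logfile = locate_latest_log_file(out_dir)
        resume_augment = parse_resume_augment_val_from_log_file(logfile, resume_kimg)
        print(latest_pkl, resume_kimg, resume_augment)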
| 32.985075 | 86 | 0.659729 |
4a26295bf7ff3eae5912aa9b90e5266f6ad03d39 | 805 | py | Python | core/showlist.py | plonibarploni/DarkSpiritz | f4c63db82e0785dc7f4594dca44891a1bd359d4e | [
"MIT"
] | null | null | null | core/showlist.py | plonibarploni/DarkSpiritz | f4c63db82e0785dc7f4594dca44891a1bd359d4e | [
"MIT"
] | null | null | null | core/showlist.py | plonibarploni/DarkSpiritz | f4c63db82e0785dc7f4594dca44891a1bd359d4e | [
"MIT"
] | 1 | 2019-02-01T14:37:05.000Z | 2019-02-01T14:37:05.000Z | # -*- coding: utf-8 -*-
import glob
def showlist():
print("""
Categories:
===========
""")
    excluded = ("LICENSE", "README.md", "config", "core", "install", "storage",
                "CODE_OF_CONDUCT.md", "modules", "start.py", "requirements.txt")
    for d in glob.iglob('*'):
        if not any(name in d for name in excluded):
            print("""%s""" % (d.upper()))
| 38.333333 | 85 | 0.295652 |
4a262976f54eb54dbe27c6170cc784a8ab0beb9b | 91,097 | py | Python | tests/test_50_server.py | geops/pysaml2 | b2335fc07d6feeaa9a05fd6f9c84f5137a48b040 | [
"Apache-2.0"
] | null | null | null | tests/test_50_server.py | geops/pysaml2 | b2335fc07d6feeaa9a05fd6f9c84f5137a48b040 | [
"Apache-2.0"
] | null | null | null | tests/test_50_server.py | geops/pysaml2 | b2335fc07d6feeaa9a05fd6f9c84f5137a48b040 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import copy
import os
from contextlib import closing
from six.moves.urllib.parse import parse_qs
import uuid
from saml2.cert import OpenSSLWrapper
from saml2.sigver import make_temp, DecryptError, EncryptError, CertificateError
from saml2.assertion import Policy
from saml2.authn_context import INTERNETPROTOCOLPASSWORD
from saml2.saml import NameID, NAMEID_FORMAT_TRANSIENT
from saml2.samlp import response_from_string
from saml2.server import Server
from saml2 import samlp
from saml2 import saml
from saml2 import client
from saml2 import config
from saml2 import extension_elements_to_elements
from saml2 import s_utils
from saml2 import sigver
from saml2 import time_util
from saml2.s_utils import OtherError
from saml2.s_utils import do_attribute_statement
from saml2.s_utils import factory
from saml2.soap import make_soap_enveloped_saml_thingy
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_REDIRECT
from pytest import raises
from pathutils import full_path
import saml2.xmldsig as ds
nid = NameID(name_qualifier="foo", format=NAMEID_FORMAT_TRANSIENT,
text="123456")
AUTHN = {
"class_ref": INTERNETPROTOCOLPASSWORD,
"authn_auth": "http://www.example.com/login"
}
def _eq(l1, l2):
return set(l1) == set(l2)
BASEDIR = os.path.abspath(os.path.dirname(__file__))
def get_ava(assertion):
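    """Collect a {friendly_name: [text values]} dict from the assertion's attribute statements."""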
ava = {}
for statement in assertion.attribute_statement:
for attr in statement.attribute:
value = []
for tmp_val in attr.attribute_value:
value.append(tmp_val.text)
key = attr.friendly_name
if key is None or len(key) == 0:
key = attr.text
ava[key] = value
return ava
def generate_cert():
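    """Create a throw-away (certificate_pem, private_key_pem) pair signed by the bundled test CA."""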
sn = uuid.uuid4().urn
cert_info = {
"cn": "localhost",
"country_code": "se",
"state": "ac",
"city": "Umea",
"organization": "ITS",
"organization_unit": "DIRG"
}
osw = OpenSSLWrapper()
ca_cert_str = osw.read_str_from_file(
full_path("root_cert/localhost.ca.crt"))
ca_key_str = osw.read_str_from_file(
full_path("root_cert/localhost.ca.key"))
req_cert_str, req_key_str = osw.create_certificate(cert_info, request=True,
sn=sn, key_length=2048)
cert_str = osw.create_cert_signed_certificate(ca_cert_str, ca_key_str,
req_cert_str)
return cert_str, req_key_str
class TestServer1():
def setup_class(self):
self.server = Server("idp_conf")
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
self.name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
self.ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["[email protected]"], "title": "The man"}
def teardown_class(self):
self.server.close()
def verify_assertion(self, assertion):
assert assertion
assert assertion[0].attribute_statement
        ava = get_ava(assertion[0])
assert ava ==\
{'mail': ['[email protected]'], 'givenName': ['Derek'],
'sn': ['Jeter'], 'title': ['The man']}
def verify_encrypted_assertion(self, assertion, decr_text):
self.verify_assertion(assertion)
assert assertion[0].signature is None
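        # Self-contained encryption re-declares the SAML namespaces on the
        # assertion itself (the "encas" prefixes checked for below).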
assert 'EncryptedAssertion><encas1:Assertion xmlns:encas0="http://www.w3.org/2001/XMLSchema-instance" ' \
'xmlns:encas1="urn:oasis:names:tc:SAML:2.0:assertion"' in decr_text
def verify_advice_assertion(self, resp, decr_text):
assert resp.assertion[0].signature is None
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_issuer(self):
issuer = self.server._issuer()
assert isinstance(issuer, saml.Issuer)
assert _eq(issuer.keyswv(), ["text", "format"])
assert issuer.format == saml.NAMEID_FORMAT_ENTITY
assert issuer.text == self.server.config.entityid
def test_assertion(self):
assertion = s_utils.assertion_factory(
subject=factory(
saml.Subject, text="_aaa",
name_id=factory(saml.NameID,
format=saml.NAMEID_FORMAT_TRANSIENT)),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
)
assert _eq(assertion.keyswv(), ['attribute_statement', 'issuer', 'id',
'subject', 'issue_instant', 'version'])
assert assertion.version == "2.0"
assert assertion.issuer.text == "urn:mace:example.com:saml:roland:idp"
#
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
assert len(attribute_statement.attribute) == 2
attr0 = attribute_statement.attribute[0]
attr1 = attribute_statement.attribute[1]
if attr0.attribute_value[0].text == "Derek":
assert attr0.friendly_name == "givenName"
assert attr1.friendly_name == "sn"
assert attr1.attribute_value[0].text == "Jeter"
else:
assert attr1.friendly_name == "givenName"
assert attr1.attribute_value[0].text == "Derek"
assert attr0.friendly_name == "sn"
assert attr0.attribute_value[0].text == "Jeter"
subject = assertion.subject
assert _eq(subject.keyswv(), ["text", "name_id"])
assert subject.text == "_aaa"
assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
def test_response(self):
response = sigver.response_factory(
in_response_to="_012345",
destination="https:#www.example.com",
status=s_utils.success_status_factory(),
assertion=s_utils.assertion_factory(
subject=factory(saml.Subject, text="_aaa",
name_id=saml.NAMEID_FORMAT_TRANSIENT),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
),
issuer=self.server._issuer(),
)
print(response.keyswv())
assert _eq(response.keyswv(), ['destination', 'assertion', 'status',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert response.destination == "https:#www.example.com"
assert response.in_response_to == "_012345"
#
status = response.status
print(status)
assert status.status_code.value == samlp.STATUS_SUCCESS
def test_parse_faulty_request(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com", id="id1")
# should raise an error because faulty spentityid
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(
binding, "%s" % authn_request, "http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
with raises(OtherError):
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
def test_parse_faulty_request_to_err_status(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com")
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
try:
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
status = None
except OtherError as oe:
print(oe.args)
status = s_utils.error_status_factory(oe)
assert status
print(status)
assert _eq(status.keyswv(), ["status_code", "status_message"])
assert status.status_message.text == 'Not destined for me!'
status_code = status.status_code
assert _eq(status_code.keyswv(), ["status_code", "value"])
assert status_code.value == samlp.STATUS_RESPONDER
assert status_code.status_code.value == samlp.STATUS_UNKNOWN_PRINCIPAL
def test_parse_ok_request(self):
req_id, authn_request = self.client.create_authn_request(
message_id="id1", destination="http://localhost:8088/sso")
print(authn_request)
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
req = self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
# returns a dictionary
print(req)
resp_args = self.server.response_args(req.message, [BINDING_HTTP_POST])
assert resp_args["destination"] == "http://lingon.catalogix.se:8087/"
assert resp_args["in_response_to"] == "id1"
name_id_policy = resp_args["name_id_policy"]
assert _eq(name_id_policy.keyswv(), ["format"])
assert name_id_policy.format == saml.NAMEID_FORMAT_TRANSIENT
assert resp_args[
"sp_entity_id"] == "urn:mace:example.com:saml:roland:sp"
def test_sso_response_with_identity(self):
name_id = self.server.ident.transient_nameid(
"https://example.com/sp", "id12")
resp = self.server.create_authn_response(
{
"eduPersonEntitlement": "Short stop",
"sn": "Jeter",
"givenName": "Derek",
"mail": "[email protected]",
"title": "The man"
},
"id12", # in_response_to
"http://localhost:8087/", # destination
"https://example.com/sp", # sp_entity_id
name_id=name_id,
authn=AUTHN
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.assertion
assertion = resp.assertion
print(assertion)
assert assertion.authn_statement
assert assertion.conditions
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
print(attribute_statement)
assert len(attribute_statement[0].attribute) == 4
# Pick out one attribute
attr = None
for attr in attribute_statement[0].attribute:
if attr.friendly_name == "givenName":
break
assert len(attr.attribute_value) == 1
assert attr.name == "urn:mace:dir:attribute-def:givenName"
assert attr.name_format == "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
value = attr.attribute_value[0]
assert value.text.strip() == "Derek"
assert value.get_type() == "xs:string"
assert assertion.subject
assert assertion.subject.name_id
assert assertion.subject.subject_confirmation
confirmation = assertion.subject.subject_confirmation[0]
print(confirmation.keyswv())
print(confirmation.subject_confirmation_data)
assert confirmation.subject_confirmation_data.in_response_to == "id12"
def test_sso_response_without_identity(self):
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=AUTHN,
release_policy=Policy(),
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion.attribute_statement
def test_sso_response_specific_instant(self):
_authn = AUTHN.copy()
_authn["authn_instant"] = 1234567890
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=_authn,
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
authn_statement = resp.assertion.authn_statement[0]
assert authn_statement.authn_instant == '2009-02-13T23:31:30Z'
def test_sso_failure_response(self):
exc = s_utils.MissingValue("eduPersonAffiliation missing")
resp = self.server.create_error_response(
"id12", "http://localhost:8087/", exc)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
print(resp.status)
assert resp.status.status_code.value == samlp.STATUS_RESPONDER
assert resp.status.status_code.status_code.value == \
samlp.STATUS_REQUEST_UNSUPPORTED
assert resp.status.status_message.text == \
"eduPersonAffiliation missing"
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion
def test_authn_response_0(self):
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["[email protected]"], "title": "The man"}
npolicy = samlp.NameIDPolicy(format=saml.NAMEID_FORMAT_TRANSIENT,
allow_create="true")
resp_str = "%s" % self.server.create_authn_response(
ava, "id1", "http://local:8087/",
"urn:mace:example.com:saml:roland:sp", npolicy,
"[email protected]", authn=AUTHN)
response = samlp.response_from_string(resp_str)
print(response.keyswv())
assert _eq(response.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
print(response.assertion[0].keyswv())
assert len(response.assertion) == 1
assert _eq(response.assertion[0].keyswv(), ['attribute_statement',
'issue_instant', 'version',
'subject', 'conditions',
'id', 'issuer',
'authn_statement'])
assertion = response.assertion[0]
assert len(assertion.attribute_statement) == 1
astate = assertion.attribute_statement[0]
print(astate)
assert len(astate.attribute) == 4
def test_signed_response(self):
name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["[email protected]"], "title": "The man"}
signed_resp = self.server.create_authn_response(
ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=name_id,
sign_assertion=True
)
print(signed_resp)
assert signed_resp
sresponse = response_from_string(signed_resp)
        # It's the assertions that are signed, not the response per se
assert len(sresponse.assertion) == 1
assertion = sresponse.assertion[0]
        # Since the response is created dynamically we don't know the signature
        # value, just that there should be one.
assert assertion.signature.signature_value.text != ""
def test_signed_response_1(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(sresponse.assertion)
def test_signed_response_2(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
        assert sresponse.assertion[0].signature is None
def test_signed_response_3(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
        assert sresponse.signature is None
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(sresponse.assertion)
def test_encrypted_signed_response_1(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id, id_attr="")
assert valid
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id, id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(
resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs assertions.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id,
# id_attr="")
assert valid
def test_encrypted_signed_response_2(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
decr_text_old = copy.deepcopy("%s" % signed_resp)
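        # The assertion is encrypted for the SP's second encryption keypair,
        # so decrypting with the first keypair is expected to fail.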
with raises(DecryptError):
decr_text = self.server.sec.decrypt(
signed_resp,
self.client.config.encryption_keypairs[0]["key_file"],
)
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
assert decr_text != decr_text_old
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
        assert resp.assertion[0].signature is None
self.verify_assertion(resp.assertion)
def test_encrypted_signed_response_3(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=False,
encrypt_cert_assertion=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(resp.assertion)
assert 'xmlns:encas' not in decr_text
def test_encrypted_signed_response_4(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id,
id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(decr_text, key_fd.name)
resp = samlp.response_from_string(decr_text)
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
assertion = \
extension_elements_to_elements(assertion[0].advice.encrypted_assertion[0].extension_elements,[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs the assertion in the advice.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id,
# id_attr="")
assert valid
def test_encrypted_response_1(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_2(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = self.server.sec.decrypt(decr_text_1, key_fd.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_3(self):
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_assertion, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_4(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_5(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_6(self):
_server = Server("idp_conf_verify_cert")
cert_str_advice, cert_key_str_advice = generate_cert()
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
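        # Two layers of encryption: the outer EncryptedAssertion uses the
        # assertion certificate and the PEFIM advice inside it uses the advice
        # certificate, so decryption below happens in two steps.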
key_fd1 = make_temp(cert_key_str_assertion, decode=False)
decr_text_1 = _server.sec.decrypt(_resp, key_fd1.name)
key_fd2 = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = _server.sec.decrypt(decr_text_1, key_fd2.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_7(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
decr_text_2 = self.server.sec.decrypt(decr_text_1, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_8(self):
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
with raises(EncryptError):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
_server = Server("idp_conf_verify_cert")
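        # With certificate verification enabled, the bogus certificates are
        # rejected with CertificateError instead of failing later with EncryptError.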
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
with raises(CertificateError):
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
def test_encrypted_response_9(self):
_server = Server("idp_conf_sp_no_encrypt")
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
self.verify_assertion([_resp.assertion])
def test_slo_http_post(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
self.client.users.add_information_about_person(sinfo)
req_id, logout_request = self.client.create_logout_request(
destination="http://localhost:8088/slop", name_id=nid,
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
intermed = base64.b64encode(str(logout_request).encode('utf-8'))
#saml_soap = make_soap_enveloped_saml_thingy(logout_request)
request = self.server.parse_logout_request(intermed, BINDING_HTTP_POST)
assert request
def test_slo_soap(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp = client.Saml2Client(config_file="server_conf")
sp.users.add_information_about_person(sinfo)
req_id, logout_request = sp.create_logout_request(
name_id=nid, destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
#_ = s_utils.deflate_and_base64_encode("%s" % (logout_request,))
saml_soap = make_soap_enveloped_saml_thingy(logout_request)
self.server.ident.close()
with closing(Server("idp_soap_conf")) as idp:
request = idp.parse_logout_request(saml_soap)
idp.ident.close()
assert request
# ------------------------------------------------------------------------
class TestServer1NonAsciiAva():
def setup_class(self):
self.server = Server("idp_conf")
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
self.name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
self.ava = {"givenName": ["Dave"], "sn": ["Concepción"],
"mail": ["[email protected]"], "title": "#13"}
def teardown_class(self):
self.server.close()
def verify_assertion(self, assertion):
assert assertion
assert assertion[0].attribute_statement
ava = get_ava(assertion[0])
assert ava == \
{"givenName": ["Dave"], "sn": [u"Concepción"],
"mail": ["[email protected]"], "title": ["#13"]}
def verify_encrypted_assertion(self, assertion, decr_text):
self.verify_assertion(assertion)
assert assertion[0].signature is None
assert 'EncryptedAssertion><encas1:Assertion xmlns:encas0="http://www.w3.org/2001/XMLSchema-instance" ' \
'xmlns:encas1="urn:oasis:names:tc:SAML:2.0:assertion"' in decr_text
def verify_advice_assertion(self, resp, decr_text):
assert resp.assertion[0].signature is None
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_issuer(self):
issuer = self.server._issuer()
assert isinstance(issuer, saml.Issuer)
assert _eq(issuer.keyswv(), ["text", "format"])
assert issuer.format == saml.NAMEID_FORMAT_ENTITY
assert issuer.text == self.server.config.entityid
def test_assertion(self):
assertion = s_utils.assertion_factory(
subject=factory(
saml.Subject, text="_aaa",
name_id=factory(saml.NameID,
format=saml.NAMEID_FORMAT_TRANSIENT)),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
)
assert _eq(assertion.keyswv(), ['attribute_statement', 'issuer', 'id',
'subject', 'issue_instant', 'version'])
assert assertion.version == "2.0"
assert assertion.issuer.text == "urn:mace:example.com:saml:roland:idp"
#
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
assert len(attribute_statement.attribute) == 2
attr0 = attribute_statement.attribute[0]
attr1 = attribute_statement.attribute[1]
if attr0.attribute_value[0].text == "Derek":
assert attr0.friendly_name == "givenName"
assert attr1.friendly_name == "sn"
assert attr1.attribute_value[0].text == "Jeter"
else:
assert attr1.friendly_name == "givenName"
assert attr1.attribute_value[0].text == "Derek"
assert attr0.friendly_name == "sn"
assert attr0.attribute_value[0].text == "Jeter"
#
subject = assertion.subject
assert _eq(subject.keyswv(), ["text", "name_id"])
assert subject.text == "_aaa"
assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
def test_response(self):
response = sigver.response_factory(
in_response_to="_012345",
destination="https:#www.example.com",
status=s_utils.success_status_factory(),
assertion=s_utils.assertion_factory(
subject=factory(saml.Subject, text="_aaa",
name_id=saml.NAMEID_FORMAT_TRANSIENT),
attribute_statement=do_attribute_statement(
{
("", "", "sn"): ("Jeter", ""),
("", "", "givenName"): ("Derek", ""),
}
),
issuer=self.server._issuer(),
),
issuer=self.server._issuer(),
)
print(response.keyswv())
assert _eq(response.keyswv(), ['destination', 'assertion', 'status',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert response.destination == "https:#www.example.com"
assert response.in_response_to == "_012345"
#
status = response.status
print(status)
assert status.status_code.value == samlp.STATUS_SUCCESS
def test_parse_faulty_request(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com", id="id1")
# should raise an error because faulty spentityid
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(
binding, "%s" % authn_request, "http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
with raises(OtherError):
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
def test_parse_faulty_request_to_err_status(self):
req_id, authn_request = self.client.create_authn_request(
destination="http://www.example.com")
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
try:
self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
status = None
except OtherError as oe:
print(oe.args)
status = s_utils.error_status_factory(oe)
assert status
print(status)
assert _eq(status.keyswv(), ["status_code", "status_message"])
assert status.status_message.text == 'Not destined for me!'
status_code = status.status_code
assert _eq(status_code.keyswv(), ["status_code", "value"])
assert status_code.value == samlp.STATUS_RESPONDER
assert status_code.status_code.value == samlp.STATUS_UNKNOWN_PRINCIPAL
def test_parse_ok_request(self):
req_id, authn_request = self.client.create_authn_request(
message_id="id1", destination="http://localhost:8088/sso")
print(authn_request)
binding = BINDING_HTTP_REDIRECT
htargs = self.client.apply_binding(binding, "%s" % authn_request,
"http://www.example.com", "abcd")
_dict = parse_qs(htargs["headers"][0][1].split('?')[1])
print(_dict)
req = self.server.parse_authn_request(_dict["SAMLRequest"][0], binding)
# returns a dictionary
print(req)
resp_args = self.server.response_args(req.message, [BINDING_HTTP_POST])
assert resp_args["destination"] == "http://lingon.catalogix.se:8087/"
assert resp_args["in_response_to"] == "id1"
name_id_policy = resp_args["name_id_policy"]
assert _eq(name_id_policy.keyswv(), ["format"])
assert name_id_policy.format == saml.NAMEID_FORMAT_TRANSIENT
assert resp_args[
"sp_entity_id"] == "urn:mace:example.com:saml:roland:sp"
def test_sso_response_with_identity(self):
name_id = self.server.ident.transient_nameid(
"https://example.com/sp", "id12")
resp = self.server.create_authn_response(
{
"eduPersonEntitlement": "Short stop",
"sn": "Jeter",
"givenName": "Derek",
"mail": "[email protected]",
"title": "The man"
},
"id12", # in_response_to
"http://localhost:8087/", # destination
"https://example.com/sp", # sp_entity_id
name_id=name_id,
authn=AUTHN
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.assertion
assertion = resp.assertion
print(assertion)
assert assertion.authn_statement
assert assertion.conditions
assert assertion.attribute_statement
attribute_statement = assertion.attribute_statement
print(attribute_statement)
assert len(attribute_statement[0].attribute) == 4
# Pick out one attribute
attr = None
for attr in attribute_statement[0].attribute:
if attr.friendly_name == "givenName":
break
assert len(attr.attribute_value) == 1
assert attr.name == "urn:mace:dir:attribute-def:givenName"
assert attr.name_format == "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
value = attr.attribute_value[0]
assert value.text.strip() == "Derek"
assert value.get_type() == "xs:string"
assert assertion.subject
assert assertion.subject.name_id
assert assertion.subject.subject_confirmation
confirmation = assertion.subject.subject_confirmation[0]
print(confirmation.keyswv())
print(confirmation.subject_confirmation_data)
assert confirmation.subject_confirmation_data.in_response_to == "id12"
def test_sso_response_without_identity(self):
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=AUTHN,
release_policy=Policy(),
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
assert resp.status.status_code.value == samlp.STATUS_SUCCESS
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion.attribute_statement
def test_sso_response_specific_instant(self):
_authn = AUTHN.copy()
_authn["authn_instant"] = 1234567890
resp = self.server.create_authn_response(
{},
"id12", # in_response_to
"http://localhost:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
userid="USER1",
authn=_authn,
best_effort=True
)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer',
'assertion'])
authn_statement = resp.assertion.authn_statement[0]
assert authn_statement.authn_instant == '2009-02-13T23:31:30Z'
def test_sso_failure_response(self):
exc = s_utils.MissingValue("eduPersonAffiliation missing")
resp = self.server.create_error_response(
"id12", "http://localhost:8087/", exc)
print(resp.keyswv())
assert _eq(resp.keyswv(), ['status', 'destination', 'in_response_to',
'issue_instant', 'version', 'id', 'issuer'])
assert resp.destination == "http://localhost:8087/"
assert resp.in_response_to == "id12"
assert resp.status
print(resp.status)
assert resp.status.status_code.value == samlp.STATUS_RESPONDER
assert resp.status.status_code.status_code.value == \
samlp.STATUS_REQUEST_UNSUPPORTED
assert resp.status.status_message.text == \
"eduPersonAffiliation missing"
assert resp.issuer.text == "urn:mace:example.com:saml:roland:idp"
assert not resp.assertion
def test_authn_response_0(self):
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = client.Saml2Client(conf)
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["[email protected]"], "title": "The man"}
npolicy = samlp.NameIDPolicy(format=saml.NAMEID_FORMAT_TRANSIENT,
allow_create="true")
resp_str = "%s" % self.server.create_authn_response(
ava, "id1", "http://local:8087/",
"urn:mace:example.com:saml:roland:sp", npolicy,
"[email protected]", authn=AUTHN)
response = samlp.response_from_string(resp_str)
print(response.keyswv())
assert _eq(response.keyswv(), ['status', 'destination', 'assertion',
'in_response_to', 'issue_instant',
'version', 'issuer', 'id'])
print(response.assertion[0].keyswv())
assert len(response.assertion) == 1
assert _eq(response.assertion[0].keyswv(), ['attribute_statement',
'issue_instant', 'version',
'subject', 'conditions',
'id', 'issuer',
'authn_statement'])
assertion = response.assertion[0]
assert len(assertion.attribute_statement) == 1
astate = assertion.attribute_statement[0]
print(astate)
assert len(astate.attribute) == 4
def test_signed_response(self):
name_id = self.server.ident.transient_nameid(
"urn:mace:example.com:saml:roland:sp", "id12")
ava = {"givenName": ["Derek"], "sn": ["Jeter"],
"mail": ["[email protected]"], "title": "The man"}
signed_resp = self.server.create_authn_response(
ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=name_id,
sign_assertion=True
)
print(signed_resp)
assert signed_resp
sresponse = response_from_string(signed_resp)
        # It's the assertions that are signed, not the response per se
assert len(sresponse.assertion) == 1
assertion = sresponse.assertion[0]
        # Since the response is created dynamically we don't know the signature
        # value, just that there should be one.
assert assertion.signature.signature_value.text != ""
def test_signed_response_1(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(sresponse.assertion)
def test_signed_response_2(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
        assert sresponse.assertion[0].signature is None
def test_signed_response_3(self):
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=True,
)
sresponse = response_from_string(signed_resp)
        assert sresponse.signature is None
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(sresponse.assertion)
def test_encrypted_signed_response_1(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id, id_attr="")
assert valid
valid = self.server.sec.verify_signature(
signed_resp, self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=sresponse.assertion[0].id, id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.assertion[0].advice.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(
resp.assertion[0].advice.encrypted_assertion[0].extension_elements,
[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs assertions.
assert assertion[0].signature is None
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id,
# id_attr="")
assert valid
def test_encrypted_signed_response_2(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
decr_text_old = copy.deepcopy("%s" % signed_resp)
with raises(DecryptError):
decr_text = self.server.sec.decrypt(
signed_resp,
self.client.config.encryption_keypairs[0]["key_file"],
)
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
assert decr_text != decr_text_old
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
        assert resp.assertion[0].signature is None
self.verify_assertion(resp.assertion)
def test_encrypted_signed_response_3(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=False,
encrypt_cert_assertion=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(signed_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id,
id_attr="")
assert valid
self.verify_assertion(resp.assertion)
assert 'xmlns:encas' not in decr_text
def test_encrypted_signed_response_4(self):
cert_str, cert_key_str = generate_cert()
signed_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=True,
sign_assertion=True,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str,
)
sresponse = response_from_string(signed_resp)
valid = self.server.sec.verify_signature(signed_resp,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:protocol:Response',
node_id=sresponse.id,
id_attr="")
assert valid
decr_text = self.server.sec.decrypt(signed_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
valid = self.server.sec.verify_signature(decr_text,
self.server.config.cert_file,
node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
node_id=resp.assertion[0].id,
id_attr="")
assert valid
key_fd = make_temp(cert_key_str, decode=False)
decr_text = self.server.sec.decrypt(decr_text, key_fd.name)
resp = samlp.response_from_string(decr_text)
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
assertion = \
extension_elements_to_elements(assertion[0].advice.encrypted_assertion[0].extension_elements,[saml, samlp])
self.verify_assertion(assertion)
        # PEFIM never signs the assertion in the advice
assert assertion[0].signature is None
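        # 'valid' asserted below still holds the result of the assertion-signature
        # check performed above; the advice assertion itself carries no signature.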
#valid = self.server.sec.verify_signature(decr_text,
# self.server.config.cert_file,
# node_name='urn:oasis:names:tc:SAML:2.0:assertion:Assertion',
# node_id=assertion[0].id,
# id_attr="")
assert valid
def test_encrypted_response_1(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_2(self):
cert_str_advice, cert_key_str_advice = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
key_fd = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = self.server.sec.decrypt(decr_text_1, key_fd.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_3(self):
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd = make_temp(cert_key_str_assertion, decode=False)
decr_text = self.server.sec.decrypt(_resp, key_fd.name)
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_4(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
assert resp.encrypted_assertion[0].extension_elements
assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_encrypted_assertion(assertion, decr_text)
def test_encrypted_response_5(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
_resp = "%s" % _resp
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text)
self.verify_advice_assertion(resp, decr_text)
def test_encrypted_response_6(self):
_server = Server("idp_conf_verify_cert")
cert_str_advice, cert_key_str_advice = generate_cert()
cert_str_assertion, cert_key_str_assertion = generate_cert()
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice=cert_str_advice,
encrypt_cert_assertion=cert_str_assertion
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
key_fd1 = make_temp(cert_key_str_assertion, decode=False)
decr_text_1 = _server.sec.decrypt(_resp, key_fd1.name)
key_fd2 = make_temp(cert_key_str_advice, decode=False)
decr_text_2 = _server.sec.decrypt(decr_text_1, key_fd2.name)
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_7(self):
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True
)
sresponse = response_from_string(_resp)
assert sresponse.signature is None
decr_text_1 = self.server.sec.decrypt(_resp, self.client.config.encryption_keypairs[1]["key_file"])
decr_text_2 = self.server.sec.decrypt(decr_text_1, self.client.config.encryption_keypairs[1]["key_file"])
resp = samlp.response_from_string(decr_text_2)
resp.assertion = extension_elements_to_elements(resp.encrypted_assertion[0].extension_elements, [saml, samlp])
self.verify_advice_assertion(resp, decr_text_2)
def test_encrypted_response_8(self):
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = self.server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except EncryptError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
_server = Server("idp_conf_verify_cert")
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True,
encrypt_cert_advice="whatever",
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
try:
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
encrypt_cert_assertion="whatever"
)
assert False, "Must throw an exception"
except CertificateError as ex:
pass
except Exception as ex:
assert False, "Wrong exception!"
def test_encrypted_response_9(self):
_server = Server("idp_conf_sp_no_encrypt")
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
pefim=True,
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=False,
encrypt_assertion_self_contained=True,
pefim=True
)
self.verify_assertion(_resp.assertion.advice.assertion)
_resp = _server.create_authn_response(
self.ava,
"id12", # in_response_to
"http://lingon.catalogix.se:8087/", # consumer_url
"urn:mace:example.com:saml:roland:sp", # sp_entity_id
name_id=self.name_id,
sign_response=False,
sign_assertion=False,
encrypt_assertion=True,
encrypt_assertion_self_contained=True,
encrypted_advice_attributes=False,
)
self.verify_assertion([_resp.assertion])
def test_slo_http_post(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
self.client.users.add_information_about_person(sinfo)
req_id, logout_request = self.client.create_logout_request(
destination="http://localhost:8088/slop", name_id=nid,
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
intermed = base64.b64encode(str(logout_request).encode('utf-8'))
#saml_soap = make_soap_enveloped_saml_thingy(logout_request)
request = self.server.parse_logout_request(intermed, BINDING_HTTP_POST)
assert request
def test_slo_soap(self):
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp = client.Saml2Client(config_file="server_conf")
sp.users.add_information_about_person(sinfo)
req_id, logout_request = sp.create_logout_request(
name_id=nid, destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
#_ = s_utils.deflate_and_base64_encode("%s" % (logout_request,))
saml_soap = make_soap_enveloped_saml_thingy(logout_request)
self.server.ident.close()
with closing(Server("idp_soap_conf")) as idp:
request = idp.parse_logout_request(saml_soap)
idp.ident.close()
assert request
# ------------------------------------------------------------------------
IDENTITY = {"eduPersonAffiliation": ["staff", "member"],
"sn": ["Jeter"], "givenName": ["Derek"],
"mail": ["[email protected]"], "title": "The man"}
class TestServer2():
def setup_class(self):
self.server = Server("restrictive_idp_conf")
def teardown_class(self):
self.server.close()
    def test_do_attribute_response(self):
aa_policy = self.server.config.getattr("policy", "idp")
print(aa_policy.__dict__)
response = self.server.create_attribute_response(
IDENTITY.copy(), "aaa", "http://example.com/sp/",
"http://www.example.com/roland/sp")
assert response is not None
assert response.destination == "http://example.com/sp/"
assert response.in_response_to == "aaa"
assert response.version == "2.0"
assert response.issuer.text == "urn:mace:example.com:saml:roland:idpr"
assert response.status.status_code.value == samlp.STATUS_SUCCESS
assert response.assertion
assertion = response.assertion
assert assertion.version == "2.0"
subject = assertion.subject
#assert subject.name_id.format == saml.NAMEID_FORMAT_TRANSIENT
assert subject.subject_confirmation
subject_conf = subject.subject_confirmation[0]
assert subject_conf.subject_confirmation_data.in_response_to == "aaa"
def _logout_request(conf_file):
conf = config.SPConfig()
conf.load_file(conf_file)
sp = client.Saml2Client(conf)
soon = time_util.in_a_while(days=1)
sinfo = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": soon,
"user": {
"givenName": "Leo",
"sn": "Laport",
}
}
sp.users.add_information_about_person(sinfo)
return sp.create_logout_request(
name_id=nid,
destination="http://localhost:8088/slo",
issuer_entity_id="urn:mace:example.com:saml:roland:idp",
reason="I'm tired of this")
class TestServerLogout():
def test_1(self):
with closing(Server("idp_slo_redirect_conf")) as server:
req_id, request = _logout_request("sp_slo_redirect_conf")
print(request)
bindings = [BINDING_HTTP_REDIRECT]
response = server.create_logout_response(request, bindings)
binding, destination = server.pick_binding("single_logout_service",
bindings, "spsso",
request)
http_args = server.apply_binding(binding, "%s" % response, destination,
"relay_state", response=True)
assert len(http_args) == 4
assert http_args["headers"][0][0] == "Location"
assert http_args["data"] == []
if __name__ == "__main__":
ts = TestServer1()
ts.setup_class()
ts.test_encrypted_signed_response_1()
| 39.03042 | 120 | 0.581446 |
4a2629daf63d7e67302e15bfa60687ea669c14f6 | 23,223 | py | Python | pandas/core/reshape/concat.py | mavismonica/pandas | dbdc55c9d59f25589d58cc60247af193f06c3c66 | [
"BSD-3-Clause"
] | 2 | 2021-06-11T12:31:28.000Z | 2021-08-17T10:56:52.000Z | pandas/core/reshape/concat.py | mavismonica/pandas | dbdc55c9d59f25589d58cc60247af193f06c3c66 | [
"BSD-3-Clause"
] | null | null | null | pandas/core/reshape/concat.py | mavismonica/pandas | dbdc55c9d59f25589d58cc60247af193f06c3c66 | [
"BSD-3-Clause"
] | null | null | null | """
Concat routines.
"""
from collections import abc
from typing import (
TYPE_CHECKING,
Iterable,
List,
Mapping,
Optional,
Type,
Union,
cast,
overload,
)
import numpy as np
from pandas._typing import FrameOrSeriesUnion, Label
from pandas.core.dtypes.concat import concat_compat
from pandas.core.dtypes.generic import ABCDataFrame, ABCSeries
from pandas.core.dtypes.missing import isna
import pandas.core.algorithms as algos
from pandas.core.arrays.categorical import (
factorize_from_iterable,
factorize_from_iterables,
)
import pandas.core.common as com
from pandas.core.indexes.api import (
Index,
MultiIndex,
all_indexes_same,
ensure_index,
get_objs_combined_axis,
get_unanimous_names,
)
import pandas.core.indexes.base as ibase
from pandas.core.internals import concatenate_block_managers
if TYPE_CHECKING:
from pandas import DataFrame, Series
from pandas.core.generic import NDFrame
# ---------------------------------------------------------------------
# Concatenate DataFrame objects
@overload
def concat(
objs: Union[Iterable["DataFrame"], Mapping[Label, "DataFrame"]],
axis=0,
join: str = "outer",
ignore_index: bool = False,
keys=None,
levels=None,
names=None,
verify_integrity: bool = False,
sort: bool = False,
copy: bool = True,
) -> "DataFrame":
...
@overload
def concat(
objs: Union[Iterable["NDFrame"], Mapping[Label, "NDFrame"]],
axis=0,
join: str = "outer",
ignore_index: bool = False,
keys=None,
levels=None,
names=None,
verify_integrity: bool = False,
sort: bool = False,
copy: bool = True,
) -> FrameOrSeriesUnion:
...
def concat(
objs: Union[Iterable["NDFrame"], Mapping[Label, "NDFrame"]],
axis=0,
join="outer",
ignore_index: bool = False,
keys=None,
levels=None,
names=None,
verify_integrity: bool = False,
sort: bool = False,
copy: bool = True,
) -> FrameOrSeriesUnion:
"""
Concatenate pandas objects along a particular axis with optional set logic
along the other axes.
Can also add a layer of hierarchical indexing on the concatenation axis,
which may be useful if the labels are the same (or overlapping) on
the passed axis number.
Parameters
----------
objs : a sequence or mapping of Series or DataFrame objects
If a mapping is passed, the sorted keys will be used as the `keys`
argument, unless it is passed, in which case the values will be
selected (see below). Any None objects will be dropped silently unless
they are all None in which case a ValueError will be raised.
axis : {0/'index', 1/'columns'}, default 0
The axis to concatenate along.
join : {'inner', 'outer'}, default 'outer'
How to handle indexes on other axis (or axes).
ignore_index : bool, default False
If True, do not use the index values along the concatenation axis. The
resulting axis will be labeled 0, ..., n - 1. This is useful if you are
concatenating objects where the concatenation axis does not have
meaningful indexing information. Note the index values on the other
axes are still respected in the join.
keys : sequence, default None
If multiple levels passed, should contain tuples. Construct
hierarchical index using the passed keys as the outermost level.
levels : list of sequences, default None
Specific levels (unique values) to use for constructing a
MultiIndex. Otherwise they will be inferred from the keys.
names : list, default None
Names for the levels in the resulting hierarchical index.
verify_integrity : bool, default False
Check whether the new concatenated axis contains duplicates. This can
be very expensive relative to the actual data concatenation.
sort : bool, default False
Sort non-concatenation axis if it is not already aligned when `join`
is 'outer'.
This has no effect when ``join='inner'``, which already preserves
the order of the non-concatenation axis.
.. versionchanged:: 1.0.0
Changed to not sort by default.
copy : bool, default True
If False, do not copy data unnecessarily.
Returns
-------
object, type of objs
When concatenating all ``Series`` along the index (axis=0), a
``Series`` is returned. When ``objs`` contains at least one
``DataFrame``, a ``DataFrame`` is returned. When concatenating along
the columns (axis=1), a ``DataFrame`` is returned.
See Also
--------
Series.append : Concatenate Series.
DataFrame.append : Concatenate DataFrames.
DataFrame.join : Join DataFrames using indexes.
DataFrame.merge : Merge DataFrames by indexes or columns.
Notes
-----
The keys, levels, and names arguments are all optional.
A walkthrough of how this method fits in with other tools for combining
pandas objects can be found `here
<https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html>`__.
Examples
--------
Combine two ``Series``.
>>> s1 = pd.Series(['a', 'b'])
>>> s2 = pd.Series(['c', 'd'])
>>> pd.concat([s1, s2])
0 a
1 b
0 c
1 d
dtype: object
Clear the existing index and reset it in the result
by setting the ``ignore_index`` option to ``True``.
>>> pd.concat([s1, s2], ignore_index=True)
0 a
1 b
2 c
3 d
dtype: object
Add a hierarchical index at the outermost level of
the data with the ``keys`` option.
>>> pd.concat([s1, s2], keys=['s1', 's2'])
s1 0 a
1 b
s2 0 c
1 d
dtype: object
Label the index keys you create with the ``names`` option.
>>> pd.concat([s1, s2], keys=['s1', 's2'],
... names=['Series name', 'Row ID'])
Series name Row ID
s1 0 a
1 b
s2 0 c
1 d
dtype: object
Combine two ``DataFrame`` objects with identical columns.
>>> df1 = pd.DataFrame([['a', 1], ['b', 2]],
... columns=['letter', 'number'])
>>> df1
letter number
0 a 1
1 b 2
>>> df2 = pd.DataFrame([['c', 3], ['d', 4]],
... columns=['letter', 'number'])
>>> df2
letter number
0 c 3
1 d 4
>>> pd.concat([df1, df2])
letter number
0 a 1
1 b 2
0 c 3
1 d 4
Combine ``DataFrame`` objects with overlapping columns
and return everything. Columns outside the intersection will
be filled with ``NaN`` values.
>>> df3 = pd.DataFrame([['c', 3, 'cat'], ['d', 4, 'dog']],
... columns=['letter', 'number', 'animal'])
>>> df3
letter number animal
0 c 3 cat
1 d 4 dog
>>> pd.concat([df1, df3], sort=False)
letter number animal
0 a 1 NaN
1 b 2 NaN
0 c 3 cat
1 d 4 dog
Combine ``DataFrame`` objects with overlapping columns
and return only those that are shared by passing ``inner`` to
the ``join`` keyword argument.
>>> pd.concat([df1, df3], join="inner")
letter number
0 a 1
1 b 2
0 c 3
1 d 4
Combine ``DataFrame`` objects horizontally along the x axis by
passing in ``axis=1``.
>>> df4 = pd.DataFrame([['bird', 'polly'], ['monkey', 'george']],
... columns=['animal', 'name'])
>>> pd.concat([df1, df4], axis=1)
letter number animal name
0 a 1 bird polly
1 b 2 monkey george
Prevent the result from including duplicate index values with the
``verify_integrity`` option.
>>> df5 = pd.DataFrame([1], index=['a'])
>>> df5
0
a 1
>>> df6 = pd.DataFrame([2], index=['a'])
>>> df6
0
a 2
>>> pd.concat([df5, df6], verify_integrity=True)
Traceback (most recent call last):
...
ValueError: Indexes have overlapping values: ['a']
"""
op = _Concatenator(
objs,
axis=axis,
ignore_index=ignore_index,
join=join,
keys=keys,
levels=levels,
names=names,
verify_integrity=verify_integrity,
copy=copy,
sort=sort,
)
return op.get_result()
class _Concatenator:
"""
Orchestrates a concatenation operation for BlockManagers
"""
def __init__(
self,
objs: Union[Iterable["NDFrame"], Mapping[Label, "NDFrame"]],
axis=0,
join: str = "outer",
keys=None,
levels=None,
names=None,
ignore_index: bool = False,
verify_integrity: bool = False,
copy: bool = True,
sort=False,
):
if isinstance(objs, (ABCSeries, ABCDataFrame, str)):
raise TypeError(
"first argument must be an iterable of pandas "
f'objects, you passed an object of type "{type(objs).__name__}"'
)
if join == "outer":
self.intersect = False
elif join == "inner":
self.intersect = True
else: # pragma: no cover
raise ValueError(
"Only can inner (intersect) or outer (union) join the other axis"
)
if isinstance(objs, abc.Mapping):
if keys is None:
keys = list(objs.keys())
objs = [objs[k] for k in keys]
else:
objs = list(objs)
if len(objs) == 0:
raise ValueError("No objects to concatenate")
if keys is None:
objs = list(com.not_none(*objs))
else:
# #1649
clean_keys = []
clean_objs = []
for k, v in zip(keys, objs):
if v is None:
continue
clean_keys.append(k)
clean_objs.append(v)
objs = clean_objs
name = getattr(keys, "name", None)
keys = Index(clean_keys, name=name)
if len(objs) == 0:
raise ValueError("All objects passed were None")
# consolidate data & figure out what our result ndim is going to be
ndims = set()
for obj in objs:
if not isinstance(obj, (ABCSeries, ABCDataFrame)):
msg = (
f"cannot concatenate object of type '{type(obj)}'; "
"only Series and DataFrame objs are valid"
)
raise TypeError(msg)
# consolidate
obj._consolidate_inplace()
ndims.add(obj.ndim)
# get the sample
# want the highest ndim that we have, and must be non-empty
# unless all objs are empty
sample: Optional["NDFrame"] = None
if len(ndims) > 1:
max_ndim = max(ndims)
for obj in objs:
if obj.ndim == max_ndim and np.sum(obj.shape):
sample = obj
break
else:
# filter out the empties if we have not multi-index possibilities
# note to keep empty Series as it affect to result columns / name
non_empties = [
obj for obj in objs if sum(obj.shape) > 0 or isinstance(obj, ABCSeries)
]
if len(non_empties) and (
keys is None and names is None and levels is None and not self.intersect
):
objs = non_empties
sample = objs[0]
if sample is None:
sample = objs[0]
self.objs = objs
# Standardize axis parameter to int
if isinstance(sample, ABCSeries):
axis = sample._constructor_expanddim._get_axis_number(axis)
else:
axis = sample._get_axis_number(axis)
# Need to flip BlockManager axis in the DataFrame special case
self._is_frame = isinstance(sample, ABCDataFrame)
if self._is_frame:
axis = sample._get_block_manager_axis(axis)
self._is_series = isinstance(sample, ABCSeries)
if not 0 <= axis <= sample.ndim:
raise AssertionError(
f"axis must be between 0 and {sample.ndim}, input was {axis}"
)
# if we have mixed ndims, then convert to highest ndim
# creating column numbers as needed
if len(ndims) > 1:
current_column = 0
max_ndim = sample.ndim
self.objs, objs = [], self.objs
for obj in objs:
ndim = obj.ndim
if ndim == max_ndim:
pass
elif ndim != max_ndim - 1:
raise ValueError(
"cannot concatenate unaligned mixed "
"dimensional NDFrame objects"
)
else:
name = getattr(obj, "name", None)
if ignore_index or name is None:
name = current_column
current_column += 1
# doing a row-wise concatenation so need everything
# to line up
if self._is_frame and axis == 1:
name = 0
# mypy needs to know sample is not an NDFrame
sample = cast("FrameOrSeriesUnion", sample)
obj = sample._constructor({name: obj})
self.objs.append(obj)
# note: this is the BlockManager axis (since DataFrame is transposed)
self.bm_axis = axis
self.axis = 1 - self.bm_axis if self._is_frame else 0
self.keys = keys
self.names = names or getattr(keys, "names", None)
self.levels = levels
self.sort = sort
self.ignore_index = ignore_index
self.verify_integrity = verify_integrity
self.copy = copy
self.new_axes = self._get_new_axes()
def get_result(self):
cons: Type[FrameOrSeriesUnion]
sample: FrameOrSeriesUnion
# series only
if self._is_series:
sample = cast("Series", self.objs[0])
# stack blocks
if self.bm_axis == 0:
name = com.consensus_name_attr(self.objs)
cons = sample._constructor
arrs = [ser._values for ser in self.objs]
res = concat_compat(arrs, axis=0)
result = cons(res, index=self.new_axes[0], name=name, dtype=res.dtype)
return result.__finalize__(self, method="concat")
# combine as columns in a frame
else:
data = dict(zip(range(len(self.objs)), self.objs))
# GH28330 Preserves subclassed objects through concat
cons = sample._constructor_expanddim
index, columns = self.new_axes
df = cons(data, index=index)
df.columns = columns
return df.__finalize__(self, method="concat")
# combine block managers
else:
sample = cast("DataFrame", self.objs[0])
mgrs_indexers = []
for obj in self.objs:
indexers = {}
for ax, new_labels in enumerate(self.new_axes):
# ::-1 to convert BlockManager ax to DataFrame ax
if ax == self.bm_axis:
# Suppress reindexing on concat axis
continue
# 1-ax to convert BlockManager axis to DataFrame axis
obj_labels = obj.axes[1 - ax]
if not new_labels.equals(obj_labels):
# We have to remove the duplicates from obj_labels
# in new labels to make them unique, otherwise we would
# duplicate or duplicates again
if not obj_labels.is_unique:
new_labels = algos.make_duplicates_of_left_unique_in_right(
np.asarray(obj_labels), np.asarray(new_labels)
)
indexers[ax] = obj_labels.reindex(new_labels)[1]
mgrs_indexers.append((obj._mgr, indexers))
new_data = concatenate_block_managers(
mgrs_indexers, self.new_axes, concat_axis=self.bm_axis, copy=self.copy
)
if not self.copy:
new_data._consolidate_inplace()
cons = sample._constructor
return cons(new_data).__finalize__(self, method="concat")
def _get_result_dim(self) -> int:
if self._is_series and self.bm_axis == 1:
return 2
else:
return self.objs[0].ndim
def _get_new_axes(self) -> List[Index]:
ndim = self._get_result_dim()
return [
self._get_concat_axis() if i == self.bm_axis else self._get_comb_axis(i)
for i in range(ndim)
]
def _get_comb_axis(self, i: int) -> Index:
data_axis = self.objs[0]._get_block_manager_axis(i)
return get_objs_combined_axis(
self.objs,
axis=data_axis,
intersect=self.intersect,
sort=self.sort,
copy=self.copy,
)
def _get_concat_axis(self) -> Index:
"""
Return index to be used along concatenation axis.
"""
if self._is_series:
if self.bm_axis == 0:
indexes = [x.index for x in self.objs]
elif self.ignore_index:
idx = ibase.default_index(len(self.objs))
return idx
elif self.keys is None:
names: List[Label] = [None] * len(self.objs)
num = 0
has_names = False
for i, x in enumerate(self.objs):
if not isinstance(x, ABCSeries):
raise TypeError(
f"Cannot concatenate type 'Series' with "
f"object of type '{type(x).__name__}'"
)
if x.name is not None:
names[i] = x.name
has_names = True
else:
names[i] = num
num += 1
if has_names:
return Index(names)
else:
return ibase.default_index(len(self.objs))
else:
return ensure_index(self.keys).set_names(self.names)
else:
indexes = [x.axes[self.axis] for x in self.objs]
if self.ignore_index:
idx = ibase.default_index(sum(len(i) for i in indexes))
return idx
if self.keys is None:
concat_axis = _concat_indexes(indexes)
else:
concat_axis = _make_concat_multiindex(
indexes, self.keys, self.levels, self.names
)
self._maybe_check_integrity(concat_axis)
return concat_axis
def _maybe_check_integrity(self, concat_index: Index):
if self.verify_integrity:
if not concat_index.is_unique:
overlap = concat_index[concat_index.duplicated()].unique()
raise ValueError(f"Indexes have overlapping values: {overlap}")
def _concat_indexes(indexes) -> Index:
return indexes[0].append(indexes[1:])
def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiIndex:
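    # Build the concat-axis MultiIndex: the outer level(s) come from `keys` and the
    # inner level(s) from each object's own index, e.g. keys=['s1', 's2'] over two
    # 2-row Series gives [('s1', 0), ('s1', 1), ('s2', 0), ('s2', 1)].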
if (levels is None and isinstance(keys[0], tuple)) or (
levels is not None and len(levels) > 1
):
zipped = list(zip(*keys))
if names is None:
names = [None] * len(zipped)
if levels is None:
_, levels = factorize_from_iterables(zipped)
else:
levels = [ensure_index(x) for x in levels]
else:
zipped = [keys]
if names is None:
names = [None]
if levels is None:
levels = [ensure_index(keys)]
else:
levels = [ensure_index(x) for x in levels]
if not all_indexes_same(indexes):
codes_list = []
# things are potentially different sizes, so compute the exact codes
# for each level and pass those to MultiIndex.from_arrays
for hlevel, level in zip(zipped, levels):
to_concat = []
for key, index in zip(hlevel, indexes):
# Find matching codes, include matching nan values as equal.
mask = (isna(level) & isna(key)) | (level == key)
if not mask.any():
raise ValueError(f"Key {key} not in level {level}")
i = np.nonzero(mask)[0][0]
to_concat.append(np.repeat(i, len(index)))
codes_list.append(np.concatenate(to_concat))
concat_index = _concat_indexes(indexes)
# these go at the end
if isinstance(concat_index, MultiIndex):
levels.extend(concat_index.levels)
codes_list.extend(concat_index.codes)
else:
codes, categories = factorize_from_iterable(concat_index)
levels.append(categories)
codes_list.append(codes)
if len(names) == len(levels):
names = list(names)
else:
# make sure that all of the passed indices have the same nlevels
if not len({idx.nlevels for idx in indexes}) == 1:
raise AssertionError(
"Cannot concat indices that do not have the same number of levels"
)
# also copies
names = list(names) + list(get_unanimous_names(*indexes))
return MultiIndex(
levels=levels, codes=codes_list, names=names, verify_integrity=False
)
new_index = indexes[0]
n = len(new_index)
kpieces = len(indexes)
# also copies
new_names = list(names)
new_levels = list(levels)
# construct codes
new_codes = []
# do something a bit more speedy
for hlevel, level in zip(zipped, levels):
hlevel = ensure_index(hlevel)
mapped = level.get_indexer(hlevel)
mask = mapped == -1
if mask.any():
raise ValueError(f"Values not found in passed level: {hlevel[mask]!s}")
new_codes.append(np.repeat(mapped, n))
if isinstance(new_index, MultiIndex):
new_levels.extend(new_index.levels)
new_codes.extend([np.tile(lab, kpieces) for lab in new_index.codes])
else:
new_levels.append(new_index)
new_codes.append(np.tile(np.arange(n), kpieces))
if len(new_names) < len(new_levels):
new_names.extend(new_index.names)
return MultiIndex(
levels=new_levels, codes=new_codes, names=new_names, verify_integrity=False
)
| 32.031724 | 88 | 0.552986 |
4a262a1267035c54c1a0f9b5b44bb0a4c5750b4a | 1,798 | py | Python | observations/r/freeny.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 199 | 2017-07-24T01:34:27.000Z | 2022-01-29T00:50:55.000Z | observations/r/freeny.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 46 | 2017-09-05T19:27:20.000Z | 2019-01-07T09:47:26.000Z | observations/r/freeny.py | hajime9652/observations | 2c8b1ac31025938cb17762e540f2f592e302d5de | [
"Apache-2.0"
] | 45 | 2017-07-26T00:10:44.000Z | 2022-03-16T20:44:59.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def freeny(path):
"""Freeny's Revenue Data
Freeny's data on quarterly revenue and explanatory variables.
There are three ‘freeny’ data sets.
`freeny.y` is a time series with 39 observations on quarterly revenue
from (1962,2Q) to (1971,4Q).
`freeny.x` is a matrix of explanatory variables. The columns are
`freeny.y` lagged 1 quarter, price index, income level, and market
potential.
Finally, `freeny` is a data frame with variables `y`,
`lag.quarterly.revenue`, `price.index`, `income.level`, and
`market.potential` obtained from the above two data objects.
A. E. Freeny (1977) *A Portable Linear Regression Package with Test
Programs*. Bell Laboratories memorandum.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `freeny.csv`.
Returns:
Tuple of np.ndarray `x_train` with 39 rows and 5 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'freeny.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/datasets/freeny.csv'
maybe_download_and_extract(path, url,
save_file_name='freeny.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
| 29.47541 | 71 | 0.697998 |
4a262ca81c1827af6bd559bab4224b9a268018ae | 2,967 | py | Python | flakeheaven/_patched/_style_guide.py | snmishra/flakeheaven | 20f94457744c47d965d4520d3b22def538b0cc49 | [
"MIT"
] | 24 | 2022-01-06T14:20:37.000Z | 2022-03-31T14:47:38.000Z | flakeheaven/_patched/_style_guide.py | snmishra/flakeheaven | 20f94457744c47d965d4520d3b22def538b0cc49 | [
"MIT"
] | 47 | 2022-01-04T09:19:34.000Z | 2022-03-30T12:53:01.000Z | flakeheaven/_patched/_style_guide.py | snmishra/flakeheaven | 20f94457744c47d965d4520d3b22def538b0cc49 | [
"MIT"
] | 5 | 2022-01-05T03:32:59.000Z | 2022-02-05T14:47:58.000Z | # built-in
from functools import lru_cache
# external
from flake8.style_guide import StyleGuide, StyleGuideManager
# app
from ._violation import FlakeHeavenViolation
class FlakeHeavenStyleGuideManager(StyleGuideManager):
def __init__(self, options, formatter, decider=None):
"""Initialize our StyleGuide.
.. todo:: Add parameter documentation.
"""
super().__init__(options, formatter, decider)
self.default_style_guide = FlakeHeavenStyleGuide(
options, formatter, self.stats, decider=decider,
)
self.style_guides = [self.default_style_guide]
self.style_guides.extend(self.populate_style_guides_with(options))
@lru_cache(maxsize=None)
def style_guide_for(self, filename: str):
"""Patched styleguide finder to give priority to flakeheaven's stileguides
"""
guides = sorted(
(g for g in self.style_guides if g.applies_to(filename)),
key=lambda g: len(g.filename or ''),
reverse=True,
)
for guide in guides:
if isinstance(guide, FlakeHeavenStyleGuide):
return guide
return guides[0]
def handle_error(
self,
code: str,
filename: str,
line_number: int,
column_number: int,
text: str,
plugin: str,
physical_line: str = None,
):
guide = self.style_guide_for(filename)
params = dict(
code=code,
filename=filename,
line_number=line_number,
column_number=column_number,
text=text,
physical_line=physical_line,
)
if isinstance(guide, FlakeHeavenStyleGuide):
params['plugin'] = plugin
return guide.handle_error(**params)
class FlakeHeavenStyleGuide(StyleGuide):
def handle_error(
self,
code: str,
filename: str,
line_number: int,
column_number: int,
text: str,
plugin: str,
physical_line: str = None,
):
"""
        This function is copied as is, but:
1. Violation is replaced by FlakeHeavenViolation
2. `error_is_selected` dropped. If we get here, the error IS selected.
"""
disable_noqa = self.options.disable_noqa
if not column_number:
column_number = 0
error = FlakeHeavenViolation(
code=code,
filename=filename,
line_number=line_number,
column_number=column_number + 1,
text=text,
physical_line=physical_line,
plugin=plugin,
)
is_not_inline_ignored = error.is_inline_ignored(disable_noqa) is False
is_included_in_diff = error.is_in(self._parsed_diff)
if is_not_inline_ignored and is_included_in_diff:
self.formatter.handle(error)
self.stats.record(error)
return 1
return 0
| 30.587629 | 82 | 0.607347 |
4a262cbb6180f533ce24e26ffe9f59c70a598423 | 2,347 | py | Python | configs/r3det/datasets/dotav1_rotational_detection.py | cameronchoi/r3det-docker | 30af0b845b2baa4dcb7dccdc4ccd3238fd72bf75 | [
"Apache-2.0"
] | 176 | 2020-06-18T12:35:30.000Z | 2022-03-28T02:20:57.000Z | configs/r3det/datasets/dotav1_rotational_detection.py | cameronchoi/r3det-docker | 30af0b845b2baa4dcb7dccdc4ccd3238fd72bf75 | [
"Apache-2.0"
] | 35 | 2020-06-28T07:03:24.000Z | 2022-01-09T01:20:46.000Z | configs/r3det/datasets/dotav1_rotational_detection.py | cameronchoi/r3det-docker | 30af0b845b2baa4dcb7dccdc4ccd3238fd72bf75 | [
"Apache-2.0"
] | 44 | 2020-06-20T07:51:01.000Z | 2022-02-10T01:17:54.000Z | # dataset settings
dataset_type = 'DOTADatasetV1'
# dataset root path:
data_root = '/data/dota/'
trainsplit_ann_folder = 'trainsplit/labelTxt'
trainsplit_img_folder = 'trainsplit/images'
valsplit_ann_folder = 'valsplit/labelTxt'
valsplit_img_folder = 'valsplit/images'
val_ann_folder = 'val/labelTxt'
val_img_folder = 'val/images'
test_img_folder = 'test/images'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='RResize', img_scale=(800, 800)),
dict(type='RRandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size=(800, 800)),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_bboxes_ignore']),
]
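# At test time the large DOTA images are processed as overlapping 600x600 tiles
# (150 px overlap), each resized to 800x800 before detection.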
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='CroppedTilesFlipAug',
tile_scale=(800, 800),
tile_shape=(600, 600),
tile_overlap=(150, 150),
flip=False,
transforms=[
dict(type='RResize', img_scale=(800, 800)),
dict(type='RRandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size=(800, 800)),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=6,
workers_per_gpu=2,
train=[
dict(
type=dataset_type,
data_root=data_root,
ann_file=trainsplit_ann_folder,
img_prefix=trainsplit_img_folder,
pipeline=train_pipeline),
dict(
type=dataset_type,
data_root=data_root,
ann_file=valsplit_ann_folder,
img_prefix=valsplit_img_folder,
pipeline=train_pipeline),
],
val=dict(
type=dataset_type,
data_root=data_root,
ann_file=val_ann_folder,
difficulty_thresh=1,
img_prefix=val_img_folder,
pipeline=test_pipeline),
test=dict(
type=dataset_type,
data_root=data_root,
ann_file=test_img_folder,
difficulty_thresh=1,
img_prefix=test_img_folder,
pipeline=test_pipeline))
| 30.881579 | 85 | 0.618662 |
4a262ce2753411137ab683833823302d6ff655fb | 2,523 | py | Python | utils/estimate_pose.py | chingchichuang/PRNet | 4b7125bcf465d178093bc5e1ce3e4926c72c3016 | [
"MIT"
] | null | null | null | utils/estimate_pose.py | chingchichuang/PRNet | 4b7125bcf465d178093bc5e1ce3e4926c72c3016 | [
"MIT"
] | null | null | null | utils/estimate_pose.py | chingchichuang/PRNet | 4b7125bcf465d178093bc5e1ce3e4926c72c3016 | [
"MIT"
] | null | null | null | import numpy as np
from math import cos, sin, atan2, asin
def isRotationMatrix(R):
    ''' checks whether a matrix is a valid rotation matrix (i.e. whether it is orthogonal)
'''
Rt = np.transpose(R)
shouldBeIdentity = np.dot(Rt, R)
I = np.identity(3, dtype = R.dtype)
n = np.linalg.norm(I - shouldBeIdentity)
return n < 1e-6
def matrix2angle(R):
''' compute three Euler angles from a Rotation Matrix. Ref: http://www.gregslabaugh.net/publications/euler.pdf
Args:
R: (3,3). rotation matrix
Returns:
x: yaw
y: pitch
z: roll
'''
# assert(isRotationMatrix(R))
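    # Non-degenerate case: the decomposition below assumes R[2,0] = sin(x),
    # R[2,1] = cos(x)*sin(y), R[2,2] = cos(x)*cos(y), R[1,0] = cos(x)*sin(z),
    # R[0,0] = cos(x)*cos(z); the gimbal-lock case (|R[2,0]| = 1) is handled below.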
    if R[2,0] != 1 and R[2,0] != -1:
x = asin(R[2,0])
y = atan2(R[2,1]/cos(x), R[2,2]/cos(x))
z = atan2(R[1,0]/cos(x), R[0,0]/cos(x))
    else:  # Gimbal lock
        z = 0  # can be anything
if R[2,0] == -1:
x = np.pi/2
y = z + atan2(R[0,1], R[0,2])
else:
x = -np.pi/2
y = -z + atan2(-R[0,1], -R[0,2])
return x/np.pi*180.0, y/np.pi*180.0, z/np.pi*180.0
def P2sRt(P):
    ''' decompose camera matrix P into scale, rotation and 2d translation.
Args:
P: (3, 4). Affine Camera Matrix.
Returns:
s: scale factor.
R: (3, 3). rotation matrix.
t2d: (2,). 2d translation.
'''
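    # The scale is taken as the mean norm of the first two rows of P, and the
    # rotation is rebuilt from those rows (normalized) plus their cross product.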
t2d = P[:2, 3]
R1 = P[0:1, :3]
R2 = P[1:2, :3]
s = (np.linalg.norm(R1) + np.linalg.norm(R2))/2.0
r1 = R1/np.linalg.norm(R1)
r2 = R2/np.linalg.norm(R2)
r3 = np.cross(r1, r2)
R = np.concatenate((r1, r2, r3), 0)
return s, R, t2d
def compute_similarity_transform(points_static, points_to_transform):
#http://nghiaho.com/?page_id=671
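    # Procrustes-style fit: center both point sets, get the rotation from the SVD
    # of their covariance (flipping one axis if a reflection appears), estimate the
    # scale from the RMS radii, then assemble the affine matrix [s*R | t].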
p0 = np.copy(points_static).T
p1 = np.copy(points_to_transform).T
t0 = -np.mean(p0, axis=1).reshape(3,1)
t1 = -np.mean(p1, axis=1).reshape(3,1)
t_final = t1 -t0
p0c = p0+t0
p1c = p1+t1
covariance_matrix = p0c.dot(p1c.T)
U,S,V = np.linalg.svd(covariance_matrix)
R = U.dot(V)
if np.linalg.det(R) < 0:
R[:,2] *= -1
rms_d0 = np.sqrt(np.mean(np.linalg.norm(p0c, axis=0)**2))
rms_d1 = np.sqrt(np.mean(np.linalg.norm(p1c, axis=0)**2))
s = (rms_d0/rms_d1)
P = np.c_[s*np.eye(3).dot(R), t_final]
return P
def estimate_pose(vertices):
canonical_vertices = np.load('Data/uv-data/canonical_vertices.npy')
P = compute_similarity_transform(vertices, canonical_vertices)
_,R,_ = P2sRt(P) # decompose affine matrix to s, R, t
pose = matrix2angle(R)
return P, pose
| 26.28125 | 114 | 0.561237 |
4a262d31341c8d1909219928f4cc6482ba541cc5 | 16,436 | py | Python | scripts/Crawlers/OSFCrawler.py | mathdugre/conp-dataset | f117b9a2f8191d654de6d3aad57e6c352a6e881e | [
"MIT"
] | null | null | null | scripts/Crawlers/OSFCrawler.py | mathdugre/conp-dataset | f117b9a2f8191d654de6d3aad57e6c352a6e881e | [
"MIT"
] | null | null | null | scripts/Crawlers/OSFCrawler.py | mathdugre/conp-dataset | f117b9a2f8191d654de6d3aad57e6c352a6e881e | [
"MIT"
] | null | null | null | from scripts.Crawlers.BaseCrawler import BaseCrawler
from git import Repo
import os
import json
import requests
import humanize
import datetime
def _create_osf_tracker(path, dataset):
with open(path, "w") as f:
data = {
"version": dataset["version"],
"title": dataset["title"]
}
json.dump(data, f, indent=4)
class OSFCrawler(BaseCrawler):
def __init__(self, github_token, config_path, verbose, force):
super().__init__(github_token, config_path, verbose, force)
self.osf_token = self._get_token()
def _get_token(self):
if os.path.isfile(self.config_path):
with open(self.config_path, "r") as f:
data = json.load(f)
if "osf_token" in data.keys():
return data["osf_token"]
def _get_request_with_bearer_token(self, link, redirect=True):
header = {'Authorization': f'Bearer {self.osf_token}'}
r = requests.get(link, headers=header, allow_redirects=redirect)
if r.ok:
return r
else:
raise Exception(f'Request to {r.url} failed: {r.content}')
def _query_osf(self):
query = (
'https://api.osf.io/v2/nodes/?filter[tags]=canadian-open-neuroscience-platform'
)
r_json = self._get_request_with_bearer_token(query).json()
results = r_json["data"]
# Retrieve results from other pages
if r_json["links"]["meta"]["total"] > r_json["links"]["meta"]["per_page"]:
next_page = r_json["links"]["next"]
while next_page is not None:
next_page_json = self._get_request_with_bearer_token(next_page).json()
results.extend(next_page_json["data"])
next_page = next_page_json["links"]["next"]
if self.verbose:
print("OSF query: {}".format(query))
return results
def _download_files(self, link, current_dir, inner_path, d, annex, sizes):
r_json = self._get_request_with_bearer_token(link).json()
files = r_json["data"]
# Retrieve the files in the other pages if there are more than 1 page
if "links" in r_json.keys() and r_json["links"]["meta"]["total"] > r_json["links"]["meta"]["per_page"]:
next_page = r_json["links"]["next"]
while next_page is not None:
next_page_json = self._get_request_with_bearer_token(next_page).json()
files.extend(next_page_json["data"])
next_page = next_page_json["links"]["next"]
for file in files:
# Handle folders
if file["attributes"]["kind"] == "folder":
folder_path = os.path.join(current_dir, file["attributes"]["name"])
os.mkdir(folder_path)
self._download_files(
file["relationships"]["files"]["links"]["related"]["href"],
folder_path,
os.path.join(inner_path, file["attributes"]["name"]),
d, annex, sizes
)
# Handle single files
elif file["attributes"]["kind"] == "file":
# Check if file is private
r = requests.get(file["links"]["download"], allow_redirects=False)
if 'https://accounts.osf.io/login' in r.headers['location']: # Redirects to login, private file
correct_download_link = self._get_request_with_bearer_token(
file["links"]["download"], redirect=False).headers['location']
if 'https://accounts.osf.io/login' not in correct_download_link:
zip_file = True if file["attributes"]["name"].split(".")[-1] == "zip" else False
d.download_url(correct_download_link, path=os.path.join(inner_path, ""), archive=zip_file)
else: # Token did not work for downloading file, return
print(f'Unable to download file {file["links"]["download"]} with current token, skipping file')
return
# Public file
else:
# Handle zip files
if file["attributes"]["name"].split(".")[-1] == "zip":
d.download_url(file["links"]["download"], path=os.path.join(inner_path, ""), archive=True)
elif file['attributes']['name'] in ['DATS.json', 'README.md']:
d.download_url(file['links']['download'], path=os.path.join(inner_path, ''))
else:
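                        # 'git annex addurl --fast' only registers the URL (no
                        # download); content is fetched later, e.g. via 'datalad get'.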
annex("addurl", file["links"]["download"], "--fast", "--file",
os.path.join(inner_path, file["attributes"]["name"]))
d.save()
# append the size of the downloaded file to the sizes array
file_size = file['attributes']['size']
if not file_size:
# if the file size cannot be found in the OSF API response, then get it from git annex info
inner_file_path = os.path.join(inner_path, file["attributes"]["name"])
annex_info_dict = json.loads(annex('info', '--bytes', '--json', inner_file_path))
file_size = int(annex_info_dict['size'])
sizes.append(file_size)
def _download_components(self, components_list, current_dir, inner_path, d, annex, dataset_size):
# Loop through each available components and download their files
for component in components_list:
component_title = self._clean_dataset_title(component['attributes']['title'])
component_inner_path = os.path.join(inner_path, 'components', component_title)
os.makedirs(os.path.join(current_dir, component_inner_path))
self._download_files(
component['relationships']['files']['links']['related']['href'],
os.path.join(current_dir, component_inner_path),
component_inner_path,
d,
annex,
dataset_size
)
# check if the component contains (sub)components, in which case, download the (sub)components data
subcomponents_list = self._get_components(
component['relationships']['children']['links']['related']['href'])
if subcomponents_list:
self._download_components(
subcomponents_list,
current_dir,
os.path.join(component_inner_path),
d,
annex,
dataset_size
)
# Once we have downloaded all the components files, check to see if there are any empty
# directories (in the case the 'OSF parent' dataset did not have any downloaded files
list_of_empty_dirs = [
dirpath
for (dirpath, dirnames, filenames) in os.walk(current_dir)
if len(dirnames) == 0 and len(filenames) == 0
]
for empty_dir in list_of_empty_dirs:
os.rmdir(empty_dir)
def _get_contributors(self, link):
r = self._get_request_with_bearer_token(link)
contributors = [
contributor["embeds"]["users"]["data"]["attributes"]["full_name"]
for contributor in r.json()["data"]
]
return contributors
def _get_license(self, link):
r = self._get_request_with_bearer_token(link)
return r.json()["data"]["attributes"]["name"]
def _get_components(self, link):
r = self._get_request_with_bearer_token(link)
return r.json()['data']
def _get_institutions(self, link):
r = self._get_request_with_bearer_token(link)
if r.json()['data']:
institutions = [
institution['attributes']['name'] for institution in r.json()['data']
]
return institutions
def _get_identifier(self, link):
r = self._get_request_with_bearer_token(link)
return r.json()['data'][0]['attributes']['value'] if r.json()['data'] else False
def get_all_dataset_description(self):
osf_dois = []
datasets = self._query_osf()
for dataset in datasets:
# skip datasets that have a parent since the files' components will
# go into the parent dataset.
if 'parent' in dataset['relationships'].keys():
continue
attributes = dataset["attributes"]
# Retrieve keywords/tags
keywords = list(map(lambda x: {"value": x}, attributes["tags"]))
# Retrieve contributors/creators
contributors = self._get_contributors(
dataset["relationships"]["contributors"]["links"]["related"]["href"])
# Retrieve license
license_ = "None"
if "license" in dataset["relationships"].keys():
license_ = self._get_license(dataset["relationships"]["license"]["links"]["related"]["href"])
# Retrieve institution information
institutions = self._get_institutions(
dataset['relationships']['affiliated_institutions']['links']['related']['href'])
# Retrieve identifier information
identifier = self._get_identifier(dataset['relationships']['identifiers']['links']['related']['href'])
# Get link for the dataset files
files_link = dataset['relationships']['files']['links']['related']['href']
# Get components list
components_list = self._get_components(dataset['relationships']['children']['links']['related']['href'])
# Gather extra properties
extra_properties = [
{
"category": "logo",
"values": [
{
"value": "https://osf.io/static/img/institutions/shields/cos-shield.png"
}
],
}
]
if institutions:
extra_properties.append(
{
"category": "origin_institution",
"values": list(
map(lambda x: {'value': x}, institutions)
)
}
)
# Retrieve dates
date_created = datetime.datetime.strptime(attributes['date_created'], '%Y-%m-%dT%H:%M:%S.%f')
date_modified = datetime.datetime.strptime(attributes['date_modified'], '%Y-%m-%dT%H:%M:%S.%f')
dataset_dats_content = {
"title": attributes["title"],
"files": files_link,
"components_list": components_list,
"homepage": dataset["links"]["html"],
"creators": list(
map(lambda x: {"name": x}, contributors)
),
"description": attributes["description"],
"version": attributes["date_modified"],
"licenses": [
{
"name": license_
}
],
"dates": [
{
"date": date_created.strftime('%Y-%m-%d %H:%M:%S'),
"type": {
"value": "Date Created"
}
},
{
"date": date_modified.strftime('%Y-%m-%d %H:%M:%S'),
"type": {
"value": "Date Modified"
}
}
],
"keywords": keywords,
"distributions": [
{
"size": 0,
"unit": {"value": "B"},
"access": {
"landingPage": dataset["links"]["html"],
"authorizations": [
{
"value": "public" if attributes['public'] else "private"
}
],
},
}
],
"extraProperties": extra_properties
}
if identifier:
source = 'OSF DOI' if 'OSF.IO' in identifier else 'DOI'
dataset_dats_content['identifier'] = {
"identifier": identifier,
"identifierSource": source
}
osf_dois.append(dataset_dats_content)
if self.verbose:
print("Retrieved OSF DOIs: ")
for osf_doi in osf_dois:
print(
"- Title: {}, Last modified: {}".format(
osf_doi["title"],
osf_doi["version"]
)
)
return osf_dois
def add_new_dataset(self, dataset, dataset_dir):
d = self.datalad.Dataset(dataset_dir)
d.no_annex(".conp-osf-crawler.json")
d.save()
annex = Repo(dataset_dir).git.annex
dataset_size = []
self._download_files(dataset["files"], dataset_dir, "", d, annex, dataset_size)
if dataset['components_list']:
self._download_components(dataset['components_list'], dataset_dir, '', d, annex, dataset_size)
dataset_size, dataset_unit = humanize.naturalsize(sum(dataset_size)).split(" ")
dataset["distributions"][0]["size"] = float(dataset_size)
dataset["distributions"][0]["unit"]["value"] = dataset_unit
# Add .conp-osf-crawler.json tracker file
_create_osf_tracker(
os.path.join(dataset_dir, ".conp-osf-crawler.json"), dataset)
def update_if_necessary(self, dataset_description, dataset_dir):
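        # The tracker file stores the OSF 'date_modified' used as the dataset
        # version; a re-download happens only when that value changes upstream.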
tracker_path = os.path.join(dataset_dir, ".conp-osf-crawler.json")
if not os.path.isfile(tracker_path):
print("{} does not exist in dataset, skipping".format(tracker_path))
return False
with open(tracker_path, "r") as f:
tracker = json.load(f)
if tracker["version"] == dataset_description["version"]:
# Same version, no need to update
if self.verbose:
print("{}, version {} same as OSF version DOI, no need to update"
.format(dataset_description["title"], dataset_description["version"]))
return False
else:
# Update dataset
if self.verbose:
print("{}, version {} different from OSF version DOI {}, updating"
.format(dataset_description["title"], tracker["version"], dataset_description["version"]))
# Remove all data and DATS.json files
for file_name in os.listdir(dataset_dir):
if file_name[0] == ".":
continue
self.datalad.remove(os.path.join(dataset_dir, file_name), check=False)
d = self.datalad.Dataset(dataset_dir)
annex = Repo(dataset_dir).git.annex
dataset_size = []
self._download_files(dataset_description["files"], dataset_dir, "", d, annex, dataset_size)
if dataset_description['components_list']:
self._download_components(
dataset_description['components_list'], dataset_dir, '', d, annex, dataset_size)
dataset_size, dataset_unit = humanize.naturalsize(sum(dataset_size)).split(" ")
dataset_description["distributions"][0]["size"] = float(dataset_size)
dataset_description["distributions"][0]["unit"]["value"] = dataset_unit
# Add .conp-osf-crawler.json tracker file
_create_osf_tracker(
os.path.join(dataset_dir, ".conp-osf-crawler.json"), dataset_description)
return True
def get_readme_content(self, dataset):
readme_content = """# {}
Crawled from [OSF]({})
## Description
{}""".format(dataset["title"], dataset["homepage"], dataset["description"])
if 'identifier' in dataset:
readme_content += """
DOI: {}""".format(dataset['identifier']['identifier'])
return readme_content
| 42.360825 | 119 | 0.532733 |
4a262e1a8c2381db1bc99c05e9d64a1d0feecca5 | 1,173 | py | Python | src/adaf/python_scripts/plotLumBXB.py | eduardomgutierrez/RIAF_radproc | 0e4166f04cce27fed2cbd2c7078023c10e0e8d12 | [
"MIT"
] | 1 | 2021-08-30T06:56:03.000Z | 2021-08-30T06:56:03.000Z | src/adaf/python_scripts/plotLumBXB.py | eduardomgutierrez/RIAF_radproc | 0e4166f04cce27fed2cbd2c7078023c10e0e8d12 | [
"MIT"
] | null | null | null | src/adaf/python_scripts/plotLumBXB.py | eduardomgutierrez/RIAF_radproc | 0e4166f04cce27fed2cbd2c7078023c10e0e8d12 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc # TO MANAGE MATPLOTLIB PARAMETERS
rc('font',family='serif')
rc('text',usetex = True)
import scipy.optimize as optimization
nu,eV,Sy,Br,IC,pp,CD,Refl,Tot = np.loadtxt('lum.txt',unpack=True)
NT_logeV,NT_logSye,NT_logSyp,NT_logIC,NT_logpp,NT_logpg,NT_logeAbs,NT_logpAbs = \
np.loadtxt('ntLuminosities.txt',unpack=True,skiprows=1)
x_eV = [-2,16]
y_axis = [30,38]
fig, ax1 = plt.subplots()
ax1.tick_params(axis='both',labelsize=12)
ax1.set_xlim(x_eV)
ax1.set_ylim(y_axis)
ax1.set_xlabel(r'$\mathrm{Log}(E/\mathrm{eV})$',fontsize=13)
ax1.set_ylabel(r'$\mathrm{Log}(\nu L_\nu / \mathrm{erg~s}^{-1})$',fontsize=13)
ax1.plot(np.log10(eV),np.log10(Tot),label='Thermal')
ax1.plot(NT_logeV,NT_logSye,label='eSy')
ax1.plot(NT_logeV,NT_logSyp,label='pSy')
ax1.plot(NT_logeV,NT_logIC,label='IC')
ax1.plot(NT_logeV,NT_logpp,label='pp')
ax1.plot(NT_logeV,NT_logpg,label=r'p$\gamma$')
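# Total absorbed output: thermal component plus absorbed non-thermal electron and
# proton luminosities, summed in linear space and plotted in log10.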
ax1.plot(NT_logeV,np.log10(np.power(10,NT_logeAbs)+np.power(10,NT_logpAbs)+Tot),\
lw=3,label='Abs',color='k')
ax1.legend(loc='best',fontsize=8)
fig.savefig('nonThermalLum.pdf')
| 30.868421 | 81 | 0.728048 |
4a262edbdb74da0df79148885dbd9a1b0dbc2699 | 4,806 | py | Python | tools/infer_submission_simple.py | swkpku/Detectron | cf27871b2d092fad2afee47c44a622b4bb974cd0 | [
"Apache-2.0"
] | null | null | null | tools/infer_submission_simple.py | swkpku/Detectron | cf27871b2d092fad2afee47c44a622b4bb974cd0 | [
"Apache-2.0"
] | null | null | null | tools/infer_submission_simple.py | swkpku/Detectron | cf27871b2d092fad2afee47c44a622b4bb974cd0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python2
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Perform inference on a single image or all images with a certain extension
(e.g., .jpg) in a folder.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import defaultdict
import argparse
import cv2 # NOQA (Must import before importing caffe2 due to bug in cv2)
import glob
import logging
import os
import sys
import time
from caffe2.python import workspace
from core.config import assert_and_infer_cfg
from core.config import cfg
from core.config import merge_cfg_from_file
from utils.timer import Timer
import core.test_engine as infer_engine
import datasets.dummy_datasets as dummy_datasets
import utils.c2 as c2_utils
import utils.logging
import utils.vis as vis_utils
c2_utils.import_detectron_ops()
# OpenCL may be enabled by default in OpenCV3; disable it because it's not
# thread safe and causes unwanted GPU memory allocations.
cv2.ocl.setUseOpenCL(False)
def parse_args():
parser = argparse.ArgumentParser(description='End-to-end inference')
parser.add_argument(
'--cfg',
dest='cfg',
help='cfg model file (/path/to/model_config.yaml)',
default=None,
type=str
)
parser.add_argument(
'--wts',
dest='weights',
help='weights model file (/path/to/model_weights.pkl)',
default=None,
type=str
)
parser.add_argument(
'--output-dir',
dest='output_dir',
help='directory for visualization pdfs (default: /tmp/infer_simple)',
default='/tmp/infer_simple',
type=str
)
parser.add_argument(
'--image-ext',
dest='image_ext',
help='image file name extension (default: jpg)',
default='jpg',
type=str
)
parser.add_argument(
'im_or_folder', help='image or folder of images', default=None
)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def main(args):
logger = logging.getLogger(__name__)
merge_cfg_from_file(args.cfg)
cfg.TEST.WEIGHTS = args.weights
cfg.NUM_GPUS = 1
assert_and_infer_cfg()
model = infer_engine.initialize_model_from_cfg()
dummy_coco_dataset = dummy_datasets.get_coco_dataset()
if os.path.isdir(args.im_or_folder):
im_list = glob.iglob(args.im_or_folder + '/*.' + args.image_ext)
else:
im_list = [args.im_or_folder]
test_ids = []
rles = []
for i, im_name in enumerate(im_list):
out_name = os.path.join(
args.output_dir, '{}'.format(os.path.basename(im_name) + '.pdf')
)
logger.info('Processing {} -> {}'.format(im_name, out_name))
im = cv2.imread(im_name)
timers = defaultdict(Timer)
t = time.time()
with c2_utils.NamedCudaScope(0):
cls_boxes, cls_segms, cls_keyps = infer_engine.im_detect_all(
model, im, None, timers=timers
)
logger.info('Inference time: {:.3f}s'.format(time.time() - t))
for k, v in timers.items():
logger.info(' | {}: {:.3f}s'.format(k, v.average_time))
if i == 0:
logger.info(
' \ Note: inference on the first image will be slower than the '
'rest (caches and auto-tuning need to warm up)'
)
new_test_ids, new_rles = vis_utils.make_submission(
im[:, :, ::-1],
im_name,
cls_boxes,
cls_segms,
cls_keyps,
thresh=0.7
)
test_ids.extend(new_test_ids)
rles.extend(new_rles)
import pandas as pd
sub = pd.DataFrame()
sub['ImageId'] = test_ids
sub['EncodedPixels'] = pd.Series(rles).apply(lambda x: ' '.join(str(y) for y in x))
sub.to_csv(args.output_dir + '/e2e_mask_rcnn_R-50-FPN_1x-lr3e-3-nuclei-6-new.csv', index=False)
if __name__ == '__main__':
workspace.GlobalInit(['caffe2', '--caffe2_log_level=0'])
utils.logging.setup_logging(__name__)
args = parse_args()
main(args)
| 31.006452 | 99 | 0.643987 |
4a262f2d7926b61c2d456712cf39637c9d5d9116 | 4,344 | py | Python | solidity/python/MongoTestSale.py | dabdevelop/bancorprotocol-contracts | 53894df7f4c31828188b11c9a974da6413579799 | [
"Apache-2.0"
] | null | null | null | solidity/python/MongoTestSale.py | dabdevelop/bancorprotocol-contracts | 53894df7f4c31828188b11c9a974da6413579799 | [
"Apache-2.0"
] | null | null | null | solidity/python/MongoTestSale.py | dabdevelop/bancorprotocol-contracts | 53894df7f4c31828188b11c9a974da6413579799 | [
"Apache-2.0"
] | null | null | null | import sys
import math
import pymongo
import FormulaSolidityPort
import FormulaNativePython
USERNAME = ''
PASSWORD = ''
SERVER_NAME = '127.0.0.1:27017'
DATABASE_NAME = 'test'
MINIMUM_VALUE_SUPPLY = 100
MAXIMUM_VALUE_SUPPLY = 10**34
GROWTH_FACTOR_SUPPLY = 1.5
MINIMUM_VALUE_RESERVE = 100
MAXIMUM_VALUE_RESERVE = 10**34
GROWTH_FACTOR_RESERVE = 1.5
MINIMUM_VALUE_RATIO = 10
MAXIMUM_VALUE_RATIO = 90
GROWTH_FACTOR_RATIO = 1.25
MINIMUM_VALUE_AMOUNT = 1
MAXIMUM_VALUE_AMOUNT = 10**34
GROWTH_FACTOR_AMOUNT = 1.5
TRANSACTION_SUCCESS = 0
TRANSACTION_FAILURE = 1
TRANSACTION_INVALID = 2
IMPLEMENTATION_ERROR = 3
def Main():
username = USERNAME
password = PASSWORD
server_name = SERVER_NAME
database_name = DATABASE_NAME
for arg in sys.argv[1:]:
username = arg[len('username=' ):] if arg.startswith('username=' ) else username
password = arg[len('password=' ):] if arg.startswith('password=' ) else password
server_name = arg[len('server_name=' ):] if arg.startswith('server_name=' ) else server_name
database_name = arg[len('database_name='):] if arg.startswith('database_name=') else database_name
if username and password:
uri = 'mongodb://{}:{}@{}/{}'.format(username,password,server_name,database_name)
else:
uri = 'mongodb://{}/{}'.format(server_name,database_name)
TestAll(pymongo.MongoClient(uri)[database_name]['sale'])
def TestAll(collection):
range_supply = GenerateRange(MINIMUM_VALUE_SUPPLY ,MAXIMUM_VALUE_SUPPLY ,GROWTH_FACTOR_SUPPLY )
range_reserve = GenerateRange(MINIMUM_VALUE_RESERVE,MAXIMUM_VALUE_RESERVE,GROWTH_FACTOR_RESERVE)
range_ratio = GenerateRange(MINIMUM_VALUE_RATIO ,MAXIMUM_VALUE_RATIO ,GROWTH_FACTOR_RATIO )
range_amount = GenerateRange(MINIMUM_VALUE_AMOUNT ,MAXIMUM_VALUE_AMOUNT ,GROWTH_FACTOR_AMOUNT )
for supply in range_supply :
for reserve in range_reserve:
for ratio in range_ratio :
for amount in range_amount :
if amount <= supply:
resultSolidityPort = Run(FormulaSolidityPort,supply,reserve,ratio,amount)
resultNativePython = Run(FormulaNativePython,supply,reserve,ratio,amount)
if resultNativePython < 0:
status = TRANSACTION_INVALID
loss = {'absolute':0,'relative':0}
elif resultSolidityPort < 0:
status = TRANSACTION_FAILURE
loss = {'absolute':0,'relative':0}
elif resultNativePython < resultSolidityPort:
status = IMPLEMENTATION_ERROR
loss = {'absolute':0,'relative':0}
else: # 0 <= resultSolidityPort <= resultNativePython
status = TRANSACTION_SUCCESS
loss = {'absolute':float(resultNativePython-resultSolidityPort),'relative':1-float(resultSolidityPort/resultNativePython)}
entry = {
'supply' :'{}'.format(supply ),
'reserve':'{}'.format(reserve),
'ratio' :'{}'.format(ratio ),
'amount' :'{}'.format(amount ),
'resultSolidityPort':'{}' .format(resultSolidityPort),
'resultNativePython':'{:.2f}'.format(resultNativePython),
'status':status,
'loss' :loss ,
}
id = collection.insert(entry)
print ', '.join('{}: {}'.format(key,entry[key]) for key in ['supply','reserve','ratio','amount','resultSolidityPort','resultNativePython','status','loss'])
def Run(module,supply,reserve,ratio,amount):
try:
return module.calculateSaleReturn(supply,reserve,ratio,amount)
except Exception:
return -1
def GenerateRange(minimumValue,maximumValue,growthFactor):
return [int(minimumValue*growthFactor**n) for n in range(int(math.log(float(maximumValue)/float(minimumValue),growthFactor))+1)]
Main()
| 40.598131 | 179 | 0.596685 |
4a26307cfa315312eee48378039c786116a11ba0 | 10,235 | py | Python | pacu/models/awsapi/codestar-notifications.py | RyanJarv/Pacu2 | 27df4bcf296fc8f467d3dc671a47bf9519ce7a24 | [
"MIT"
] | 1 | 2022-03-09T14:51:54.000Z | 2022-03-09T14:51:54.000Z | pacu/models/awsapi/codestar-notifications.py | RyanJarv/Pacu2 | 27df4bcf296fc8f467d3dc671a47bf9519ce7a24 | [
"MIT"
] | null | null | null | pacu/models/awsapi/codestar-notifications.py | RyanJarv/Pacu2 | 27df4bcf296fc8f467d3dc671a47bf9519ce7a24 | [
"MIT"
] | null | null | null | # generated by datamodel-codegen:
# filename: openapi.yaml
# timestamp: 2021-12-31T02:46:40+00:00
from __future__ import annotations
from datetime import datetime
from enum import Enum
from typing import Annotated, Any, List, Optional
from pydantic import BaseModel, Extra, Field, SecretStr
class EventTypeId(BaseModel):
__root__: Annotated[str, Field(max_length=200, min_length=1)]
class TagValue(BaseModel):
__root__: Annotated[
str, Field(max_length=256, regex='^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$')
]
class ResourceAlreadyExistsException(BaseModel):
__root__: Any
class ValidationException(ResourceAlreadyExistsException):
pass
class LimitExceededException(ResourceAlreadyExistsException):
pass
class ConfigurationException(ResourceAlreadyExistsException):
pass
class ConcurrentModificationException(ResourceAlreadyExistsException):
pass
class AccessDeniedException(ResourceAlreadyExistsException):
pass
class DeleteTargetResult(BaseModel):
pass
class ResourceNotFoundException(ResourceAlreadyExistsException):
pass
class InvalidNextTokenException(ResourceAlreadyExistsException):
pass
class TargetType(BaseModel):
__root__: Annotated[str, Field(regex='^[A-Za-z]+$')]
class TargetAddress(BaseModel):
__root__: Annotated[SecretStr, Field(max_length=320, min_length=1)]
class UntagResourceResult(DeleteTargetResult):
pass
class TagKey(BaseModel):
__root__: Annotated[
str,
Field(
max_length=128, min_length=1, regex='^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$'
),
]
class UpdateNotificationRuleResult(DeleteTargetResult):
pass
class ClientRequestToken(BaseModel):
__root__: Annotated[str, Field(max_length=256, min_length=1, regex='^[\\w:/-]+$')]
class NotificationRuleName(BaseModel):
__root__: Annotated[
SecretStr, Field(max_length=64, min_length=1, regex='[A-Za-z0-9\\-_ ]+$')
]
class EventTypeIds(BaseModel):
__root__: List[EventTypeId]
class NotificationRuleResource(BaseModel):
__root__: Annotated[
str, Field(regex='^arn:aws[^:\\s]*:[^:\\s]*:[^:\\s]*:[0-9]{12}:[^\\s]+$')
]
class DetailType(Enum):
BASIC = 'BASIC'
FULL = 'FULL'
class Tags(BaseModel):
pass
class Config:
extra = Extra.allow
class NotificationRuleStatus(Enum):
ENABLED = 'ENABLED'
DISABLED = 'DISABLED'
class NotificationRuleArn(BaseModel):
__root__: Annotated[
str,
Field(
regex='^arn:aws[^:\\s]*:codestar-notifications:[^:\\s]+:\\d{12}:notificationrule\\/(.*\\S)?$'
),
]
class CreatedTimestamp(BaseModel):
__root__: datetime
class DeleteNotificationRuleRequest(BaseModel):
Arn: NotificationRuleArn
class ForceUnsubscribeAll(BaseModel):
__root__: bool
class DeleteTargetRequest(BaseModel):
TargetAddress: TargetAddress
ForceUnsubscribeAll: Optional[ForceUnsubscribeAll] = None
class DescribeNotificationRuleRequest(BaseModel):
Arn: NotificationRuleArn
class NotificationRuleCreatedBy(BaseModel):
__root__: Annotated[str, Field(min_length=1)]
class LastModifiedTimestamp(CreatedTimestamp):
pass
class EventTypeName(BaseModel):
__root__: str
class ServiceName(EventTypeName):
pass
class ResourceType(BaseModel):
__root__: Annotated[str, Field(min_length=1, regex='^([a-zA-Z0-9-])+$')]
class ListEventTypesFilterName(Enum):
RESOURCE_TYPE = 'RESOURCE_TYPE'
SERVICE_NAME = 'SERVICE_NAME'
class ListEventTypesFilterValue(EventTypeName):
pass
class NextToken(BaseModel):
__root__: Annotated[str, Field(regex='^[\\w/+=]+$')]
class MaxResults(BaseModel):
__root__: Annotated[int, Field(ge=1.0, le=100.0)]
class ListNotificationRulesFilterName(Enum):
EVENT_TYPE_ID = 'EVENT_TYPE_ID'
CREATED_BY = 'CREATED_BY'
RESOURCE = 'RESOURCE'
TARGET_ADDRESS = 'TARGET_ADDRESS'
class ListNotificationRulesFilterValue(EventTypeName):
pass
class ListTagsForResourceRequest(BaseModel):
Arn: NotificationRuleArn
class ListTargetsFilterName(Enum):
TARGET_TYPE = 'TARGET_TYPE'
TARGET_ADDRESS = 'TARGET_ADDRESS'
TARGET_STATUS = 'TARGET_STATUS'
class ListTargetsFilterValue(EventTypeName):
pass
class NotificationRuleId(BaseModel):
__root__: Annotated[str, Field(max_length=40, min_length=1)]
class TagKeys(BaseModel):
__root__: List[TagKey]
class TagResourceRequest(BaseModel):
Arn: NotificationRuleArn
Tags: Tags
class TargetStatus(Enum):
PENDING = 'PENDING'
ACTIVE = 'ACTIVE'
UNREACHABLE = 'UNREACHABLE'
INACTIVE = 'INACTIVE'
DEACTIVATED = 'DEACTIVATED'
class TargetSummary(BaseModel):
"""
Information about the targets specified for a notification rule.
"""
TargetAddress: Optional[TargetAddress] = None
TargetType: Optional[TargetType] = None
TargetStatus: Optional[TargetStatus] = None
class UnsubscribeRequest(BaseModel):
Arn: NotificationRuleArn
TargetAddress: TargetAddress
class UntagResourceRequest(BaseModel):
Arn: NotificationRuleArn
TagKeys: TagKeys
class CreateNotificationRuleResult(BaseModel):
Arn: Optional[NotificationRuleArn] = None
class Target(BaseModel):
"""
Information about the SNS topics associated with a notification rule.
"""
TargetType: Optional[TargetType] = None
TargetAddress: Optional[TargetAddress] = None
class DeleteNotificationRuleResult(CreateNotificationRuleResult):
pass
class ListEventTypesFilter(BaseModel):
"""
Information about a filter to apply to the list of returned event types. You can filter by resource type or service name.
"""
Name: ListEventTypesFilterName
Value: ListEventTypesFilterValue
class ListNotificationRulesFilter(BaseModel):
"""
Information about a filter to apply to the list of returned notification rules. You can filter by event type, owner, resource, or target.
"""
Name: ListNotificationRulesFilterName
Value: ListNotificationRulesFilterValue
class ListTagsForResourceResult(BaseModel):
Tags: Optional[Tags] = None
class ListTargetsFilter(BaseModel):
"""
Information about a filter to apply to the list of returned targets. You can filter by target type, address, or status. For example, to filter results to notification rules that have active Amazon SNS topics as targets, you could specify a ListTargetsFilter Name as TargetType and a Value of SNS, and a Name of TARGET_STATUS and a Value of ACTIVE.
"""
Name: ListTargetsFilterName
Value: ListTargetsFilterValue
class SubscribeResult(CreateNotificationRuleResult):
pass
class TagResourceResult(ListTagsForResourceResult):
pass
class UnsubscribeResult(BaseModel):
Arn: NotificationRuleArn
class Targets2(BaseModel):
__root__: Annotated[List[Target], Field(max_items=10)]
class CreateNotificationRuleRequest(BaseModel):
Name: NotificationRuleName
EventTypeIds: EventTypeIds
Resource: NotificationRuleResource
Targets: Targets2
DetailType: DetailType
ClientRequestToken: Optional[ClientRequestToken] = None
Tags: Optional[Tags] = None
Status: Optional[NotificationRuleStatus] = None
class TargetsBatch(BaseModel):
__root__: List[TargetSummary]
class EventTypeSummary(BaseModel):
"""
Returns information about an event that has triggered a notification rule.
"""
EventTypeId: Optional[EventTypeId] = None
ServiceName: Optional[ServiceName] = None
EventTypeName: Optional[EventTypeName] = None
ResourceType: Optional[ResourceType] = None
class ListEventTypesFilters(BaseModel):
__root__: List[ListEventTypesFilter]
class ListEventTypesRequest(BaseModel):
Filters: Optional[ListEventTypesFilters] = None
NextToken: Optional[NextToken] = None
MaxResults: Optional[MaxResults] = None
class ListNotificationRulesFilters(BaseModel):
__root__: List[ListNotificationRulesFilter]
class ListNotificationRulesRequest(BaseModel):
Filters: Optional[ListNotificationRulesFilters] = None
NextToken: Optional[NextToken] = None
MaxResults: Optional[MaxResults] = None
class ListTargetsFilters(BaseModel):
__root__: List[ListTargetsFilter]
class ListTargetsRequest(BaseModel):
Filters: Optional[ListTargetsFilters] = None
NextToken: Optional[NextToken] = None
MaxResults: Optional[MaxResults] = None
class NotificationRuleSummary(BaseModel):
"""
Information about a specified notification rule.
"""
Id: Optional[NotificationRuleId] = None
Arn: Optional[NotificationRuleArn] = None
class SubscribeRequest(BaseModel):
Arn: NotificationRuleArn
Target: Target
ClientRequestToken: Optional[ClientRequestToken] = None
class UpdateNotificationRuleRequest(BaseModel):
Arn: NotificationRuleArn
Name: Optional[NotificationRuleName] = None
Status: Optional[NotificationRuleStatus] = None
EventTypeIds: Optional[EventTypeIds] = None
Targets: Optional[Targets2] = None
DetailType: Optional[DetailType] = None
class ListTargetsResult(BaseModel):
Targets: Optional[TargetsBatch] = None
NextToken: Optional[NextToken] = None
class EventTypeBatch(BaseModel):
__root__: List[EventTypeSummary]
class NotificationRuleBatch(BaseModel):
__root__: List[NotificationRuleSummary]
class DescribeNotificationRuleResult(BaseModel):
Arn: NotificationRuleArn
Name: Optional[NotificationRuleName] = None
EventTypes: Optional[EventTypeBatch] = None
Resource: Optional[NotificationRuleResource] = None
Targets: Optional[TargetsBatch] = None
DetailType: Optional[DetailType] = None
CreatedBy: Optional[NotificationRuleCreatedBy] = None
Status: Optional[NotificationRuleStatus] = None
CreatedTimestamp: Optional[CreatedTimestamp] = None
LastModifiedTimestamp: Optional[LastModifiedTimestamp] = None
Tags: Optional[Tags] = None
class ListEventTypesResult(BaseModel):
EventTypes: Optional[EventTypeBatch] = None
NextToken: Optional[NextToken] = None
class ListNotificationRulesResult(BaseModel):
NextToken: Optional[NextToken] = None
NotificationRules: Optional[NotificationRuleBatch] = None
| 23.582949 | 351 | 0.740303 |
4a2630ae576f412f14ba699d698800f2224c3489 | 1,460 | py | Python | pset8/hello/request.py | mar1zzo/cs50-havard-edx | d20bf4dbe45c2633f70704f0dbed7904a2b22841 | [
"MIT"
] | 1 | 2021-01-12T02:54:01.000Z | 2021-01-12T02:54:01.000Z | pset8/hello/request.py | mar1zzo/cs50-havard-edx | d20bf4dbe45c2633f70704f0dbed7904a2b22841 | [
"MIT"
] | null | null | null | pset8/hello/request.py | mar1zzo/cs50-havard-edx | d20bf4dbe45c2633f70704f0dbed7904a2b22841 | [
"MIT"
] | null | null | null | # program to create a web page with Flask
import random
from flask import Flask, render_template, request
# create the variable that instantiates the Flask library
app = Flask(__name__)
# create our first, default route
@app.route("/")
# create a simple function that returns a text message on the screen
def index():
return render_template("index.html")
# create a second simple route
@app.route("/hello")
def hello():
name = request.args.get("name")
if not name:
return render_template("failure.html")
return render_template("hello.html", name=name)
# What index.html looked like:
# <!DOCTYPE html>
# <html lang="en">
# <head>
# <title>Hello!</title>
# </head>
# <body>
# <form action="/hello">
# <input name="name" type="text">
# <input type="submit">
# </form>
# </body>
# </html>
# What hello.html looked like:
# <!DOCTYPE html>
# <html lang="en">
# <head>
# <title>Hello!</title>
# </head>
# <body>
# <h1>
# Hello, {{ name }}!
# </h1>
# </body>
# </html>
# What failure.html looked like:
# <!DOCTYPE html>
# <html lang="en">
# <head>
# <title>Hello!</title>
# <style>
# body
# {
# color: red;
# }
# </style>
# </head>
# <body>
# <h1>
# You must provide a name!
# </h1>
# </body>
# </html> | 20.857143 | 65 | 0.528082 |
4a2630dbbfd26626d96d040b4e85d162afa93ac2 | 11,512 | py | Python | apis/voca.py | sighill/shade_app | 2b42d6411bc6e292b112a5e6be3598de8edadee1 | [
"MIT"
] | null | null | null | apis/voca.py | sighill/shade_app | 2b42d6411bc6e292b112a5e6be3598de8edadee1 | [
"MIT"
] | null | null | null | apis/voca.py | sighill/shade_app | 2b42d6411bc6e292b112a5e6be3598de8edadee1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# voca.py
# Python 3.4.3
# Django 1.9
# Script gathering python3 functions used to modify the files
# of the ###_raw folders
#####################################################################
# README !
# The following functions are meant to be called from a custom
# script written for each mission.
# To import these functions easily, see the first snippet
# at the bottom of this script.
# Implement a log that tracks the steps and the processing
# of the data. Keep it global so that it accumulates the logs
# of each function.
# Useful code snippets are available at the end of this document.
# SUBLIMETEXT REMINDER:
# To fold all the code do ctrl+a ctrl+k ctrl+1
#####################################################################
log = ''
headerConstant = 'HEADER;'
#####################################################################
def WorkPath():
"""
WorkPath 2016.04.09
Using this function makes it possible to use relative
paths between scripts and folders.
This function automatically determines where the script
is being run:
Either on a PC, working locally,
Or on the server, through the Django shell.
"""
# TODO: replace the relative-folder snippet of voca.py with this
# function, which automatically detects
# the working directory!
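# Illustrative sketch only (an assumption, not part of the original code):
# the detection could simply test for the local project folder used in the
# import snippet at the bottom of this script, e.g.:
# import os
# def WorkPath():
#     local_dir = 'D:/Projets/shade_django/apis/'
#     if os.path.isdir(local_dir):
#         return local_dir                                 # local PC
#     return os.path.dirname(os.path.abspath(__file__))    # server / Django shell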
def AddLog(log_level , str_to_add):
"""
AddLog 2016.04.08
This function handles appending one line to a report.
It also handles adding HTML brackets used to format
the log at the django template level.
log_level is either title , subtitle , corpus
"""
global log
separator = '#'
# If title, big separator and str_to_add
if log_level == 'title':
log = log + separator*70 + '\n' + str_to_add + '\n'
# If subtitle, 4 space indent, medium separator, and str_to_add
elif log_level == 'subtitle':
log = log + ' ' + separator*35 + '\n' + ' ' + str_to_add + '\n'
# If corpus, 8 spaces indent and str_to_add
elif log_level == 'corpus':
log = log + ' ' + str_to_add + '\n'
# If typo
else:
log = log + 'WARNING : bad log_level, using corpus mode'
log = log + ' ' + str_to_add + '\n'
return log
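# Illustrative example (not in the original file): after the calls below the
# global ``log`` string would hold a '#'-ruled title line, an indented
# subtitle line and a further indented corpus line.
# AddLog('title' , 'Mission 042')
# AddLog('subtitle' , 'Cleaning raw names')
# AddLog('corpus' , '12 lines kept')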
#####################################################################
def OutFileCreate(out_path,raw_filename,ref_list,header):
"""
OutFileCreate 2016.04.08
Creates the out and log files in the right place and fills them.
See shade/README.md part II.2 for details.
Tip: call this function at the end of the procedure.
This function expects four arguments:
The absolute path to the out folder (WIP!)
The ###_raw file name (expected type: string)
The refined text ref_list (expected type: list of strings)
The header to prepend to the out file (expected type: string)
"""
# Global variables
global log
# Local variables
file_id = raw_filename[:3]
# Build the header
headerComplete = ''
if( header == '' ):
AddLog('corpus' , 'Fichier créé sans header')
else:
headerComplete = headerConstant + header + '\n'
AddLog('corpus' , 'Le header sera: {}'.format( headerComplete ) )
# Creation of the ###_out file
# NB: the w+ argument overwrites the old file if it exists!
AddLog('corpus' , 'Création du fichier {}_out'.format(file_id))
with open(out_path + file_id + '_out' , 'w+') as ofi_out:
# Insert the header only if it is not empty
if( headerComplete != '' ):
ofi_out.write( headerComplete )
ofi_out.write('\n'.join(ref_list))
ofi_out.close()
# Creation of the ###_log file
# NB: the w+ argument overwrites the old file if it exists!
AddLog('corpus' , 'Création du fichier {}_log'.format(file_id))
with open(out_path + file_id + '_log' , 'w+') as ofi_log:
ofi_log.write(log)
ofi_log.close()
#####################################################################
def StringFormatter(raw_str):
"""
StringFormatter 2016.04.08
Function that modifies a string of any kind
to format it according to the primus app standard.
Primus standard: a capital letter at the start of the name; for a
compound name, a capital letter at the start of each part.
"""
# TODO: add a special-character converter.
# Example: ` --> '
# Global variables
global log
# Formatting: 'FoO-BAr' --> 'Foo-Bar'
ref_str = raw_str.title()
# Write to the log
AddLog( 'corpus' , '{} --> {}.'.format(raw_str , ref_str))
return ref_str
#####################################################################
def StrValidator(list): # WIP DO NOT USE
"""
StrValidator 2016.04.09
This function allows each entry of a list to be
validated quickly by hand.
It prompts each line and waits for an input.
Empty input: validated
Non-empty input: eliminated
"""
# TODO: correction feature: the while not line_corr_valid_ok loop
# is dirty and not robust. To be improved!
# Global variables
global log
# Local variables
out_list = []
counter_valid = 0
counter_corr = 0
counter_eliminated = 0
print('StrValidator - inputs possibles : \n Vide : string validé.\n c : correction manuelle.\n Tout autre input : string éliminé')
# For each line of the list, prompt and wait for an input.
for line in list:
# Ask for an input
key_input = input(line + ' : ')
# If the input is empty the string is validated and kept
if not key_input :
out_list.append(line)
counter_valid += 1
# If correction, ask for the new value, confirm it, and add it.
elif key_input in ['c','C']:
line_corr_valid_ok = False
while not line_corr_valid_ok:
line_corr = input('Correction de {}: '.format(line))
line_corr_valid = input('Validez vous {} ? o/n : '.format(line_corr))
if line_corr_valid in ['o','O','y','Y']:
                    out_list.append(line_corr)
line_corr_valid_ok = True
counter_corr += 1
else:
continue
# If the input is neither empty nor 'c', the string is confirmed as eliminated.
else:
print('String éliminé.')
counter_eliminated += 1
# Append to the log
AddLog('corpus', 'Lignes validées : {}'.format(counter_valid))
AddLog('corpus', 'Lignes corrigées : {}'.format(counter_corr))
AddLog('corpus', 'Lignes éliminées : {}'.format(counter_eliminated))
return out_list
#####################################################################
def OdditiesFinder(raw_list):
'''
OdditiesFinder 2016.04.12
Searches the input string for unexpected characters
that are not acceptable in the primus db.
Each line (string) is turned into a list of letters (list).
Each letter is compared against the list of accepted letters.
If there is a problem, prompt for the replacement letter.
Leave empty to delete the offending letter.
'''
# TODO: test the function with an insert of several letters,
# for the case of replacing one letter --> several letters
# Global variables
global log
# Local variables
ref_line_list = []
acceptable_char = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V',
'W', 'X', 'Y', 'Z', '\'' , ' ' ]
# For each line, deconstruct it letter by letter and
# compare each letter against a custom dictionary
for line in raw_list:
# Turn the string into a list of letters
letter_list = list(line)
curseur = 0
for letter in letter_list:
if letter not in acceptable_char:
replacement_letter = input('Bizarrerie trouvée : \' {} \' dans \' {} \'. Remplacer par : '.format(letter , line))
letter_list[curseur] = replacement_letter
AddLog('corpus' , '{} : Modification de la lettre : {} en {}'.format(line , letter , replacement_letter))
else:
pass
curseur += 1
# Rebuild the string from the list of letters
line = ''.join(letter_list)
# Add the string to the output list
ref_line_list.append(line)
return ref_line_list
#####################################################################
# USEFUL CODE SNIPPETS
#####################################################################
'''
# Add the local working directory in order to work with relative
# file paths between scripts and import the voca functions easily
import sys
sys.path.insert(0 , 'D:/Projets/shade_django/apis/')
from voca import AddLog , StringFormatter , OutFileCreate
'''
'''
# create a list containing every line of the file
line_list = ofi.read().splitlines()
# read() imports the lines
# splitlines() removes the end-of-line character
'''
# Open a file and extract its lines without the special
# \n character, which means a line break:
# ofi = open('path_to_file'+'file_name' , 'option')
# TIP: FOLD THIS COMMENT! option is either:
# r Opens a file for reading only. The file pointer is placed
# at the beginning of the file. This is the default mode.
# r+ Opens a file for both reading and writing. The file pointer
# placed at the beginning of the file.
# w Opens a file for writing only. Overwrites the file if the
# file exists. If the file does not exist, creates a
# new file for writing.
# w+ Opens a file for both writing and reading. Overwrites the
# existing file if the file exists. If the file does
# not exist, creates a new file for reading and writing.
# wb+ Opens a file for both writing and reading in binary format.
# Overwrites the existing file if the file exists.
# If the file does not exist, creates a new file for
# reading and writing.
# a Opens a file for appending. The file pointer is at the end
# of the file if the file exists. That is, the file is
# in the append mode. If the file does not exist, it
# creates a new file for writing.
# a+ Opens a file for both appending and reading. The file
# pointer is at the end of the file if the file exists.
# The file opens in the append mode. If the file does
# not exist, it creates a new file for reading and writing.
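# Minimal illustrative example of the modes above (file names are placeholders):
# with open(out_path + '042_out' , 'w+') as ofi:      # create / overwrite
#     ofi.write('\n'.join(ref_list))
# with open(out_path + '042_out' , 'r') as ofi:       # read back, no '\n'
#     line_list = ofi.read().splitlines()
# with open(out_path + '042_log' , 'a+') as ofi:      # append to the log file
#     ofi.write(log)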
| 39.560137 | 138 | 0.564541 |
4a2631bb56bcc0e982f037bdd17f161ee9a918f4 | 5,436 | py | Python | coffelli/dashboard/utils.py | coffeestudio/django-coffelli | cd0e6fbcfc169d8335351072c5816fb1d1639429 | [
"BSD-3-Clause"
] | null | null | null | coffelli/dashboard/utils.py | coffeestudio/django-coffelli | cd0e6fbcfc169d8335351072c5816fb1d1639429 | [
"BSD-3-Clause"
] | null | null | null | coffelli/dashboard/utils.py | coffeestudio/django-coffelli | cd0e6fbcfc169d8335351072c5816fb1d1639429 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
"""
Admin ui common utilities.
"""
# PYTHON IMPORTS
from __future__ import unicode_literals
from fnmatch import fnmatch
from importlib import import_module
# DJANGO IMPORTS
from django.conf import settings
from django.contrib import admin
from django.core.urlresolvers import reverse
def _get_dashboard_cls(dashboard_cls, context):
if isinstance(dashboard_cls, dict):
curr_url = context.get('request').path
for key in dashboard_cls:
admin_site_mod, admin_site_inst = key.rsplit('.', 1)
admin_site_mod = import_module(admin_site_mod)
admin_site = getattr(admin_site_mod, admin_site_inst)
admin_url = reverse('%s:index' % admin_site.name)
if curr_url.startswith(admin_url):
mod, inst = dashboard_cls[key].rsplit('.', 1)
mod = import_module(mod)
return getattr(mod, inst)
else:
mod, inst = dashboard_cls.rsplit('.', 1)
mod = import_module(mod)
return getattr(mod, inst)
raise ValueError('Dashboard matching "%s" not found' % dashboard_cls)
def get_index_dashboard(context):
"""
Returns the admin dashboard defined in settings (or the default one).
"""
return _get_dashboard_cls(getattr(
settings,
'GRAPPELLI_INDEX_DASHBOARD',
'coffelli.dashboard.dashboards.DefaultIndexDashboard'
), context)()
def get_admin_site(context=None, request=None):
dashboard_cls = getattr(
settings,
'GRAPPELLI_INDEX_DASHBOARD',
'coffelli.dashboard.dashboards.DefaultIndexDashboard'
)
if isinstance(dashboard_cls, dict):
if context:
request = context.get('request')
curr_url = request.META['PATH_INFO']
for key in dashboard_cls:
mod, inst = key.rsplit('.', 1)
mod = import_module(mod)
admin_site = getattr(mod, inst)
admin_url = reverse('%s:index' % admin_site.name)
if curr_url.startswith(admin_url):
return admin_site
else:
return admin.site
raise ValueError('Admin site matching "%s" not found' % dashboard_cls)
def get_admin_site_name(context):
return get_admin_site(context).name
def get_avail_models(request):
""" Returns (model, perm,) for all models user can possibly see """
items = []
admin_site = get_admin_site(request=request)
for model, model_admin in admin_site._registry.items():
perms = model_admin.get_model_perms(request)
if True not in perms.values():
continue
items.append((model, perms,))
return items
def filter_models(request, models, exclude):
"""
Returns (model, perm,) for all models that match models/exclude patterns
and are visible by current user.
"""
items = get_avail_models(request)
included = []
full_name = lambda model: '%s.%s' % (model.__module__, model.__name__)
# I believe that the implemented
# O(len(patterns)*len(matched_patterns)*len(all_models))
# algorithm is fine for model lists because they are small and admin
# performance is not a bottleneck. If it is not the case then the code
# should be optimized.
if len(models) == 0:
included = items
else:
for pattern in models:
pattern_items = []
for item in items:
model, perms = item
if fnmatch(full_name(model), pattern) and item not in included:
pattern_items.append(item)
pattern_items.sort(key=lambda x: str(x[0]._meta.verbose_name_plural.encode('utf-8')))
included.extend(pattern_items)
result = included[:]
for pattern in exclude:
for item in included:
model, perms = item
if fnmatch(full_name(model), pattern):
try:
result.remove(item)
except ValueError: # if the item was already removed skip
pass
return result
class AppListElementMixin(object):
"""
Mixin class used by both the AppListDashboardModule and the
AppListMenuItem (to honor the DRY concept).
"""
def _visible_models(self, request):
included = self.models[:]
excluded = self.exclude[:]
if not self.models and not self.exclude:
included = ["*"]
return filter_models(request, included, excluded)
def _get_admin_app_list_url(self, model, context):
"""
Returns the admin change url.
"""
app_label = model._meta.app_label
return reverse('%s:app_list' % get_admin_site_name(context),
args=(app_label,))
def _get_admin_change_url(self, model, context):
"""
Returns the admin change url.
"""
app_label = model._meta.app_label
return reverse('%s:%s_%s_changelist' % (get_admin_site_name(context),
app_label,
model.__name__.lower()))
def _get_admin_add_url(self, model, context):
"""
Returns the admin add url.
"""
app_label = model._meta.app_label
return reverse('%s:%s_%s_add' % (get_admin_site_name(context),
app_label,
model.__name__.lower()))
| 32.746988 | 97 | 0.611111 |
4a2633ab6e0338e6ed92ec16f91938e7851f53a3 | 644 | py | Python | gemlib/mcmc/__init__.py | ThomFNC/covid19uk | ca7e700259b23e4f909b4206d1c160803b5cc0a0 | [
"MIT"
] | null | null | null | gemlib/mcmc/__init__.py | ThomFNC/covid19uk | ca7e700259b23e4f909b4206d1c160803b5cc0a0 | [
"MIT"
] | null | null | null | gemlib/mcmc/__init__.py | ThomFNC/covid19uk | ca7e700259b23e4f909b4206d1c160803b5cc0a0 | [
"MIT"
] | null | null | null | """MCMC kernel addons"""
from gemlib.mcmc.adaptive_random_walk_metropolis import (
AdaptiveRandomWalkMetropolis,
)
from gemlib.mcmc.event_time_mh import (
UncalibratedEventTimesUpdate,
TransitionTopology,
)
from gemlib.mcmc.gibbs_kernel import GibbsKernel
from gemlib.mcmc.multi_scan_kernel import MultiScanKernel
from gemlib.mcmc.h5_posterior import Posterior
from gemlib.mcmc.occult_events_mh import UncalibratedOccultUpdate
__all__ = [
"AdaptiveRandomWalkMetropolis",
"TransitionTopology",
"UncalibratedEventTimesUpdate",
"GibbsKernel",
"MultiScanKernel",
"Posterior",
"UncalibratedOccultUpdate",
]
| 26.833333 | 65 | 0.791925 |
4a2634db250a238aeb5f072c2b697dfee398fd98 | 1,329 | py | Python | app/modules/users/controller.py | awesomedeba10/ocr-automation-api | 23e1773de7db74cae323c948d2050815d51e8724 | [
"BSD-2-Clause"
] | null | null | null | app/modules/users/controller.py | awesomedeba10/ocr-automation-api | 23e1773de7db74cae323c948d2050815d51e8724 | [
"BSD-2-Clause"
] | null | null | null | app/modules/users/controller.py | awesomedeba10/ocr-automation-api | 23e1773de7db74cae323c948d2050815d51e8724 | [
"BSD-2-Clause"
] | null | null | null | from re import template
from flask import Blueprint, json, request
import os
from app import app
from app.helper import *
from app.middleware import login_required, param_required
from app.modules.users.schema import *
user_blueprint = Blueprint('user_blueprint', __name__)
@user_blueprint.route('/create', methods=['POST'])
@param_required(UserCreationSchema())
def create():
user_info = dict(request.form)
user_info['user_id'] = return_random()
user_info['templates'] = {}
with open(os.path.join(app.config.get('STORAGE_DIR'),'users', user_info['user_id']+'.json'), 'w') as jsonFile:
json.dump(user_info, jsonFile)
return json.jsonify({
'status': True,
'message': 'User ID Created Successfully',
'response': {
'user_id': user_info['user_id']
}
})
@user_blueprint.route('/profile', methods=['POST'])
@param_required(UserProfileSchema())
def profile():
user_id = request.form['user_id']
try:
with open(os.path.join(app.config.get('STORAGE_DIR'),'users', user_id +'.json'), 'r') as jsonFile:
return json.jsonify({
'status': True,
'response': json.load(jsonFile)
})
except FileNotFoundError:
return json.jsonify(format_error({"user_id": ["Invalid User Id"]})), 400
| 31.642857 | 114 | 0.650865 |
4a2635d4f40d98d1427e86851b92975e57801958 | 792 | py | Python | tests/integration/framework/lwm2m_test.py | cerkiewny/Anjay | b38ac519225cb27369c9a411f3f96899075c1216 | [
"Apache-2.0"
] | 161 | 2017-02-08T12:07:22.000Z | 2022-03-20T11:10:10.000Z | tests/integration/framework/lwm2m_test.py | cerkiewny/Anjay | b38ac519225cb27369c9a411f3f96899075c1216 | [
"Apache-2.0"
] | 53 | 2017-03-15T12:58:40.000Z | 2022-01-20T09:31:54.000Z | tests/integration/framework/lwm2m_test.py | cerkiewny/Anjay | b38ac519225cb27369c9a411f3f96899075c1216 | [
"Apache-2.0"
] | 68 | 2017-02-21T15:02:02.000Z | 2022-03-19T06:24:21.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2017-2021 AVSystem <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import test_suite
from .test_utils import *
from . import lwm2m
from .lwm2m import coap
from .lwm2m.server import Lwm2mServer
from .lwm2m.messages import *
| 33 | 74 | 0.756313 |
4a26360940cdee6d81cd4845c33b1291169f4f13 | 882 | py | Python | docs/en/conf.py | guanglinchen/esp-moonlight | 285df0c5edbda9e7cbbcd6fc6b6185974887b797 | [
"Apache-2.0"
] | 2 | 2021-03-31T22:14:11.000Z | 2021-12-03T03:40:34.000Z | docs/en/conf.py | guanglinchen/esp-moonlight | 285df0c5edbda9e7cbbcd6fc6b6185974887b797 | [
"Apache-2.0"
] | null | null | null | docs/en/conf.py | guanglinchen/esp-moonlight | 285df0c5edbda9e7cbbcd6fc6b6185974887b797 | [
"Apache-2.0"
] | 2 | 2021-04-06T01:59:39.000Z | 2021-12-11T13:26:53.000Z | # -*- coding: utf-8 -*-
#
# English Language RTD & Sphinx config file
#
# Uses ../conf_common.py for most non-language-specific settings.
# Importing conf_common adds all the non-language-specific
# parts to this conf module
import sys, os
sys.path.insert(0, os.path.abspath('../'))
from conf_common import *
# General information about the project.
project = u'ESP-Jumpstart'
copyright = u'2018 - 2019, Espressif Systems (Shanghai) PTE LTD'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ReadtheDocsTemplate.tex', u'ESP-Jumpstart',
u'2018 - 2019, Espressif Systems (Shanghai) PTE LTD', 'manual'),
]
| 31.5 | 74 | 0.730159 |
4a2637d006c0ea50e9840efd717ab9a632a11306 | 947 | py | Python | package/cloudshell/cp/azure/common/parsers/security_group_parser.py | yaroslavNqualisystems/Azure-Shell | 51e376fa60b8276570cc7763b1b1be70655e0b4d | [
"Apache-2.0"
] | 5 | 2016-09-08T08:33:47.000Z | 2020-02-10T12:31:15.000Z | package/cloudshell/cp/azure/common/parsers/security_group_parser.py | yaroslavNqualisystems/Azure-Shell | 51e376fa60b8276570cc7763b1b1be70655e0b4d | [
"Apache-2.0"
] | 505 | 2016-08-09T07:41:03.000Z | 2021-02-08T20:26:46.000Z | package/cloudshell/cp/azure/common/parsers/security_group_parser.py | yaroslavNqualisystems/Azure-Shell | 51e376fa60b8276570cc7763b1b1be70655e0b4d | [
"Apache-2.0"
] | 5 | 2016-12-21T12:52:55.000Z | 2021-07-08T09:50:42.000Z | from cloudshell.cp.azure.common.parsers.port_group_attribute_parser import PortGroupAttributeParser
from cloudshell.cp.azure.models.app_security_groups_model import SecurityGroupConfiguration
class SecurityGroupParser(object):
def __init__(self):
pass
@staticmethod
def parse_security_group_configurations(data):
"""
:param [list] data:
:rtype list[SecurityGroupConfiguration]
"""
if not isinstance(data, list):
return None
parsed_data = []
for configuration in data:
sg_configuration = SecurityGroupConfiguration()
sg_configuration.subnet_id = configuration.subnetId
rules = configuration.rules
sg_configuration.rules = PortGroupAttributeParser.parse_security_group_rules_to_port_data(rules)
parsed_data.append(sg_configuration)
return parsed_data if (len(parsed_data) > 0) else None
| 33.821429 | 108 | 0.704329 |
4a26381c58fbbdf536b5b4e3e172091b13c984d1 | 2,602 | py | Python | main_lqr.py | longwang-jhu/Complex-Step-SPSA | 0db187d0add89c64c3ca11d8cfd1ee372b82538b | [
"MIT"
] | 1 | 2021-10-15T08:09:33.000Z | 2021-10-15T08:09:33.000Z | main_lqr.py | longwang-jhu/Complex-Step-SPSA | 0db187d0add89c64c3ca11d8cfd1ee372b82538b | [
"MIT"
] | null | null | null | main_lqr.py | longwang-jhu/Complex-Step-SPSA | 0db187d0add89c64c3ca11d8cfd1ee372b82538b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Last updated: 2021-03-14
@author: Long Wang
"""
from datetime import date
import numpy as np
import matplotlib.pyplot as plt
from utility import norm_error
# import algorithms
from algorithms.spsa import SPSA
from algorithms.cs_spsa import CsSPSA
# import objective
from objectives.lqr import LQR
###
today = date.today()
np.random.seed(100)
p = 12; T = 100
n = 4; m = 3
x_0 = 20 * np.array([1, 2, -1, -0.5]).reshape(n,1)
LQR_model = LQR(p=p, T=T, x_0=x_0)
K_star = np.array([
[1.60233232e-01, -1.36227805e-01, -9.93576677e-02, -4.28244630e-02],
[7.47596033e-02, 9.05753832e-02, 7.46951286e-02, -1.53947620e-01],
[3.65372978e-01, -2.59862175e-04, 5.91522023e-02, 8.25660846e-01]])
theta_star = K_star.flatten()
# loss_star = loss_true(theta_star)
loss_star = 4149.38952236
def loss_true(theta):
return LQR_model.compute_cost(theta)
def loss_noisy(theta):
return LQR_model.compute_cost_noisy(theta)
# inital value
K_0 = np.ones(K_star.shape) * 2
theta_0 = K_0.flatten()
loss_0 = loss_true(theta_0)
print('loss_0:', loss_0)
# parameters
alpha = 0.668; gamma = 0.167
iter_num = 500; rep_num = 5
print('running SPSA')
SPSA_solver = SPSA(a=0.00005, c=0.5, A=100, alpha=alpha, gamma=gamma,
iter_num=iter_num, rep_num=rep_num,
theta_0=theta_0, loss_true=loss_true, loss_noisy=loss_noisy,
record_loss_flag=True)
# SPSA_solver.train()
SPSA_loss_error = norm_error.get_norm_loss_error(SPSA_solver.loss_ks, loss_0, loss_star)
plt.yscale('log')
plt.plot(SPSA_loss_error, 'k-')
print('running CsSPSA')
CsSPSA_solver = CsSPSA(a=0.0001, c=0.5, A=100, alpha=alpha, gamma=gamma,
iter_num=iter_num, rep_num=rep_num,
theta_0=theta_0, loss_true=loss_true, loss_noisy=loss_noisy,
record_loss_flag=True)
CsSPSA_solver.train()
CsSPSA_loss_error = norm_error.get_norm_loss_error(CsSPSA_solver.loss_ks, loss_0, loss_star)
plt.yscale('log')
plt.plot(CsSPSA_loss_error, 'k-')
# plot loss
fig = plt.figure()
ax = fig.add_subplot()
plt.grid()
line_SPSA, = ax.plot(SPSA_loss_error, 'k--', dashes=(5,5))
line_SPSA_for_legend, = ax.plot([1], 'k--')
line_CS_SPSA, = ax.plot(CsSPSA_loss_error, 'k-')
plt.xlim(xmin=0, xmax=iter_num)
plt.yscale("log")
plt.legend((line_SPSA_for_legend, line_CS_SPSA),
("SPSA", "CS-SPSA"), loc="best")
plt.xlabel("Number of Iterations")
plt.ylabel("Normalized Errors in Loss (log scale)")
plt.savefig("figures/LQR-loss-" + str(today) + ".pdf")#, bbox_inches='tight')
plt.show() | 29.235955 | 92 | 0.688701 |
4a263948777fca6c046fd297740329bc5081ddef | 12,841 | py | Python | flow/networks/bottleneck.py | cuijiaxun/MITC | a226308424237a69b5e938baf72949de9b1b4bf2 | [
"MIT"
] | 1 | 2021-06-17T03:25:13.000Z | 2021-06-17T03:25:13.000Z | flow/networks/bottleneck.py | cuijiaxun/MITC | a226308424237a69b5e938baf72949de9b1b4bf2 | [
"MIT"
] | null | null | null | flow/networks/bottleneck.py | cuijiaxun/MITC | a226308424237a69b5e938baf72949de9b1b4bf2 | [
"MIT"
] | 1 | 2021-03-18T16:20:57.000Z | 2021-03-18T16:20:57.000Z | """Contains the bottleneck network class."""
from flow.core.params import InitialConfig
from flow.core.params import TrafficLightParams
from flow.networks.base import Network
import numpy as np
ADDITIONAL_NET_PARAMS = {
# the factor multiplying number of lanes.
"scaling": 1,
# edge speed limit
'speed_limit': 23
}
class BottleneckNetwork(Network):
"""Network class for bottleneck simulations.
This network acts as a scalable representation of the Bay Bridge. It
consists of a two-stage lane-drop bottleneck where 4n lanes reduce to 2n
and then to n, where n is the scaling value. The length of the bottleneck
is fixed.
Requires from net_params:
* **scaling** : the factor multiplying number of lanes
* **speed_limit** : edge speed limit
Usage
-----
>>> from flow.core.params import NetParams
>>> from flow.core.params import VehicleParams
>>> from flow.core.params import InitialConfig
>>> from flow.networks import BottleneckNetwork
>>>
>>> network = BottleneckNetwork(
>>> name='bottleneck',
>>> vehicles=VehicleParams(),
>>> net_params=NetParams(
>>> additional_params={
>>> 'scaling': 1,
>>> 'speed_limit': 1,
>>> },
>>> )
>>> )
"""
def __init__(self,
name,
vehicles,
net_params,
initial_config=InitialConfig(),
traffic_lights=TrafficLightParams()):
"""Instantiate the network class."""
for p in ADDITIONAL_NET_PARAMS.keys():
if p not in net_params.additional_params:
raise KeyError('Network parameter "{}" not supplied'.format(p))
super().__init__(name, vehicles, net_params, initial_config,
traffic_lights)
def specify_nodes(self, net_params):
"""See parent class."""
nodes = [
{
"id": "1",
"x": 0,
"y": 0
}, # pre-toll
{
"id": "2",
"x": 100,
"y": 0
}, # toll
{
"id": "3",
"x": 410,
"y": 0
}, # light
{
"id": "4",
"x": 550,
"y": 0,
"type": "zipper",
"radius": 20
}, # merge1
{
"id": "5",
"x": 830,
"y": 0,
"type": "zipper",
"radius": 20
}, # merge2
{
"id": "6",
"x": 985,
"y": 0
},
# fake nodes used for visualization
{
"id": "fake1",
"x": 0,
"y": 1
},
{
"id": "fake2",
"x": 0,
"y": 2
}
] # post-merge2
return nodes
def specify_edges(self, net_params):
"""See parent class."""
scaling = net_params.additional_params.get("scaling", 1)
speed = net_params.additional_params['speed_limit']
assert (isinstance(scaling, int)), "Scaling must be an int"
edges = [
{
"id": "1",
"from": "1",
"to": "2",
"length": 100,
"spreadType": "center",
"numLanes": 4 * scaling,
"speed": speed
},
{
"id": "2",
"from": "2",
"to": "3",
"length": 310,
"spreadType": "center",
"numLanes": 4 * scaling,
"speed": speed
},
{
"id": "3",
"from": "3",
"to": "4",
"length": 140,
"spreadType": "center",
"numLanes": 4 * scaling,
"speed": speed
},
{
"id": "4",
"from": "4",
"to": "5",
"length": 280,
"spreadType": "center",
"numLanes": 2 * scaling,
"speed": speed
},
{
"id": "5",
"from": "5",
"to": "6",
"length": 155,
"spreadType": "center",
"numLanes": scaling,
"speed": speed
},
# fake edge used for visualization
{
"id": "fake_edge",
"from": "fake1",
"to": "fake2",
"length": 1,
"spreadType": "center",
"numLanes": scaling,
"speed": speed
}
]
return edges
def specify_connections(self, net_params):
"""See parent class."""
scaling = net_params.additional_params.get("scaling", 1)
conn_dic = {}
conn = []
for i in range(4 * scaling):
conn += [{
"from": "3",
"to": "4",
"fromLane": i,
"toLane": int(np.floor(i / 2))
}]
conn_dic["4"] = conn
conn = []
for i in range(2 * scaling):
conn += [{
"from": "4",
"to": "5",
"fromLane": i,
"toLane": int(np.floor(i / 2))
}]
conn_dic["5"] = conn
return conn_dic
def specify_centroids(self, net_params):
"""See parent class."""
centroids = []
centroids += [{
"id": "1",
"from": None,
"to": "1",
"x": -30,
"y": 0,
}]
centroids += [{
"id": "1",
"from": "5",
"to": None,
"x": 985 + 30,
"y": 0,
}]
return centroids
def specify_routes(self, net_params):
"""See parent class."""
rts = {
"1": ["1", "2", "3", "4", "5"],
"2": ["2", "3", "4", "5"],
"3": ["3", "4", "5"],
"4": ["4", "5"],
"5": ["5"]
}
return rts
def specify_edge_starts(self):
"""See parent class."""
return [("1", 0), ("2", 100), ("3", 405), ("4", 425), ("5", 580)]
def get_bottleneck_lanes(self, lane):
"""Return the reduced number of lanes."""
return [int(lane / 2), int(lane / 4)]
class BottleneckNetwork3to2(Network):
"""Scenario class for bottleneck simulations.
This network acts as a scalable representation of the Bay Bridge. It
consists of a two-stage lane-drop bottleneck where 4n lanes reduce to 2n
and then to n, where n is the scaling value. The length of the bottleneck
is fixed.
Requires from net_params:
* **scaling** : the factor multiplying number of lanes
* **speed_limit** : edge speed limit
In order for right-of-way dynamics to take place at the intersection,
set *no_internal_links* in net_params to False.
Usage
-----
>>> from flow.core.params import NetParams
>>> from flow.core.params import VehicleParams
>>> from flow.core.params import InitialConfig
>>> from flow.scenarios import BottleneckScenario
>>>
>>> scenario = BottleneckScenario(
>>> name='bottleneck',
>>> vehicles=VehicleParams(),
>>> net_params=NetParams(
>>> additional_params={
>>> 'scaling': 1,
>>> 'speed_limit': 1,
>>> },
>>> no_internal_links=False # we want junctions
>>> )
>>> )
"""
def __init__(self,
name,
vehicles,
net_params,
initial_config=InitialConfig(),
traffic_lights=TrafficLightParams()):
"""Instantiate the scenario class."""
for p in ADDITIONAL_NET_PARAMS.keys():
if p not in net_params.additional_params:
raise KeyError('Network parameter "{}" not supplied'.format(p))
super().__init__(name, vehicles, net_params, initial_config,
traffic_lights)
def specify_nodes(self, net_params):
"""See parent class."""
nodes = [
{
"id": "inflow_highway",
"x": 0,
"y": 0
},
{
"id": "left",
"x": 100,
"y": 0
}, # toll
{
"id": "center",
"x": 920,
"y": 0,
"type": "zipper",
"radius": 70
},
{
"id": "right",
"x": 1100,
"y": 0,
},
# fake nodes used for visualization
{
"id": "fake1",
"x": 0,
"y": 1
},
{
"id": "fake2",
"x": 0,
"y": 2
}
] # post-merge2
return nodes
def specify_edges(self, net_params):
"""See parent class."""
scaling = net_params.additional_params.get("scaling", 1)
speed = net_params.additional_params['speed_limit']
assert (isinstance(scaling, int)), "Scaling must be an int"
edges = [
{
"id": "inflow_highway",
"from": "inflow_highway",
"to": "left",
"length": 100,
"spreadType": "center",
"numLanes": 3,
"speed": speed
},
{
"id": "left",
"from": "left",
"to": "center",
"length": 820,
"spreadType": "center",
"numLanes": 3,
"speed": speed
},
{
"id": "center",
"from": "center",
"to": "right",
"length": 180,
"spreadType": "center",
"numLanes": 2,
"speed": speed
},
# fake edge used for visualization
{
"id": "fake_edge",
"from": "fake1",
"to": "fake2",
"length": 1,
"spreadType": "center",
"numLanes": scaling,
"speed": speed
}
]
return edges
def specify_connections(self, net_params):
"""See parent class."""
scaling = net_params.additional_params.get("scaling", 1)
conn_dic = {}
conn = [
{
"from": "left",
"to": "center",
"fromLane": 0,
"toLane": 0
},
{
"from": "left",
"to": "center",
"fromLane": 1,
"toLane": 0
},
{
"from": "left",
"to": "center",
"fromLane": 1,
"toLane": 1
},
{
"from": "left",
"to": "center",
"fromLane": 2,
"toLane": 1
}
]
conn_dic["center"] = conn
return conn_dic
def specify_centroids(self, net_params):
"""See parent class."""
centroids = []
centroids += [{
"id": "1",
"from": None,
"to": "inflow_highway",
"x": -30,
"y": 0,
}]
centroids += [{
"id": "1",
"from": "center",
"to": None,
"x": 1100 + 30,
"y": 0,
}]
return centroids
def specify_routes(self, net_params):
"""See parent class."""
rts = {
"inflow_highway": ["inflow_highway", "left", "center"],
"left": ["left", "center"],
"center": ["center"],
}
return rts
def specify_edge_starts(self):
"""See parent class."""
return [("inflow_highway", 0), ("left", 100), ("center", 920)]
def specify_internal_edge_starts(self):
"""See parent class."""
# hard coded, must correspond to internal noded generated in the
# network. Here I hard coded based on .net.xml file I have, and the
# information in specify_edge_starts()
return [(":left_0", 100), (":center_0", 920)]
#def get_bottleneck_lanes(self, lane):
# """Return the reduced number of lanes."""
# return [int(lane / 2), int(lane / 4)]
| 28.221978 | 79 | 0.410015 |
4a263b70239d3c463f832998ceef11c478c71da4 | 1,776 | py | Python | samples/python/13.core-bot/envs/chat_bot_02/Lib/site-packages/adal/__init__.py | luzeunice/BotBuilder-Samples | b62be4e8863125a567902b736b7b74313d9d4f28 | [
"MIT"
] | 1 | 2021-10-16T19:33:56.000Z | 2021-10-16T19:33:56.000Z | samples/python/13.core-bot/envs/chat_bot_02/Lib/site-packages/adal/__init__.py | luzeunice/BotBuilder-Samples | b62be4e8863125a567902b736b7b74313d9d4f28 | [
"MIT"
] | 1 | 2021-04-30T20:41:19.000Z | 2021-04-30T20:41:19.000Z | venv/lib/python2.7/site-packages/adal/__init__.py | 784134748/kubernetes-install | 5df59632c2619632e422948b667fb68eab9ff5be | [
"MIT"
] | 1 | 2019-07-25T15:09:05.000Z | 2019-07-25T15:09:05.000Z | #------------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation.
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#------------------------------------------------------------------------------
# pylint: disable=wrong-import-position
__version__ = '1.2.1'
import logging
from .authentication_context import AuthenticationContext
from .token_cache import TokenCache
from .log import (set_logging_options,
get_logging_options,
ADAL_LOGGER_NAME)
from .adal_error import AdalError
# to avoid "No handler found" warnings.
logging.getLogger(ADAL_LOGGER_NAME).addHandler(logging.NullHandler())
| 39.466667 | 80 | 0.696509 |