function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
list |
---|---|---|
def get_provider_fields(provider):
    """Look up the configured field list for *provider*.

    Returns None when the provider has no entry in the field map.
    """
    field_map = get_provider_field_map()
    return field_map.get(provider)
|
frappe/frappe
|
[
4495,
2418,
4495,
1493,
1307520856
] |
def patch(self):
    """Adapt the bundled Makefile to the active Spack toolchain.

    Rewrites the compiler and BLAS variables so the build uses Spack's
    compiler wrappers and the lapack/blas providers chosen in the spec,
    and disables the hard-coded MKL setting for non-Intel compilers.
    """
    # Link flags for whichever lapack/blas providers the spec selected.
    math_libs = self.spec['lapack'].libs + self.spec['blas'].libs
    makefile = FileFilter('Makefile')
    if self.spec.satisfies('%gcc'):
        makefile.filter(r'^MKL\s+=\s1', 'MKL=0')
        makefile.filter(r'^CC\s+=\sgcc',
                        'CC={0}'.format(spack_cc))
        makefile.filter(r'^CXX\s+=\sg\+\+',
                        'CXX={0}'.format(spack_cxx))
        makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas',
                        'BLASLIBS={0}'.format(math_libs.ld_flags))
    elif self.spec.satisfies('%fj'):
        # Fujitsu: also flip the Makefile's target environment to FX100.
        makefile.filter(r'^#ENV\s+=\sFX100', 'ENV=FX100')
        makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC')
        makefile.filter(r'^MKL\s+=\s1', 'MKL=0')
        makefile.filter(r'^CC\s+=\sfccpx',
                        'CC={0}'.format(spack_cc))
        makefile.filter(r'^CXX\s+=\sFCCpx',
                        'CXX={0}'.format(spack_cxx))
        makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas',
                        'BLASLIBS={0}'.format(math_libs.ld_flags))
    elif self.spec.satisfies('%intel'):
        # NOTE(review): the Intel branch leaves MKL=1 and BLASLIBS alone —
        # presumably MKL supplies BLAS/LAPACK there; confirm upstream.
        makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC')
        makefile.filter(r'^ENV\s+=\sICC', 'ENV=ICC')
        makefile.filter(r'^CC\s+=\sicc',
                        'CC={0}'.format(spack_cc))
        makefile.filter(r'^CXX\s+=\sicc',
                        'CXX={0}'.format(spack_cxx))
|
LLNL/spack
|
[
3244,
1839,
3244,
2847,
1389172932
] |
def testFromSparseTensorSlices(self, slices):
  """Test a dataset based on slices of a `tf.sparse.SparseTensor`."""
  # Placeholder lets each parameterized case feed its own sparse value.
  st = array_ops.sparse_placeholder(dtypes.float64)
  iterator = dataset_ops.make_initializable_iterator(
      dataset_ops.Dataset.from_sparse_tensor_slices(st))
  init_op = iterator.initializer
  # Reassemble the iterator's component tensors into a SparseTensor.
  get_next = sparse_tensor.SparseTensor(*iterator.get_next())
  with self.cached_session() as sess:
    # Test with sparse tensor in the appropriate order.
    # pylint: disable=g-complex-comprehension
    indices = np.array(
        [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))])
    values = np.array([val for s in slices for val in s])
    # pylint: enable=g-complex-comprehension
    # Dense shape is rows x (longest row + 1); the +1 leaves a trailing
    # empty column that the op must tolerate.
    dense_shape = np.array([len(slices), max(len(s) for s in slices) + 1])
    sparse_feed = sparse_tensor.SparseTensorValue(indices, values,
                                                  dense_shape)
    sess.run(init_op, feed_dict={st: sparse_feed})
    for i, s in enumerate(slices):
      results = sess.run(get_next)
      self.assertAllEqual(s, results.values)
      # Each emitted slice is rank-1, so its indices collapse to [[j], ...].
      expected_indices = np.array(
          [[j] for j in range(len(slices[i]))]).reshape([-1, 1])
      self.assertAllEqual(expected_indices, results.indices)
      self.assertAllEqual(dense_shape[1:], results.dense_shape)
    # After len(slices) elements the iterator must be exhausted.
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def testFromSparseTensorSlicesInReverse(self, slices):
  """Test a dataset based on slices of a `tf.sparse.SparseTensor` in reverse order."""
  st = array_ops.sparse_placeholder(dtypes.float64)
  iterator = dataset_ops.make_initializable_iterator(
      dataset_ops.Dataset.from_sparse_tensor_slices(st))
  init_op = iterator.initializer
  with self.cached_session() as sess:
    # pylint: disable=g-complex-comprehension
    indices = np.array(
        [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))])
    values = np.array([val for s in slices for val in s])
    # pylint: enable=g-complex-comprehension
    dense_shape = np.array([len(slices), max(len(s) for s in slices) + 1])
    # Test with sparse tensor in the reverse order, which is not
    # currently supported.
    reverse_order_indices = indices[::-1, :]
    reverse_order_values = values[::-1]
    sparse_feed = sparse_tensor.SparseTensorValue(
        reverse_order_indices, reverse_order_values, dense_shape)
    # Feeding indices out of row-major order should be rejected at init.
    with self.assertRaises(errors.UnimplementedError):
      sess.run(init_op, feed_dict={st: sparse_feed})
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def testEmptySparseTensorSlices(self):
  """Test a dataset based on slices of an empty `tf.sparse.SparseTensor`."""
  st = array_ops.sparse_placeholder(dtypes.float64)
  iterator = dataset_ops.make_initializable_iterator(
      dataset_ops.Dataset.from_sparse_tensor_slices(st))
  init_op = iterator.initializer
  get_next = sparse_tensor.SparseTensor(*iterator.get_next())
  with self.cached_session() as sess:
    # Test with an empty sparse tensor: zero values and a dense shape
    # whose leading dimension is 0, so the dataset yields no elements.
    empty_indices = np.empty((0, 4), dtype=np.int64)
    empty_values = np.empty((0,), dtype=np.float64)
    empty_dense_shape = [0, 4, 37, 9]
    sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, empty_values,
                                                  empty_dense_shape)
    sess.run(init_op, feed_dict={st: sparse_feed})
    # The very first get_next must already signal end-of-sequence.
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def testEmptySparseTensorSlicesInvalid(self):
  """Test a dataset based on an invalid `tf.sparse.SparseTensor`."""
  st = array_ops.sparse_placeholder(dtypes.float64)
  iterator = dataset_ops.make_initializable_iterator(
      dataset_ops.Dataset.from_sparse_tensor_slices(st))
  init_op = iterator.initializer
  with self.cached_session() as sess:
    # Test with an empty sparse tensor but with non empty values: zero
    # index rows disagree with the four values supplied.
    empty_indices = np.empty((0, 4), dtype=np.int64)
    non_empty_values = [1, 2, 3, 4]
    empty_dense_shape = [0, 4, 37, 9]
    sparse_feed = sparse_tensor.SparseTensorValue(empty_indices,
                                                  non_empty_values,
                                                  empty_dense_shape)
    # Here, we expect the test to fail when running the feed.
    with self.assertRaises(errors.InvalidArgumentError):
      sess.run(init_op, feed_dict={st: sparse_feed})
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def testEmptySparseTensorSlicesInvalid2(self):
  """Test a dataset based on an invalid `tf.sparse.SparseTensor`."""
  st = array_ops.sparse_placeholder(dtypes.float64)
  iterator = dataset_ops.make_initializable_iterator(
      dataset_ops.Dataset.from_sparse_tensor_slices(st))
  init_op = iterator.initializer
  with self.cached_session() as sess:
    # [[]] is a malformed indices matrix: one row of width zero cannot
    # describe entries of a [1, 1] tensor with an empty values vector.
    empty_indices = [[]]
    empty_values = []
    dense_shape = [1, 1]
    sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, empty_values,
                                                  dense_shape)
    # Here, we expect the test to fail when running the feed.
    with self.assertRaises(errors.InvalidArgumentError):
      sess.run(init_op, feed_dict={st: sparse_feed})
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def testFromSparseTensorSlicesError(self):
  """Passing None instead of a SparseTensor must raise AttributeError."""
  with self.assertRaises(AttributeError):
    dataset_ops.Dataset.from_sparse_tensor_slices(None)
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def _build_sparse_tensor_slice_dataset(self, slices):
  """Build a dataset slicing a SparseTensor assembled from `slices`.

  Args:
    slices: list of per-row value lists (rows may be empty).

  Returns:
    A `Dataset` yielding one sparse row per entry of `slices`.
  """
  # pylint: disable=g-complex-comprehension
  indices = np.array(
      [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))],
      dtype=np.int64)
  values = np.array([val for s in slices for val in s], dtype=np.float64)
  # pylint: enable=g-complex-comprehension
  # +1 pads the column dimension one past the longest row.
  dense_shape = np.array(
      [len(slices), max(len(s) for s in slices) + 1], dtype=np.int64)
  sparse_components = sparse_tensor.SparseTensor(indices, values, dense_shape)
  return dataset_ops.Dataset.from_sparse_tensor_slices(sparse_components)
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def test(self, verify_fn):
  """Drive verify_fn over a sparse-slice dataset with 9 rows.

  Several rows are empty; num_outputs matches len(slices).
  verify_fn is supplied by the test harness — presumably a
  serialization/checkpoint checker (confirm at the call site).
  """
  slices = [[1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], []]
  verify_fn(
      self,
      lambda: self._build_sparse_tensor_slice_dataset(slices),
      num_outputs=9,
      sparse_tensors=True)
|
tensorflow/tensorflow
|
[
171949,
87931,
171949,
2300,
1446859160
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    request = context['request']
    review_request = context['review_request']

    # Only pending review requests on a writable site qualify at all.
    if review_request.status != ReviewRequest.PENDING_REVIEW:
        return False

    if is_site_read_only_for(request.user):
        return False

    # Shown to the submitter, or to users who may change the status of a
    # public review request.
    return (request.user.pk == review_request.submitter_id or
            (context['perms']['reviews']['can_change_status'] and
             review_request.public))
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    request = context['request']
    review_request = context['review_request']

    # Visible only for public review requests, and never on a
    # read-only site.
    return (review_request.public and
            not is_site_read_only_for(request.user))
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    perms = context['perms']
    user = context['request'].user

    # Requires the delete permission and a writable site.
    return (perms['reviews']['delete_reviewrequest'] and
            not is_site_read_only_for(user))
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    review_request = context['review_request']
    user = context['request'].user

    # Only pending review requests on a writable site qualify.
    if review_request.status != ReviewRequest.PENDING_REVIEW:
        return False

    if is_site_read_only_for(user):
        return False

    # Shown to the submitter or to users with the edit permission.
    return (user.pk == review_request.submitter_id or
            context['perms']['reviews']['can_edit_reviewrequest'])
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def get_label(self, context):
    """Return this action's label.

    The label will change depending on whether or not the corresponding
    review request already has a diff.

    Args:
        context (django.template.Context):
            The collection of key-value pairs from the template.

    Returns:
        unicode: The label that displays this action to the user.
    """
    review_request = context['review_request']
    draft = review_request.get_draft(context['request'].user)

    # A diff exists if the draft carries one or the review request
    # already has published diffsets.
    has_diff = bool((draft and draft.diffset) or
                    review_request.get_diffsets())

    return _('Update Diff') if has_diff else _('Upload Diff')
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def get_url(self, context):
    """Return this action's URL.

    Args:
        context (django.template.Context):
            The collection of key-value pairs from the template.

    Returns:
        unicode: The URL to invoke if this action is clicked.
    """
    request = context['request']

    # Inside the diff viewer a relative URL is used so that switching
    # between revisions does not require re-rendering the page.
    if request.resolver_match.url_name in diffviewer_url_names:
        return 'raw/'

    return local_site_reverse(
        'raw-diff',
        request,
        kwargs={
            'review_request_id': context['review_request'].display_id,
        })
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether or not this action should render.

    Args:
        context (django.template.Context):
            The collection of key-value pairs available in the template
            just before this action is to be rendered.

    Returns:
        bool: Determines if this action should render.
    """
    request = context['request']
    review_request = context['review_request']

    # On diff viewer pages the action is always rendered initially (it
    # may be hidden client-side afterwards).
    if request.resolver_match.url_name in diffviewer_url_names:
        return True

    # Elsewhere it only makes sense when a repository is attached.
    return review_request.repository_id is not None
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    user = context['request'].user

    # Anonymous users never see this action.
    if not user.is_authenticated:
        return False

    # Neither do users for whom the site is read-only.
    return not is_site_read_only_for(user)
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    request = context['request']
    user = request.user

    # Requires a logged-in user on a writable site.
    if not user.is_authenticated or is_site_read_only_for(user):
        return False

    # Finally, the general comments feature must be enabled.
    return general_comments_feature.is_enabled(request=request)
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def should_render(self, context):
    """Return whether the action should render.

    Args:
        context (dict):
            The current render context.

    Returns:
        bool:
            Whether the action should render.
    """
    user = context['request'].user

    # Requires an authenticated user on a site that is not read-only
    # for them.
    if not user.is_authenticated:
        return False

    return not is_site_read_only_for(user)
|
reviewboard/reviewboard
|
[
1464,
419,
1464,
1,
1250977189
] |
def setUp(self):
    """Per-test setup hook; this suite needs no shared fixtures."""
|
talon-one/talon_one.py
|
[
1,
3,
1,
1,
1484929786
] |
def make_instance(self, include_optional):
    """Build an UpdateLoyaltyProgram test instance.

    When *include_optional* is True every optional parameter is filled
    with a sample value; otherwise only the required parameters are
    passed (here: none).
    """
    # model = talon_one.models.update_loyalty_program.UpdateLoyaltyProgram()  # noqa: E501
    if not include_optional:
        return UpdateLoyaltyProgram()

    return UpdateLoyaltyProgram(
        title='0',
        description='0',
        subscribed_applications=[56],
        default_validity='0',
        default_pending='0',
        allow_subledger=True,
    )
|
talon-one/talon_one.py
|
[
1,
3,
1,
1,
1484929786
] |
def __init__(self, plotly_name="hoverlabel", parent_name="sankey", **kwargs):
super(HoverlabelValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
data_docs=kwargs.pop(
"data_docs",
"""
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for align .
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for bgcolor .
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for namelength .
|
plotly/python-api
|
[
13052,
2308,
13052,
1319,
1385013188
] |
def recognize_business_card(self):
    """Recognize and print fields from a sample English business card.

    Reads the bundled JPEG, submits it to the Form Recognizer service and
    prints each recognized field with its confidence score.  Requires the
    AZURE_FORM_RECOGNIZER_ENDPOINT and AZURE_FORM_RECOGNIZER_KEY
    environment variables.
    """
    # Resolve the sample image relative to this file's location.
    path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__),
        "..", "..", "./sample_forms/business_cards/business-card-english.jpg"))
    # [START recognize_business_cards]
    from azure.core.credentials import AzureKeyCredential
    from azure.ai.formrecognizer import FormRecognizerClient

    endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
    key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]

    form_recognizer_client = FormRecognizerClient(
        endpoint=endpoint, credential=AzureKeyCredential(key)
    )
    with open(path_to_sample_forms, "rb") as f:
        poller = form_recognizer_client.begin_recognize_business_cards(business_card=f, locale="en-US")
    business_cards = poller.result()

    for idx, business_card in enumerate(business_cards):
        print("--------Recognizing business card #{}--------".format(idx+1))
        # ContactNames entries are nested field maps (FirstName/LastName).
        contact_names = business_card.fields.get("ContactNames")
        if contact_names:
            for contact_name in contact_names.value:
                print("Contact First Name: {} has confidence: {}".format(
                    contact_name.value["FirstName"].value, contact_name.value["FirstName"].confidence
                ))
                print("Contact Last Name: {} has confidence: {}".format(
                    contact_name.value["LastName"].value, contact_name.value["LastName"].confidence
                ))
        # The remaining fields are flat lists of valued items.
        company_names = business_card.fields.get("CompanyNames")
        if company_names:
            for company_name in company_names.value:
                print("Company Name: {} has confidence: {}".format(company_name.value, company_name.confidence))
        departments = business_card.fields.get("Departments")
        if departments:
            for department in departments.value:
                print("Department: {} has confidence: {}".format(department.value, department.confidence))
        job_titles = business_card.fields.get("JobTitles")
        if job_titles:
            for job_title in job_titles.value:
                print("Job Title: {} has confidence: {}".format(job_title.value, job_title.confidence))
        emails = business_card.fields.get("Emails")
        if emails:
            for email in emails.value:
                print("Email: {} has confidence: {}".format(email.value, email.confidence))
        websites = business_card.fields.get("Websites")
        if websites:
            for website in websites.value:
                print("Website: {} has confidence: {}".format(website.value, website.confidence))
        addresses = business_card.fields.get("Addresses")
        if addresses:
            for address in addresses.value:
                print("Address: {} has confidence: {}".format(address.value, address.confidence))
        mobile_phones = business_card.fields.get("MobilePhones")
        if mobile_phones:
            for phone in mobile_phones.value:
                print("Mobile phone number: {} has confidence: {}".format(phone.value, phone.confidence))
        faxes = business_card.fields.get("Faxes")
        if faxes:
            for fax in faxes.value:
                print("Fax number: {} has confidence: {}".format(fax.value, fax.confidence))
        work_phones = business_card.fields.get("WorkPhones")
        if work_phones:
            for work_phone in work_phones.value:
                print("Work phone number: {} has confidence: {}".format(work_phone.value, work_phone.confidence))
        other_phones = business_card.fields.get("OtherPhones")
        if other_phones:
            for other_phone in other_phones.value:
                print("Other phone number: {} has confidence: {}".format(other_phone.value, other_phone.confidence))
    # [END recognize_business_cards]
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def test_config(self):
    """A LeastSquaresLoss should retain the config it was built with."""
    with self.test_session():
        loss = LeastSquaresLoss(mock_gan(), loss_config)
        # loss_config is a module-level fixture; it is expected to carry a
        # truthy `test` attribute — confirm in the surrounding test module.
        self.assertTrue(loss.config.test)
|
255BITS/HyperGAN
|
[
1175,
170,
1175,
20,
1466808596
] |
def __init__(self, client, config, serializer, deserializer):
    """Store the shared pipeline client, configuration and (de)serializers."""
    self._client = client
    self._config = config
    self._serialize = serializer
    self._deserialize = deserializer
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def begin_create_or_update(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.LocalNetworkGateway"
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Deserialize the LRO's final payload into a LocalNetworkGateway.
    # `self` and `cls` come from the enclosing method's scope; `cls` lets
    # the caller post-process the raw response instead of the model.
    deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def get(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def _delete_initial(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def begin_delete(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # No payload is deserialized here; only the optional `cls` callback
    # (from the enclosing scope) is invoked.  Otherwise returns None.
    if cls:
        return cls(pipeline_response, None, {})
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def _update_tags_initial(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def begin_update_tags(
self,
resource_group_name, # type: str
local_network_gateway_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Deserialize the LRO's final payload into a LocalNetworkGateway.
    # `self` and `cls` are captured from the enclosing method's scope.
    deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    """Build the GET request for the first page or for *next_link*.

    Closure variables (accept, resource_group_name, api_version, self)
    come from the enclosing paging method.
    """
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        # A continuation link already embeds every query parameter.
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def get_next(next_link=None):
    """Fetch one page, raising HttpResponseError on any non-200 status."""
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        # Map service-specific errors first, then fall back to ARM format.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    """Capture the shared pipeline client, config and (de)serializers."""
    self._client = client
    self._config = config
    self._serialize = serializer
    self._deserialize = deserializer
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    """Build the GET request for the first page or for *next_link*.

    Closure variables (accept, resource_group_name, account_name,
    api_version, self) come from the enclosing paging method.
    """
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list_by_account.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            # Storage account names are 3-24 chars of the documented pattern.
            'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3, pattern=r'^[A-Za-z0-9]+(-[A-Za-z0-9]+)*$'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        # A continuation link already embeds every query parameter.
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def schedule_single_message(sender):
    """Schedule one message ~30 seconds out and return its sequence number."""
    scheduled_time_utc = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
    message = ServiceBusMessage("Message to be scheduled")
    return sender.schedule_messages(message, scheduled_time_utc)
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def main():
    """Demonstrate scheduling and cancelling Service Bus topic messages."""
    servicebus_client = ServiceBusClient.from_connection_string(
        conn_str=CONNECTION_STR, logging_enable=True
    )
    with servicebus_client:
        sender = servicebus_client.get_topic_sender(topic_name=TOPIC_NAME)
        with sender:
            # Schedule a single message and a batch, report the sequence
            # numbers, then cancel both so nothing is actually delivered.
            sequence_number = schedule_single_message(sender)
            print(
                "Single message is scheduled and sequence number is {}".format(
                    sequence_number
                )
            )
            sequence_numbers = schedule_multiple_messages(sender)
            print(
                "Multiple messages are scheduled and sequence numbers are {}".format(
                    sequence_numbers
                )
            )
            sender.cancel_scheduled_messages(sequence_number)
            sender.cancel_scheduled_messages(sequence_numbers)
            print("All scheduled messages are cancelled.")
|
Azure/azure-sdk-for-python
|
[
3526,
2256,
3526,
986,
1335285972
] |
def main():
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def annotate_old_bird_calls():
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def create_raw_df(rows):
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def create_aggregate_df(df):
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def sum_counts(df, detector):
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def add_precision_recall_f1(df):
    """Add Precision, Recall and F1 columns (as percentages) to *df* in place."""
    precision = df['Old Bird Calls'] / df['Old Bird Clips']
    recall = df['Old Bird Calls'] / df['Ground Truth Calls']
    f1 = 2 * precision * recall / (precision + recall)
    df['Precision'] = to_percent(precision)
    df['Recall'] = to_percent(recall)
    df['F1'] = to_percent(f1)
|
HaroldMills/Vesper
|
[
43,
3,
43,
33,
1398351334
] |
def test_survey_publication_date_views(self):
    """We have some views to display and set the published column
    for a survey session.

    Exercises publication_date's set_date/reset_date/unset_date and the
    publication_menu view, checking redirect targets, the recorded
    publisher/publication timestamps and the review state transitions.
    """
    with api.env.adopt_user("admin"):
        survey = addSurvey(self.portal, BASIC_SURVEY)
    account = addAccount(password="secret")
    survey_session = model.SurveySession(
        id=123,
        title=u"Dummy session",
        created=datetime(2012, 4, 22, 23, 5, 12),
        modified=datetime(2012, 4, 23, 11, 50, 30),
        zodb_path="nl/ict/software-development",
        account=account,
        company=model.Company(country="nl", employees="1-9", referer="other"),
    )
    model.Session.add(survey_session)
    survey = self.portal.client.nl.ict["software-development"]
    session_id = "++session++%d" % survey_session.id
    traversed_survey_session = survey.restrictedTraverse(session_id)
    with api.env.adopt_user(user=survey_session.account):
        with self._get_view(
            "publication_date", traversed_survey_session, survey_session
        ) as view:
            # The view is not callable but
            # has traversable allowed attributes
            self.assertRaises(ViewNotCallableError, view)
            # We have some default values that will be changed
            # when publishing/unpublishing the session
            self.assertEqual(survey_session.last_publisher, None)
            self.assertEqual(survey_session.published, None)
            self.assertEqual(survey_session.last_modifier, None)
            self.assertEqual(survey_session.review_state, "private")
            # Calling set_date will result in having this session published
            # and the publication time and the publisher will be recorded
            # If no referer is set,
            # the methods will redirect to the context url
            self.assertEqual(
                view.set_date(),
                "{url}/{session_id}".format(
                    url=survey.absolute_url(), session_id=session_id
                ),
            )
            self.assertEqual(survey_session.last_publisher, survey_session.account)
            self.assertIsInstance(survey_session.published, datetime)
            self.assertEqual(survey_session.review_state, "published")
            old_modified = survey_session.modified
            old_published = survey_session.published
            old_modifier = survey_session.last_modifier
            # Changing the HTTP_REFERER will redirect there
            # and calling reset_date will update the published date
            view.request.set("HTTP_REFERER", "foo")
            # We need to wait at least one second because the datetime
            # is stored with that accuracy
            sleep(1)
            self.assertEqual(view.reset_date(), "foo")
            self.assertEqual(survey_session.last_publisher, survey_session.account)
            # The publisher and publication dates are set. The modification date
            # is not touched.
            self.assertEqual(survey_session.modified, old_modified)
            self.assertEqual(survey_session.last_modifier, old_modifier)
            self.assertTrue(survey_session.published > old_published)
            # Calling unset_date will restore the publication info
            self.assertEqual(view.unset_date(), "foo")
            self.assertEqual(survey_session.last_publisher, None)
            self.assertEqual(survey_session.published, None)
            self.assertEqual(survey_session.review_state, "private")
        # We also have a menu view
        with self._get_view(
            "publication_menu", traversed_survey_session, survey_session
        ) as view:
            soup = html.fromstring(view())
            self.assertListEqual(
                ["publication_date/set_date#content"],
                [
                    el.attrib["action"].rpartition("@@")[-1]
                    for el in soup.cssselect("form")
                ],
            )
            # We trigger the session to be private
            survey_session.published = "foo"
            soup = html.fromstring(view())
            self.assertListEqual(
                [
                    "publication_date/unset_date#content",
                    "publication_date/reset_date#content",
                ],
                [
                    el.attrib["action"].rpartition("@@")[-1]
                    for el in soup.cssselect("form")
                ],
            )
|
euphorie/Euphorie
|
[
11,
5,
11,
6,
1303204650
] |
def device(portnum):
    """Turn a 0-based port number into a Windows device name."""
    # Windows requires the '\\.\COMx' form for COM10 and above; the plain
    # DOS name is kept for the first ports because not all Windows
    # versions handle the prefixed form properly there.
    number = portnum + 1  # port numbers are 0-based, device names 1-based
    if number >= 10:
        return r'\\.\COM%d' % number
    return 'COM%d' % number
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def open(self):
    """Open port with current settings. This may throw a SerialException
    if the port cannot be opened.

    Note: this module is Python 2 code (see the except clause syntax).
    """
    if self._port is None:
        raise SerialException("Port must be configured before it can be used.")
    self.hComPort = None
    try:
        # FILE_FLAG_OVERLAPPED enables asynchronous I/O on the handle;
        # reads/writes then complete through win32 events.
        self.hComPort = win32file.CreateFile(self.portstr,
               win32con.GENERIC_READ | win32con.GENERIC_WRITE,
               0, # exclusive access
               None, # no security
               win32con.OPEN_EXISTING,
               win32con.FILE_ATTRIBUTE_NORMAL | win32con.FILE_FLAG_OVERLAPPED,
               None)
    except Exception, msg:
        self.hComPort = None    #'cause __del__ is called anyway
        raise SerialException("could not open port: %s" % msg)
    # Setup a 4k buffer
    win32file.SetupComm(self.hComPort, 4096, 4096)
    # Save original timeout values so close() can restore them:
    self._orgTimeouts = win32file.GetCommTimeouts(self.hComPort)
    self._reconfigurePort()
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def _reconfigurePort(self):
    """Set communication parameters on opened port.

    NOTE(review): only the handle guard is visible in this chunk — the
    actual DCB/timeout configuration presumably follows; confirm in the
    full source.
    """
    if not self.hComPort:
        raise SerialException("Can only operate on a valid port handle")
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def close(self):
    """Close the port, restoring the timeouts saved by open()."""
    if not self._isOpen:
        return

    if self.hComPort:
        # Put back the original timeout values before releasing the handle.
        win32file.SetCommTimeouts(self.hComPort, self._orgTimeouts)
        win32file.CloseHandle(self.hComPort)
        self.hComPort = None

    self._isOpen = False
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def inWaiting(self):
    """Return the number of characters currently in the input buffer."""
    _flags, comstat = win32file.ClearCommError(self.hComPort)
    return comstat.cbInQue
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def write(self, s):
    """Output the given string over the serial port."""
    if not self.hComPort: raise portNotOpenError
    #print repr(s),
    if s:
        # Overlapped WriteFile may return a pending status; block on the
        # event until the write completes so this call is synchronous.
        err, n = win32file.WriteFile(self.hComPort, s, self._overlappedWrite)
        if err: #will be ERROR_IO_PENDING:
            # Wait for the write to complete.
            win32event.WaitForSingleObject(self._overlappedWrite.hEvent, win32event.INFINITE)
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def flushOutput(self):
    """Clear the output buffer, aborting any transmission in progress and
    discarding everything still queued."""
    if not self.hComPort:
        raise portNotOpenError
    win32file.PurgeComm(
        self.hComPort,
        win32file.PURGE_TXCLEAR | win32file.PURGE_TXABORT)
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def setRTS(self, level=1):
    """Set terminal status line: Request To Send."""
    if not self.hComPort:
        raise portNotOpenError
    # Assert RTS for a truthy level, clear it otherwise.
    function = win32file.SETRTS if level else win32file.CLRRTS
    win32file.EscapeCommFunction(self.hComPort, function)
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def getCTS(self):
    """Read terminal status line: Clear To Send."""
    if not self.hComPort:
        raise portNotOpenError
    modem_status = win32file.GetCommModemStatus(self.hComPort)
    return (MS_CTS_ON & modem_status) != 0
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def getRI(self):
    """Read terminal status line: Ring Indicator."""
    if not self.hComPort:
        raise portNotOpenError
    modem_status = win32file.GetCommModemStatus(self.hComPort)
    return (MS_RING_ON & modem_status) != 0
|
emilydolson/forestcat
|
[
2,
2,
2,
2,
1370887123
] |
def test_serialize(self):
    """SensorSerializer.serialize should honour the requested field list."""
    # Valid room
    data = SensorSerializer.serialize(SensorDTO(id=1, name='foo', room=5),
                                      fields=['id', 'name', 'room'])
    self.assertEqual({'id': 1,
                      'name': 'foo',
                      'room': 5}, data)
    # Empty room: a missing room is expected to serialize as 255
    # (presumably the "no room" sentinel — confirm in the serializer).
    data = SensorSerializer.serialize(SensorDTO(id=1, name='foo'),
                                      fields=['id', 'name', 'room'])
    self.assertEqual({'id': 1,
                      'name': 'foo',
                      'room': 255}, data)
    # No room requested: the field is omitted even though the DTO has one.
    data = SensorSerializer.serialize(SensorDTO(id=1, name='foo', room=5),
                                      fields=['id', 'name'])
    self.assertEqual({'id': 1,
                      'name': 'foo'}, data)
|
openmotics/gateway
|
[
30,
12,
30,
27,
1481877206
] |
def update_product_by_location(self, cr, uid, context=None):
    """Refresh per-location quantity columns on products.

    For every stock location flagged ``update_product_bylocation``, read each
    non-service product's ``qty_available`` in that location's context and,
    when it changed, write it into the product column named by the location's
    ``product_related_columns``.  Changed products are also stamped with
    ``date_product_by_location_update``.  (Old OpenERP cr/uid API.)
    """
    context = context or self.pool['res.users'].context_get(cr, uid)
    location_ids = self.search(cr, uid, [('update_product_bylocation', '=', True)], context=context)
    location_vals = {}
    start_time = datetime.now()
    date_product_by_location_update = start_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    if location_ids:
        product_obj = self.pool['product.product']
        # Map: location id -> name of the product column holding its quantity
        for location in self.browse(cr, uid, location_ids, context):
            location_vals[location.id] = location.product_related_columns
        product_ids = product_obj.search(cr, uid, [('type', '!=', 'service')], context=context)
        product_context = context.copy()
        product_vals = {}
        for product_id in product_ids:
            product_vals[product_id] = {}
        for location_keys in location_vals.keys():
            # qty_available is computed per-location through context['location']
            product_context['location'] = location_keys
            for product in product_obj.browse(cr, uid, product_ids, product_context):
                # Skip locations with no configured column (falsy value)
                if location_vals[location_keys] and (product[location_vals[location_keys]] != product.qty_available):
                    product_vals[product.id][location_vals[location_keys]] = product.qty_available
        if product_vals:
            for product_id in product_vals.keys():
                product_val = product_vals[product_id]
                if product_val:
                    # Only write (and timestamp) products whose quantities changed
                    product_val['date_product_by_location_update'] = date_product_by_location_update
                    product_obj.write(cr, uid, product_id, product_val, context)
    end_time = datetime.now()
    duration_seconds = (end_time - start_time)  # NOTE: a timedelta, not a number of seconds
    duration = '{sec}'.format(sec=duration_seconds)
    _logger.info(u'update_product_by_location get in {duration}'.format(duration=duration))
    return True
|
iw3hxn/LibrERP
|
[
29,
16,
29,
1,
1402418161
] |
def _get_original_image_url(self, px=1024):
return "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Gullfoss%2C_an_iconic_waterfall_of_Iceland.jpg/{}px-Gullfoss%2C_an_iconic_waterfall_of_Iceland.jpg".format(
px
)
|
yelizariev/addons-yelizariev
|
[
33,
79,
33,
2,
1465301317
] |
def __init__(self, resource):
    # Keep a reference to the wrapped resource for later lookups.
    self.resource = resource
|
yaybu/touchdown
|
[
11,
4,
11,
17,
1410353271
] |
def pending(self, runner, object):
    """Return True while the provisioner's apply step still reports Pending."""
    apply_service = runner.get_service(self.resource.provisioner, "apply")
    return apply_service.object["Result"] == "Pending"
|
yaybu/touchdown
|
[
11,
4,
11,
17,
1410353271
] |
def __init__(self, resource):
    # Keep a reference to the wrapped resource for later lookups.
    self.resource = resource
|
yaybu/touchdown
|
[
11,
4,
11,
17,
1410353271
] |
def pending(self, runner, object):
    """Return True while the provisioner's apply step still reports Pending."""
    apply_service = runner.get_service(self.resource.provisioner, "apply")
    return apply_service.object["Result"] == "Pending"
|
yaybu/touchdown
|
[
11,
4,
11,
17,
1410353271
] |
def __init__(self,
             channel=LocalChannel(),
             nodes_per_block=1,
             cores_per_block=None,
             cores_per_node=None,
             init_blocks=1,
             min_blocks=0,
             max_blocks=1,
             parallelism=1,
             walltime="00:10:00",
             scheduler_options='',
             worker_init='',
             project=None,
             queue=None,
             cmd_timeout=120,
             move_files=True,
             bsub_redirection=False,
             request_by_nodes=True,
             launcher=SingleNodeLauncher()):
    """LSF provider constructor.

    Assembles the ``#BSUB`` directives (project, queue, node/core request)
    into ``self.scheduler_options`` and delegates common block bookkeeping
    to the parent provider.  Resources are requested either by node count
    (``request_by_nodes=True``) or by core count (``cores_per_block`` and
    ``cores_per_node`` both required); in the core-count case
    ``nodes_per_block`` is recomputed for Parsl's scaling strategy.
    """
    label = 'LSF'
    super().__init__(label,
                     channel,
                     nodes_per_block,
                     init_blocks,
                     min_blocks,
                     max_blocks,
                     parallelism,
                     walltime,
                     cmd_timeout=cmd_timeout,
                     launcher=launcher)
    self.project = project
    self.queue = queue
    self.cores_per_block = cores_per_block
    self.cores_per_node = cores_per_node
    self.move_files = move_files
    self.bsub_redirection = bsub_redirection
    self.request_by_nodes = request_by_nodes
    # Update scheduler options
    self.scheduler_options = scheduler_options + "\n"
    if project:
        self.scheduler_options += "#BSUB -P {}\n".format(project)
    if queue:
        self.scheduler_options += "#BSUB -q {}\n".format(queue)
    if request_by_nodes:
        self.scheduler_options += "#BSUB -nnodes {}\n".format(nodes_per_block)
    else:
        # Core-based request: both core counts must be given up front
        assert cores_per_block is not None and cores_per_node is not None, \
            "Requesting resources by the number of cores. " \
            "Need to specify cores_per_block and cores_per_node in the LSF provider."
        self.scheduler_options += "#BSUB -n {}\n".format(cores_per_block)
        self.scheduler_options += '#BSUB -R "span[ptile={}]"\n'.format(cores_per_node)
        # Set nodes_per_block manually for Parsl strategy
        assert cores_per_node != 0, "Need to specify a non-zero cores_per_node."
        self.nodes_per_block = int(math.ceil(cores_per_block / cores_per_node))
    self.worker_init = worker_init
|
Parsl/parsl
|
[
369,
114,
369,
333,
1476980871
] |
def submit(self, command, tasks_per_node, job_name="parsl.lsf"):
    """Submit the command as an LSF job.
    Parameters
    ----------
    command : str
        Command to be made on the remote side.
    tasks_per_node : int
        Command invocations to be launched per node
    job_name : str
        Name for the job (must be unique).
    Returns
    -------
    None or str
        If at capacity, returns None; otherwise, a string identifier for the job
    """
    # Timestamp suffix keeps the script name unique across submissions
    job_name = "{0}.{1}".format(job_name, time.time())
    script_path = "{0}/{1}.submit".format(self.script_dir, job_name)
    script_path = os.path.abspath(script_path)
    logger.debug("Requesting one block with {} nodes".format(self.nodes_per_block))
    job_config = {}
    job_config["submit_script_dir"] = self.channel.script_dir
    job_config["nodes"] = self.nodes_per_block
    job_config["tasks_per_node"] = tasks_per_node
    job_config["walltime"] = wtime_to_minutes(self.walltime)
    job_config["scheduler_options"] = self.scheduler_options
    job_config["worker_init"] = self.worker_init
    job_config["user_script"] = command
    # Wrap the command (overwrites the raw command set just above)
    job_config["user_script"] = self.launcher(command,
                                              tasks_per_node,
                                              self.nodes_per_block)
    logger.debug("Writing submit script")
    self._write_submit_script(template_string, script_path, job_name, job_config)
    if self.move_files:
        logger.debug("moving files")
        channel_script_path = self.channel.push_file(script_path, self.channel.script_dir)
    else:
        logger.debug("not moving files")
        channel_script_path = script_path
    # Some LSF installations require the script on stdin ("bsub < script")
    if self.bsub_redirection:
        cmd = "bsub < {0}".format(channel_script_path)
    else:
        cmd = "bsub {0}".format(channel_script_path)
    retcode, stdout, stderr = super().execute_wait(cmd)
    job_id = None
    if retcode == 0:
        # Parse lines like: "Job <1234> is submitted to queue <normal>."
        for line in stdout.split('\n'):
            if line.lower().startswith("job") and "is submitted to" in line.lower():
                job_id = line.split()[1].strip('<>')
                self.resources[job_id] = {'job_id': job_id, 'status': JobStatus(JobState.PENDING)}
    else:
        logger.warning("Submission of command to scale_out failed")
        logger.error("Retcode:%s STDOUT:%s STDERR:%s", retcode, stdout.strip(), stderr.strip())
    return job_id
|
Parsl/parsl
|
[
369,
114,
369,
333,
1476980871
] |
def get_metadata(self):
    """Returns the current metadata as a dictionary.

    Stub with no body: as written it returns None; presumably meant to be
    implemented/overridden elsewhere — confirm against callers.
    """
|
googleapis/python-aiplatform
|
[
306,
205,
306,
52,
1600875819
] |
def exponential_draw(lambdax):
    """Draw a single sample from Exp(rate=lambdax).

    numpy parametrises the exponential by scale = 1 / rate, so the rate
    is inverted before delegating.
    """
    return exponential(scale=1.0 / lambdax, size=None)
|
ThomasBrouwer/BNMTF
|
[
18,
6,
18,
1,
1438595414
] |
def test_on_device_via_string():
    """on_device() accepts a device given as a plain string name."""
    x = relay.Var("x")
    call = relay.annotation.on_device(x, "cuda")
    assert isinstance(call, relay.Call)
    assert len(call.args) == 1
    assert call.args[0] == x
    vdev = call.attrs.virtual_device
    assert vdev.device_type_int == 2  # ie kDLCUDA
    assert vdev.virtual_device_id == 0
    assert vdev.target is None
    assert vdev.memory_scope == ""
    assert call.attrs.constrain_body
    assert not call.attrs.constrain_result
|
dmlc/tvm
|
[
9142,
2938,
9142,
595,
1476310828
] |
def test_on_device_invalid_device():
    """A bogus device string must be rejected with ValueError."""
    x = relay.Var("x")
    with pytest.raises(ValueError):
        relay.annotation.on_device(x, "bogus")
|
dmlc/tvm
|
[
9142,
2938,
9142,
595,
1476310828
] |
def test_on_device_free():
    """With both constraints disabled the virtual device stays unconstrained."""
    call = relay.annotation.on_device(
        relay.Var("x"), "cuda", constrain_result=False, constrain_body=False
    )
    assert call.attrs.virtual_device.device_type_int == -1  # ie kInvalidDeviceType
    assert not call.attrs.constrain_body
    assert not call.attrs.constrain_result
|
dmlc/tvm
|
[
9142,
2938,
9142,
595,
1476310828
] |
def tokenize(self, text):
    """Split *text* into tokens.

    Abstract: always raises here; concrete tokenizers must override.

    Args:
      text: string to tokenize.

    Raises:
      NotImplementedError: always, on this base implementation.
    """
    raise NotImplementedError("Tokenizer must override tokenize() method")
|
google-research/google-research
|
[
27788,
6881,
27788,
944,
1538678568
] |
def __init__(self, use_stemmer=False):
    """Constructor for DefaultTokenizer.
    Args:
      use_stemmer: boolean, indicating whether Porter stemmer should be used
        to strip word suffixes to improve matching.
    """
    if use_stemmer:
        self._stemmer = porter.PorterStemmer()
    else:
        self._stemmer = None
|
google-research/google-research
|
[
27788,
6881,
27788,
944,
1538678568
] |
def __init__(self, action: str = None) -> None:
    # Bind the module-level service ``prefix`` to the given action name.
    super().__init__(prefix, action)
|
cloudtools/awacs
|
[
386,
98,
386,
14,
1364415387
] |
def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
    # Build an ARN scoped to this module's service ``prefix``.
    super().__init__(
        service=prefix, resource=resource, region=region, account=account
    )
|
cloudtools/awacs
|
[
386,
98,
386,
14,
1364415387
] |
def RunSteps(api):
    """Example/test recipe: exercises every public entry point of the git api."""
    url = 'https://chromium.googlesource.com/chromium/src.git'
    # git.checkout can optionally dump GIT_CURL_VERBOSE traces to a log file,
    # useful for debugging git access issues that are reproducible only on bots.
    curl_trace_file = None
    if api.properties.get('use_curl_trace'):
        curl_trace_file = api.path['start_dir'].join('curl_trace.log')
    submodule_update_force = api.properties.get('submodule_update_force', False)
    submodule_update_recursive = api.properties.get('submodule_update_recursive',
                                                    True)
    # You can use api.git.checkout to perform all the steps of a safe checkout.
    # Prefer the symbolic ref when buildbucket provides one, else the commit id.
    revision = (api.buildbucket.gitiles_commit.ref or
                api.buildbucket.gitiles_commit.id)
    retVal = api.git.checkout(
        url,
        ref=revision,
        recursive=True,
        submodule_update_force=submodule_update_force,
        set_got_revision=api.properties.get('set_got_revision'),
        curl_trace_file=curl_trace_file,
        remote_name=api.properties.get('remote_name'),
        display_fetch_size=api.properties.get('display_fetch_size'),
        file_name=api.properties.get('checkout_file_name'),
        submodule_update_recursive=submodule_update_recursive,
        use_git_cache=api.properties.get('use_git_cache'),
        tags=api.properties.get('tags'))
    assert retVal == "deadbeef", (
        "expected retVal to be %r but was %r" % ("deadbeef", retVal))
    # count_objects shows number and size of objects in .git dir.
    api.git.count_objects(
        name='count-objects',
        can_fail_build=api.properties.get('count_objects_can_fail_build'),
        git_config_options={'foo': 'bar'})
    # Get the remote URL.
    api.git.get_remote_url(
        step_test_data=lambda: api.raw_io.test_api.stream_output('foo'))
    api.git.get_timestamp(test_data='foo')
    # You can use api.git.fetch_tags to fetch all tags from the remote
    api.git.fetch_tags(api.properties.get('remote_name'))
    # If you need to run more arbitrary git commands, you can use api.git itself,
    # which behaves like api.step(), but automatically sets the name of the step.
    with api.context(cwd=api.path['checkout']):
        api.git('status')
    api.git('status', name='git status can_fail_build',
            can_fail_build=True)
    api.git('status', name='git status cannot_fail_build',
            can_fail_build=False)
    # You should run git new-branch before you upload something with git cl.
    api.git.new_branch('refactor')  # Upstream is origin/master by default.
    # And use upstream kwarg to set up different upstream for tracking.
    api.git.new_branch('feature', upstream='refactor')
    # You can use api.git.rebase to rebase the current branch onto another one
    api.git.rebase(name_prefix='my repo', branch='origin/master',
                   dir_path=api.path['checkout'],
                   remote_name=api.properties.get('remote_name'))
    if api.properties.get('cat_file', None):
        step_result = api.git.cat_file_at_commit(api.properties['cat_file'],
                                                 revision,
                                                 stdout=api.raw_io.output())
        if 'TestOutput' in step_result.stdout:
            pass  # Success!
    # Bundle the repository.
    api.git.bundle_create(
        api.path['start_dir'].join('all.bundle'))
|
endlessm/chromium-browser
|
[
21,
16,
21,
3,
1435959644
] |
def set_home(home):
    """Point CONFIG_PATH, CACHE_PATH and SHARE_PATH below *home*."""
    # This module should be refactored into object to avoid the anti-pattern global statement
    global CONFIG_PATH, CACHE_PATH, SHARE_PATH
    CONFIG_PATH, CACHE_PATH, SHARE_PATH = (
        os.path.join(home, sub) for sub in ("config", "cache", "share")
    )
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def get_cache_path(*args):
    """
    Get a cache path to read or write some cached data
    :param args: a list of subfolders. Those will be created when needed
    """
    return get_path(CACHE_PATH, *args)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def get_path(*args):
    """ Helper for get_*_path methods: join the components, create the
    parent directory if needed, and return the native absolute path. """
    full_path = os.path.join(*args)
    # Only the parent is created: the last component may be a file name
    to_make = os.path.dirname(full_path)
    mkdir(to_make, recursive=True)
    full_path = to_native_path(full_path)
    return full_path
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def mkdir(dest_dir, recursive=False):
    """ Recursive mkdir (do not fail if file exists)

    :param dest_dir: directory to create
    :param recursive: also create missing parent directories
    :raises OSError: for any failure other than "already exists"
    """
    try:
        if recursive:
            os.makedirs(dest_dir)
        else:
            os.mkdir(dest_dir)
    except OSError as exc:
        # Use the symbolic constant instead of the magic number 17
        if exc.errno != errno.EEXIST:
            raise
        # Directory already exists -> no exception
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def write_file_if_different(data, out_path, mode="w"):
    """ Write *data* to *out_path* unless the file already holds exactly it """
    try:
        with open(out_path, "r") as existing:
            if existing.read() == data:
                ui.debug("skipping write to %s: same content" % (out_path))
                return
    except Exception:
        # Missing or unreadable file: fall through and (re)write it
        pass
    with open(out_path, mode) as out_file:
        out_file.write(data)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def _copy_link(src, dest, quiet):
    """ Re-create the symlink *src* at *dest*, preserving its target """
    if not os.path.islink(src):
        raise Exception("%s is not a link!" % src)
    target = os.readlink(src)  # pylint:disable=no-member
    # remove existing stuff
    if os.path.lexists(dest):
        rm(dest)
    if sys.stdout.isatty() and not quiet:
        print("-- Installing %s -> %s" % (dest, target))
    mkdir(os.path.dirname(dest), recursive=True)
    os.symlink(target, dest)  # pylint:disable=no-member
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def _handle_files(src, dest, root, files, filter_fun, quiet):
    """ Helper function used by install()

    Copy the *files* found under *root* (a subdirectory of *src*) into the
    mirrored location below *dest*, preserving symlinks as links and skipping
    entries rejected by *filter_fun*.

    :return: the list of installed paths, relative to *src*
    """
    installed = list()
    rel_root = os.path.relpath(root, src)
    if rel_root == ".":
        rel_root = ""
    new_root = os.path.join(dest, rel_root)
    for f in files:
        # filter_fun sees the path relative to the install source
        if not filter_fun(os.path.join(rel_root, f)):
            continue
        fsrc = os.path.join(root, f)
        fdest = os.path.join(new_root, f)
        rel_path = os.path.join(rel_root, f)
        if os.path.islink(fsrc):
            mkdir(new_root, recursive=True)
            # Recreate the link itself, do not follow it
            _copy_link(fsrc, fdest, quiet)
            installed.append(rel_path)
        else:
            if os.path.lexists(fdest) and os.path.isdir(fdest):
                raise Exception("Expecting a file but found a directory: %s" % fdest)
            if not quiet:
                print("-- Installing %s" % fdest.encode('ascii', "ignore"))
            mkdir(new_root, recursive=True)
            # We do not want to fail if dest exists but is read only
            # (following what `install` does, but not what `cp` does)
            rm(fdest)
            shutil.copy(fsrc, fdest)
            installed.append(rel_path)
    return installed
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def no_filter_fun(_unused):
    """ Accept-everything filter: always returns True """
    return True
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def safe_copy(src, dest):
    """
    Copy *src* to *dest*, but do not overwrite *dest*
    if it is more recent than *src*.
    If *dest* is a directory, *src* will be copied inside it.
    """
    target = dest
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    if up_to_date(target, src):
        return
    shutil.copy(src, target)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def copy_git_src(src, dest):
    """
    Copy only the git-tracked files of the worktree at *src* into *dest*.
    """
    process = subprocess.Popen(["git", "ls-files", "."], cwd=src,
                               stdout=subprocess.PIPE)
    (out, _) = process.communicate()
    for raw_name in out.splitlines():
        name = raw_name.decode('ascii')
        install(os.path.join(src, name), os.path.join(dest, name), quiet=True)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def rmtree(path):
    """
    shutil.rmtree() on steroids.
    Taken from gclient source code (BSD license)
    Recursively removes a directory, even if it's marked read-only.
    shutil.rmtree() doesn't work on Windows if any of the files or directories
    are read-only, which svn repositories and some .svn files are. We need to
    be able to force the files to be writable (i.e., deletable) as we traverse
    the tree.
    Even with all this, Windows still sometimes fails to delete a file, citing
    a permission error (maybe something to do with antivirus scans or disk
    indexing). The best suggestion any of the user forums had was to wait a
    bit and try again, so we do that too. It's hand-waving, but sometimes it
    works. :/
    On POSIX systems, things are a little bit simpler. The modes of the files
    to be deleted doesn't matter, only the modes of the directories containing
    them are significant. As the directory tree is traversed, each directory
    has its mode set appropriately before descending into it. This should
    result in the entire tree being removed, with the possible exception of
    ``path`` itself, because nothing attempts to change the mode of its parent.
    Doing so would be hazardous, as it's not a directory slated for removal.
    In the ordinary case, this is not a problem: for our purposes, the user
    will never lack write permission on ``path``'s parent.
    """
    if not os.path.exists(path):
        return
    if os.path.islink(path) or not os.path.isdir(path):
        raise Exception('Called rmtree(%s) in non-directory' % path)
    if sys.platform == 'win32':
        # Some people don't have the APIs installed. In that case we'll do without.
        win32api = None
        win32con = None
        try:
            import win32api
            import win32con
        except ImportError:
            pass
    else:
        # On POSIX systems, we need the x-bit set on the directory to access it,
        # the r-bit to see its contents, and the w-bit to remove files from it.
        # The actual modes of the files within the directory is irrelevant.
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
    def remove(func, subpath):
        """ Apply *func* (os.remove / os.rmdir) to *subpath*, forcing the
        file writable on Windows and retrying once after 100ms on EACCES. """
        if sys.platform == 'win32':
            os.chmod(subpath, stat.S_IWRITE)
            if win32api and win32con:
                win32api.SetFileAttributes(subpath, win32con.FILE_ATTRIBUTE_NORMAL)
        try:
            func(subpath)
        except OSError as e:
            # Only retry for the Windows EACCES case described in the docstring
            if e.errno != errno.EACCES or sys.platform != 'win32':
                raise
            # Failed to delete, try again after a 100ms sleep.
            time.sleep(0.1)
            func(subpath)
    for fn in os.listdir(path):
        # If fullpath is a symbolic link that points to a directory, isdir will
        # be True, but we don't want to descend into that as a directory, we just
        # want to remove the link. Check islink and treat links as ordinary files
        # would be treated regardless of what they reference.
        fullpath = os.path.join(path, fn)
        if os.path.islink(fullpath) or not os.path.isdir(fullpath):
            remove(os.remove, fullpath)
        else:
            # Recurse.
            rmtree(fullpath)
    remove(os.rmdir, path)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def ls_r(directory):
    """
    Return a sorted list of all the files present in *directory*,
    relative to it.  Empty directories are listed with a trailing
    path separator, e.g.::

        ["eggs/c", "eggs/d", "empty/", "spam/a", "spam/b"]
    """
    entries = list()
    for root, dirs, files in os.walk(directory):
        rel = os.path.relpath(root, directory)
        if rel == ".":
            # Top-level files keep their bare names
            entries.extend(files)
            continue
        if files:
            entries.extend(os.path.join(rel, f) for f in files)
        elif not dirs:
            # Completely empty directory: keep a marker with a trailing sep
            entries.append(rel + os.path.sep)
    return sorted(entries)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def to_posix_path(path, fix_drive=False):
    """
    Returns an absolute POSIX path from a DOS path

    :param path: the path to convert; ``~`` is expanded and the result is
        made absolute before separators are converted
    :param fix_drive: if True, will replace c: by /c/ (ala mingw)
    """
    res = os.path.expanduser(path)
    res = os.path.abspath(res)
    # BUG FIX: replace separators on the expanded, absolute path; the
    # original operated on the raw input and silently discarded the
    # expanduser/abspath work above.
    res = res.replace(ntpath.sep, posixpath.sep)
    if fix_drive:
        (drive, rest) = os.path.splitdrive(res)
        letter = drive[0]
        return "/" + letter + rest
    return res
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def to_native_path(path, normcase=True):
    """
    Return an absolute, native, symlink-resolved path.

    :param normcase: make sure the path is all lower-case on
                     case-insensitive filesystems
    """
    expanded = os.path.expanduser(path)
    if normcase:
        expanded = os.path.normcase(expanded)
    resolved = os.path.realpath(os.path.abspath(os.path.normpath(expanded)))
    if sys.platform.startswith("win"):
        resolved = to_dos_path(resolved)
    return resolved
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def is_empty(path):
    """ Check if the directory *path* is empty.

    :raises OSError: if *path* does not exist or cannot be listed
    """
    # An empty listing is falsy: no need to build a throwaway list() to compare
    return not os.listdir(path)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def __init__(self, name="tmp"):
""" TempDir Init """
self._temp_dir = tempfile.mkdtemp(prefix=name + "-")
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def __exit__(self, _type, value, tb):
    """ Remove the temporary directory, except when DEBUG is set and an
    exception is propagating (keep the directory for post-mortem). """
    if os.environ.get("DEBUG") and tb is not None:
        print("==")
        print("Not removing ", self._temp_dir)
        print("==")
        return
    rm(self._temp_dir)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def change_cwd(directory):
    """ Change the current working dir for the duration of the block.

    :raises Exception: if *directory* does not exist.

    The previous working directory is restored even when the body of the
    ``with`` block raises.
    """
    if not os.path.exists(directory):
        mess = "Cannot change working dir to '%s'\n" % directory
        mess += "This path does not exist"
        raise Exception(mess)
    previous_cwd = os.getcwd()
    os.chdir(directory)
    try:
        yield
    finally:
        # BUG FIX: without try/finally an exception in the with-body left
        # the process stranded in *directory*.
        os.chdir(previous_cwd)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def broken_symlink(file_path):
    """ True when *file_path* is a symlink whose target is missing """
    if not os.path.lexists(file_path):
        return False
    return not os.path.exists(file_path)
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def is_executable_binary(file_path):
    """
    True when *file_path* is a regular file that is both
    executable and a binary (i.e. not a script).
    """
    return (os.path.isfile(file_path)
            and os.access(file_path, os.X_OK)
            and is_binary(file_path))
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def __init__(self, path):
""" Preserve file metadata of 'path' """
self.path = path
self.time = None
self.mode = None
|
aldebaran/qibuild
|
[
67,
45,
67,
42,
1297185497
] |
def extractRoontalesCom(item):
    '''
    Parser for 'roontales.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    if 'PRC' in item['tags']:
        return buildReleaseMessageWithType(item, 'PRC', vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    if 'Loiterous' in item['tags']:
        return buildReleaseMessageWithType(item, 'Loiterous', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
|
fake-name/ReadableWebProxy
|
[
191,
16,
191,
3,
1437712243
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.