fix: to_tensor applied on a jt.Var Allow jt.transform.to_tensor to be applied on a jt.Var (do nothing but return the input).
@@ -408,6 +408,9 @@ def to_tensor(pic): img = Image.open(...) img_ = transform.to_tensor(img) """ + if isinstance(pic, jt.Var): + return pic + if isinstance(pic, tuple): # try convert ten crop tuple pic = ( to_tensor(pic) for p in pic )
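A minimal usage sketch of the new early return, assuming a jittor install (`jt.array` and `transform.to_tensor` are the names the project's own docstring uses):

```python
import jittor as jt
from jittor import transform

img = jt.array([[0.1, 0.2], [0.3, 0.4]])  # already a jt.Var, not a PIL image
out = transform.to_tensor(img)
assert out is img                         # with this fix, the input is returned unchanged
```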
Add -ate words (Issue As mentioned, the following words were added: Authenticate, Automate, Assimilate, Associate. Followed separate parts-of-speech guidelines for Associate. Assimilate had a trailing semicolon removed as it interferes with JSON parsing.
"word": "Assimilate", "definitions": [ "To take in (information, ideas, or culture) and understand fully." - "To cause (something) to resemble; liken." + "To cause (something) to resemble." ], "parts-of-speech": "Verb" }
Update default size to 20 TiB Now in the managed service provider add-on of staging and production, the only available storage cluster size is 20 TiB.
@@ -206,7 +206,7 @@ ENV_DATA: deploy_acm_hub_cluster: false #Managed service - Managed StorageCluster size in TiB - size: '4' + size: '20' # This section is related to upgrade UPGRADE:
Python API: enhance array bindings to ease refactoring In particular, isolate what can be shared code-generation-wise from what cannot. TN:
% if cls.element_type()._exposed or ctx.library_fields_all_public: <% type_name = cls.name().camel - element_type = pyapi.type_internal_name(cls.element_type()) + c_element_type = pyapi.type_internal_name(cls.element_type()) %> @@ -18,7 +18,7 @@ class ${type_name}(object): self._length = c_value.contents.n items_addr = _field_address(c_value.contents, 'items') - items = ${element_type}.from_address(items_addr) + items = self._c_element_type.from_address(items_addr) self._items = ctypes.pointer(items) if inc_ref: @@ -47,7 +47,10 @@ class ${type_name}(object): elif not (0 <= key < self._length): raise IndexError() - item = self._items[key] + return self._unwrap_item(self._items[key]) + + @staticmethod + def _unwrap_item(item): ## In the case of array of Structure instances, array[index] returns a ## reference to the record. Thus, in order to keep memory safety, we ## must copy the record itself so that the array can be deallocated @@ -59,10 +62,12 @@ class ${type_name}(object): return ${pyapi.wrap_value('item', elt_type)} % endif + _c_element_type = ${c_element_type} + class _c_struct(ctypes.Structure): _fields_ = [('n', ctypes.c_int), ('ref_count', ctypes.c_int), - ('items', ${element_type} * 1)] + ('items', ${c_element_type} * 1)] _c_type = ctypes.POINTER(_c_struct)
readd the BOM stripping from source for docutils The case is tested with the testing/examples/good/bom.rst file.
@@ -170,6 +170,12 @@ def check_source( string_io = io.StringIO() + # This is a hack to avoid false positive from docutils (#23). docutils mistakes BOMs for actual + # visible letters. This results in the "underline too short" warning firing. + # This is tested in the CLI integration tests with the `testing/examples/good/bom.rst` file. + with contextlib.suppress(UnicodeError): + source = source.encode("utf-8").decode("utf-8-sig") + with contextlib.suppress(docutils.utils.SystemMessage, AttributeError): # Sphinx will sometimes throw an `AttributeError` trying to access # "self.state.document.settings.env". Ignore this for now until we
Change missing memcache chunk error to warning Review-Url:
@@ -265,14 +265,14 @@ def get_cached_swarming_bot_zip(version): for idx, f in enumerate(futures): chunk = f.get_result() if chunk is None: - logging.debug('bot code %s was missing chunk %d/%d', - version, idx, len(futures)) + logging.debug( + 'bot code %s was missing chunk %d/%d', version, idx, len(futures)) missing += 1 else: content += chunk if missing: - logging.error('bot code %s was missing %d/%d chunks', - version, missing, len(futures)) + logging.warning( + 'bot code %s was missing %d/%d chunks', version, missing, len(futures)) return None h = hashlib.sha256() h.update(content)
port ge changes from bert/pytorch_fusion Summary: Pull Request resolved:
-#include <memory> -#include <torch/csrc/jit/runtime/graph_executor.h> -#include <torch/csrc/jit/jit_log.h> #include <torch/csrc/jit/ir/alias_analysis.h> +#include <torch/csrc/jit/jit_log.h> #include <torch/csrc/jit/passes/constant_propagation.h> #include <torch/csrc/jit/passes/guard_elimination.h> #include <torch/csrc/jit/passes/peephole.h> +#include <torch/csrc/jit/runtime/graph_executor.h> +#include <memory> #include <unordered_set> namespace torch { @@ -12,8 +12,7 @@ namespace jit { struct GuardElimination { GuardElimination(std::shared_ptr<Graph> graph) - : graph_(std::move(graph)), - aliasDb_(std::make_unique<AliasDb>(graph_)) {} + : graph_(std::move(graph)), aliasDb_(std::make_unique<AliasDb>(graph_)) {} void run() { const size_t MAX_ATTEMPTS = 5; @@ -123,8 +122,11 @@ struct GuardElimination { auto it = guard; while (it != output) { if (it->kind() != prim::Guard && it->kind() != prim::Constant) { - GRAPH_DEBUG("found an unexpected node ", *it, - " while trying to eliminate ", *guard); + GRAPH_DEBUG( + "found an unexpected node ", + *it, + " while trying to eliminate ", + *guard); return false; } it = it->prev(); @@ -160,7 +162,10 @@ struct GuardElimination { // `checkInputs` check the invariants specified in `removableGuard` // on inputs to `n`. The invariants must hold, or an input must // be a `prim::Constant` or be included as an exception in `except` - bool checkInputs(Node *n, const std::unordered_set<size_t> &except, bool allow_numbers) { + bool checkInputs( + Node* n, + const std::unordered_set<size_t>& except, + bool allow_numbers) { bool all_inputs_guarded = true; size_t i = 0; for (auto input : n->inputs()) { @@ -173,7 +178,10 @@ struct GuardElimination { input->node()->kind() != prim::Guard || input->type()->expect<TensorType>()); } else { - GRAPH_DEBUG("input ", input->debugName(), " isn't guarded, type ", + GRAPH_DEBUG( + "input ", + input->debugName(), + " isn't guarded, type ", *input->type()); all_inputs_guarded = false; break; @@ -211,7 +219,6 @@ private: // returns // false or inputs are `prim::Constant` bool removableGuard(Node* n) { - const static auto no_exceptions = std::unordered_set<size_t>{}; switch (n->kind()) { case aten::add: @@ -271,6 +278,14 @@ private: case aten::reciprocal: case aten::addcmul: case aten::where: + case aten::_cast_Float: + case aten::_sigmoid_backward: + case aten::_tanh_backward: + case aten::__and__: + case aten::__or__: + case aten::__xor__: + case aten::__lshift__: + case aten::__rshift__: return checkInputs(n, no_exceptions, true); case aten::avg_pool2d: return checkInputs(n, no_exceptions, false); @@ -366,7 +381,6 @@ private: static std::unordered_set<Symbol> simple_ops_; }; - void EliminateRedundantGuards(std::shared_ptr<Graph> graph) { GuardElimination ge(std::move(graph)); ge.run();
Update localftp.py Improve perf for float and profile localftp index fetchers
@@ -651,7 +651,8 @@ class IndexFetcher_wmo(LocalFTPArgoIndexFetcher): il_read, il_loaded, il_this = 0, 0, 0 for line in index: il_this = il_loaded - if re.search("/%i/" % wmo, line.split(',')[0]): + # if re.search("/%i/" % wmo, line.split(',')[0]): + if "/%i/" % wmo in line: # much faster than re # Search for the wmo at the beginning of the file name under: /<dac>/<wmo>/profiles/ results += line il_loaded += 1 @@ -682,7 +683,8 @@ class IndexFetcher_wmo(LocalFTPArgoIndexFetcher): il_read, il_loaded, il_this, moveon = 0, 0, 0, True for line in index_file: il_this = il_loaded - if re.search("/%i/" % wmo, line.split(',')[0]): + # if re.search("/%i/" % wmo, line.split(',')[0]): + if "/%i/" % wmo in line: # much faster than re results += line il_loaded += 1 if il_this == il_loaded and il_this > 0: @@ -692,10 +694,12 @@ class IndexFetcher_wmo(LocalFTPArgoIndexFetcher): # Then look for the profile: if results: def search_this(this_line): - return np.any([re.search("%0.3d.nc" % c, this_line.split(',')[0]) for c in cyc]) + # return np.any([re.search("%0.3d.nc" % c, this_line.split(',')[0]) for c in cyc]) + return np.any(["%0.3d.nc" % c in this_line for c in cyc]) if np.all(cyc >= 1000): def search_this(this_line): - return np.any([re.search("%0.4d.nc" % c, this_line.split(',')[0]) for c in cyc]) + # return np.any([re.search("%0.4d.nc" % c, this_line.split(',')[0]) for c in cyc]) + return np.any(["%0.4d.nc" % c in this_line for c in cyc]) il_loaded, cyc_results = 0, "" for line in results.split(): if search_this(line):
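A tiny sketch of what the plain substring test matches, using a hypothetical WMO number and index line (the real lines come from the Argo index files):

```python
wmo = 6902746                                                # hypothetical float WMO number
line = "aoml/6902746/profiles/D6902746_001.nc,20181011,..."  # hypothetical index line
needle = "/%i/" % wmo                                        # "/6902746/"
print(needle in line)  # True -- plain substring test, no regex engine per line
```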
Update malicious_php.txt ```/assets/images/accesson.php``` is the main sign of compromised MODX. ```/assets/images/customizer.php``` is the secondary one.
@@ -499,3 +499,9 @@ westflies.com/api/api.php # Reference: https://twitter.com/MalwarePatrol/status/1159617579469742082 /835pnjmr1w4p5ypvgcymfkkx.php + +# Reference: https://forums.modx.com/thread/102644/evo-1-2-1-hacked-again-and-again +# Generic trails for compromised MODX CMS-es + +/assets/images/accesson.php +/assets/images/customizer.php
Fix container priority for docker compose recipe Cache must be running *before* the server. Server must be running *before* the worker.
@@ -57,6 +57,18 @@ services: - inventree_data:/var/lib/postgresql/data/ restart: unless-stopped + # redis acts as database cache manager + inventree-cache: + container_name: inventree-cache + image: redis:7.0 + depends_on: + - inventree-db + env_file: + - .env + ports: + - ${INVENTREE_CACHE_PORT:-6379}:6379 + restart: unless-stopped + # InvenTree web server services # Uses gunicorn as the web server inventree-server: @@ -67,6 +79,7 @@ services: - 8000 depends_on: - inventree-db + - inventree-cache env_file: - .env volumes: @@ -81,7 +94,6 @@ services: image: inventree/inventree:stable command: invoke worker depends_on: - - inventree-db - inventree-server env_file: - .env @@ -113,18 +125,6 @@ services: - inventree_data:/var/www restart: unless-stopped - # redis acts as database cache manager - inventree-cache: - container_name: inventree-cache - image: redis:7.0 - depends_on: - - inventree-db - env_file: - - .env - ports: - - ${INVENTREE_CACHE_PORT:-6379}:6379 - restart: unless-stopped - volumes: # NOTE: Change /path/to/data to a directory on your local machine # Persistent data, stored external to the container(s)
Add extra split identifier Added an extra split identifier to make sure presence of extras does not influence determining whether clearml is included in requirements.
@@ -255,7 +255,7 @@ class CreateAndPopulate(object): for line in reqs: if line.strip().startswith('#'): continue - package = reduce(lambda a, b: a.split(b)[0], "#;@=~<>", line).strip() + package = reduce(lambda a, b: a.split(b)[0], "#;@=~<>[", line).strip() if package == 'clearml': clearml_found = True break
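A minimal illustration of why the extra `[` separator matters, using a hypothetical requirements line with extras:

```python
from functools import reduce

line = "clearml[s3]==1.4.0"  # hypothetical requirements entry with an extra
old = reduce(lambda a, b: a.split(b)[0], "#;@=~<>", line).strip()
new = reduce(lambda a, b: a.split(b)[0], "#;@=~<>[", line).strip()
print(old)  # 'clearml[s3]' -- never compares equal to 'clearml', so the check missed it
print(new)  # 'clearml'
```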
Fix exception when using non-device parameters in DeviceCommands This happens e.g. when starting udiskie with: udiskie -ANT --notify-command "echo {event}" Regression introduced by: Commit: Subject: Remove deprecated syntax for positional parameter in format strings
@@ -192,6 +192,7 @@ class DeviceCommand: for text, kwd, spec, conv in formatter.parse(arg): if kwd is None: continue + if kwd in DeviceFilter.VALID_PARAMETERS: self.used_attrs.add(kwd) if kwd not in DeviceFilter.VALID_PARAMETERS and \ kwd not in self.extra:
ExamplesIterable fixes Fixes for the ExamplesIterable `shard_data_sources` methods.
@@ -108,10 +108,10 @@ class ExamplesIterable(_BaseExamplesIterable): def shuffle_data_sources(self, generator: np.random.Generator) -> "ExamplesIterable": return ShardShuffledExamplesIterable(self.generate_examples_fn, self.kwargs, generator) - def shard_data_sources(self, shard_idx: int) -> "MappedExamplesIterable": + def shard_data_sources(self, shard_idx: int) -> "ExamplesIterable": """Keep only the requested shard.""" kwargs_with_requested_data_source = _split_gen_kwargs(self.kwargs, max_num_jobs=self.n_shards)[shard_idx] - yield from self.generate_examples_fn(**kwargs_with_requested_data_source) + return ExamplesIterable(self.generate_examples_fn, kwargs_with_requested_data_source) @property def n_shards(self) -> int: @@ -129,7 +129,7 @@ class ShardShuffledExamplesIterable(ExamplesIterable): kwargs_with_shuffled_shards = _shuffle_gen_kwargs(rng, self.kwargs) yield from self.generate_examples_fn(**kwargs_with_shuffled_shards) - def shard_data_sources(self, shard_idx: int) -> "MappedExamplesIterable": + def shard_data_sources(self, shard_idx: int) -> "ExamplesIterable": """Keep only the requested shard.""" rng = deepcopy(self.generator) kwargs_with_shuffled_shards = _shuffle_gen_kwargs(rng, self.kwargs)
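A generic reminder (not the datasets code itself) of why the old version could not return an `ExamplesIterable`: any `yield` in a function body turns it into a generator function, so the annotated return type was never honored:

```python
def shard_old(data):
    yield from data    # the `yield` makes this a generator function

def shard_new(data):
    return list(data)  # actually returns the declared container type

print(type(shard_old([1, 2, 3])))  # <class 'generator'>
print(type(shard_new([1, 2, 3])))  # <class 'list'>
```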
Windows: Fix, bdist_nuitka for nested namespaces was broken * The fixup for the filename was no longer necessary after the usage of "asPath" on the module name and was even causing an issue with copying the file to itself, which doesn't work on Windows.
@@ -28,7 +28,6 @@ import wheel.bdist_wheel # pylint: disable=I0021,import-error,no-name-in-module from nuitka.tools.testing.Common import my_print from nuitka.utils.Execution import check_call -from nuitka.utils.FileOperations import copyTree, removeDirectory def setupNuitkaDistutilsCommands(dist, keyword, value): @@ -224,9 +223,10 @@ class build(distutils.command.build.build): command.append(main_filename) - # added for clarity - my_print("Building: %s" % to_build, style="yellow") + # Adding traces for clarity, TODO: color scheme used is not really clear. + my_print("Building: %s with %r" % (to_build, command), style="yellow") check_call(command, cwd=build_lib) + my_print("Finished compilation of %s." % to_build, style="yellow") for root, _, filenames in os.walk(build_lib): for filename in filenames: @@ -235,17 +235,6 @@ class build(distutils.command.build.build): if fullpath.lower().endswith((".py", ".pyw", ".pyc", ".pyo")): os.unlink(fullpath) - # If the Python module has more than one parent package (e.g. - # 'a.b.mod'), the compiled module will be in 'a.b/mod.so'. Move it - # to 'a/b/mod.so', to make imports work. - if package and "." in package: - compiled_package_path = os.path.join(build_lib, package.asPath()) - assert os.path.isdir(compiled_package_path), compiled_package_path - - fixed_package_path = os.path.join(build_lib, package.asPath()) - copyTree(compiled_package_path, fixed_package_path) - removeDirectory(compiled_package_path, ignore_errors=False) - os.chdir(old_dir) self.build_lib = build_lib
Fix the bug when using gdbserver If we execute the `monitor` command in `is_debug_probe()`, GDB with a normal GDBserver will get stuck after the first `continue`. To avoid this, we check whether the output of `monitor help` shows that we are using GDBserver.
@@ -25,6 +25,21 @@ def is_debug_probe(): Returns True if the target is a debug probe for an embedded device. Currently detects the Black Magic Probe and the SEGGER J-Link GDB Server. """ + try: + help_output = gdb.execute("monitor help", to_string=True) + if ( + "GDBserver" in help_output + and "Black Magic Probe" not in help_output + and "SEGGER J-Link GDB Server" not in help_output + ): + # We can't directly use the `monitor` command if we are using normal GDBserver, because the `monitor` command will cause GDBserver stuck. + # So we check if we are using GDBserver by checking the output of `monitor help`. + # TODO: Does this problem only occur with normal GDBserver? + # If not, we should find a better way to check what remote server we are using. + return False + except gdb.error: + # Now we check if we are using Black Magic Probe or the SEGGER J-Link GDB Server + pass try: monitor_output = gdb.execute("monitor", to_string=True) except gdb.error:
Fix BQL error on holdings by account page Commit ("Use better default order for holdings") introduced a BQL error on the "Holdings by Account" page.
cost(sum(position)) as book_value, value(sum(position)) as market_value WHERE account_sortkey(account) ~ "^[01]" - GROUP BY account, cost_currency, account_sortkey(account) + GROUP BY account, cost_currency, account_sortkey(account), currency ORDER BY account_sortkey(account), currency', 'currency': 'SELECT units(sum(position)) as units,
Update README.md Add a "note" on font awesome usage.
@@ -30,6 +30,8 @@ Supported FontAwesome version: 4 (free version of 5 doesn't include some of the Explicitly unsupported Python versions: 3.2 (missing unicode literals) +:information_source: The ![Font Awesome](https://fontawesome.com/) is required for all themes that contain icons (because that is the font that includes these icons). Please refer to your distribution's package management on how to install them, or get them from their website directly. Also, please note that Font Awesome removed some icons used by `bumblebee-status` from the free set in version 5, so if possible, stick with 4. + Example usage: ```
Delete some unused variables. Summary: Pull Request resolved:
@@ -72,11 +72,10 @@ Tensor prelu_cuda(const Tensor& self, const Tensor& weight_) { AT_CHECK(input_ndim > 0, "Not allow zero-dim input tensor."); int64_t channel_size = 1; // channel_size default to 1 - int64_t input_dim0_size = 1, input_stride0 = 1, input_stride1 = 1; + int64_t input_stride0 = 1, input_stride1 = 1; if (input_ndim > 1) { channel_size = input.size(1); // channel is the 2nd dim of input - input_dim0_size = input.size(0); input_stride0 = strides[0]; input_stride1 = strides[1]; } @@ -189,11 +188,10 @@ std::tuple<Tensor, Tensor> prelu_backward_cuda(const Tensor& grad_out_, const Te AT_CHECK(input_ndim > 0, "Not allow zero-dim input tensor."); int64_t channel_size = 1; // channel_size default to 1 - int64_t input_dim0_size = 1, input_stride0 = 1, input_stride1 = 1; + int64_t input_stride0 = 1, input_stride1 = 1; if (input_ndim > 1) { channel_size = input.size(1); // channel is the 2nd dim of input - input_dim0_size = input.size(0); input_stride0 = strides[0]; input_stride1 = strides[1]; }
add filter for boolean values in dict2string so "no_rot=True" works (issue
@@ -33,6 +33,9 @@ def _dict2string(projparams): # convert a dict to a proj4 string. pjargs = [] for key, value in projparams.items(): + if type(value) == bool and value == True: + pjargs.append("+" + key + " ") + else: pjargs.append("+" + key + "=" + str(value) + " ") return "".join(pjargs)
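A standalone sketch of the fixed behavior (a simplified re-implementation for illustration, not the pyproj module itself):

```python
def dict2string(projparams):
    # Convert a dict of PROJ parameters to a proj4-style string; boolean True
    # values become bare flags such as "+no_rot" instead of "+no_rot=True".
    pjargs = []
    for key, value in projparams.items():
        if isinstance(value, bool) and value:
            pjargs.append("+" + key + " ")
        else:
            pjargs.append("+" + key + "=" + str(value) + " ")
    return "".join(pjargs)

print(dict2string({"proj": "omerc", "no_rot": True}))  # '+proj=omerc +no_rot '
```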
Stats app : Fix bugs with `-preCache` argument Use script context Respect `-frames` argument
@@ -385,17 +385,21 @@ class stats( Gaffer.Application ) : IECore.msg( IECore.Msg.Level.Error, "stats", "Scene \"%s\" does not exist" % args["scene"].value ) return - if args["preCache"].value : - GafferSceneTest.traverseScene( scene ) + def computeScene() : - memory = _Memory.maxRSS() - with _Timer() as sceneTimer : - with self.__performanceMonitor or _NullContextManager(), self.__contextMonitor or _NullContextManager() : with Gaffer.Context( script.context() ) as context : for frame in self.__frames( script, args ) : context.setFrame( frame ) GafferSceneTest.traverseScene( scene ) + if args["preCache"].value : + computeScene() + + memory = _Memory.maxRSS() + with _Timer() as sceneTimer : + with self.__performanceMonitor or _NullContextManager(), self.__contextMonitor or _NullContextManager() : + computeScene() + self.__timers["Scene generation"] = sceneTimer self.__memory["Scene generation"] = _Memory.maxRSS() - memory @@ -416,17 +420,21 @@ class stats( Gaffer.Application ) : IECore.msg( IECore.Msg.Level.Error, "stats", "Image \"%s\" does not exist" % args["image"].value ) return - if args["preCache"].value : - GafferImageTest.processTiles( image ) + def computeImage() : - memory = _Memory.maxRSS() - with _Timer() as imageTimer : - with self.__performanceMonitor or _NullContextManager(), self.__contextMonitor or _NullContextManager() : with Gaffer.Context( script.context() ) as context : for frame in self.__frames( script, args ) : context.setFrame( frame ) GafferImageTest.processTiles( image ) + if args["preCache"].value : + computeImage() + + memory = _Memory.maxRSS() + with _Timer() as imageTimer : + with self.__performanceMonitor or _NullContextManager(), self.__contextMonitor or _NullContextManager() : + computeImage() + self.__timers["Image generation"] = imageTimer self.__memory["Image generation"] = _Memory.maxRSS() - memory
Listing S3 bucket objects returns Object instead of ObjectSummary. Fixes
@@ -830,9 +830,7 @@ class AWSBucketContainer(BaseBucketContainer): else: # pylint:disable=protected-access boto_objs = self.bucket._bucket.objects.all() - objects = [AWSBucketObject(self._provider, obj) - for obj in boto_objs] - + objects = [self.get(obj.key) for obj in boto_objs] return ClientPagedResultList(self._provider, objects, limit=limit, marker=marker)
Pay attention to function substitution done by output staging Prior to this, staging providers were given the opportunity to replace the app function with a staging wrapper for stageout, but that staging wrapper was then discarded.
@@ -686,7 +686,7 @@ class DataFlowKernel(object): # Transform remote input files to data futures args, kwargs, func = self._add_input_deps(executor, args, kwargs, func) - self._add_output_deps(executor, args, kwargs, app_fu, func) + func = self._add_output_deps(executor, args, kwargs, app_fu, func) task_def.update({ 'args': args,
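A generic illustration of why discarding the return value made the substitution a no-op (hypothetical names, not parsl's actual staging code):

```python
def add_output_staging(func):
    # Hypothetical staging provider: wrap the app function with stage-out logic.
    def staged(*args, **kwargs):
        result = func(*args, **kwargs)
        print("staging outputs...")
        return result
    return staged

def app():
    return 42

add_output_staging(app)        # old pattern: the wrapper is built, then thrown away
app = add_output_staging(app)  # fixed pattern: keep the substituted function
app()                          # the stage-out wrapper now actually runs
```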
Fix test_get_field to not depend on platform-specific path handling This test was failing on Windows due to either abspath() or expanduser() normalizing paths by changing slashes to backslashes (and sometimes adding a drive letter): E AssertionError: 'D:\\path\\to\\image.jpg' != '/path/to/image.jpg'
@@ -768,13 +768,13 @@ class SampleTests(unittest.TestCase): @drop_datasets def test_get_field(self): - filepath = "/path/to/image.jpg" - sample = fo.Sample(filepath=filepath) + field_value = "custom_value" + sample = fo.Sample(filepath="/path/to/image.jpg", field1=field_value) # get valid - self.assertEqual(sample.get_field("filepath"), filepath) - self.assertEqual(sample["filepath"], filepath) - self.assertEqual(sample.filepath, filepath) + self.assertEqual(sample.get_field("field1"), field_value) + self.assertEqual(sample["field1"], field_value) + self.assertEqual(sample.field1, field_value) # get missing with self.assertRaises(AttributeError):
put back the method all_modules Even if it is no longer used by `_read_aliases`, it is still needed in other parts of the program.
@@ -16,6 +16,16 @@ try: except ImportError: from configparser import RawConfigParser +def all_modules(): + """Return a list of available modules""" + result = [] + path = os.path.dirname(bumblebee.modules.__file__) + for mod in [name for _, name, _ in pkgutil.iter_modules([path])]: + result.append({ + "name": mod + }) + return result + class Module(object): """Module instance base class
Adapt bindep for ubuntu-jammy This is needed to make the openstack-tox-py310 job install deps properly.
@@ -39,7 +39,6 @@ postgresql postgresql-client [platform:dpkg] postgresql-devel [platform:rpm test] postgresql-server [platform:rpm] -python-dev [platform:dpkg test] python3-all [platform:dpkg] python3-all-dev [platform:dpkg] python3 [platform:rpm test]
Fix history's marker event * now that the event-as-dict is part of the public API, normalize its "event_id" key * fix failed state name
@@ -453,11 +453,11 @@ class History(object): 'name': event.marker_name, 'state': event.state, 'details': getattr(event, 'details', None), - 'recorded_event_id': event.id, - 'recorded_event_timestamp': event.timestamp, + 'event_id': event.id, + 'timestamp': event.timestamp, } self._markers.setdefault(event.marker_name, []).append(marker) - elif event.state == 'failed': + elif event.state == 'record_failed': marker = { 'type': 'marker', 'name': event.marker_name,
http: Handle responses when no receiver exists If a request was made with HttpConnection and cancelled before the response was received, a response could be received with no apparent receiver yielding an exception. This commit fixes that.
@@ -330,11 +330,10 @@ class HttpConnection(asyncio.Protocol): # Dispatch message to first receiver self._responses.put(parsed) - event = self._requests.pop() - if event: - event.set() + if self._requests: + self._requests.pop().set() else: - _LOGGER.warning("Got response without having a request") + _LOGGER.warning("Got response without having a request: %s", parsed) @staticmethod def error_received(exc) -> None:
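The likely failure mode, sketched with a plain list (assuming `_requests` behaves like a list or deque): `pop()` on an empty container raises instead of returning `None`, so the old `if event:` guard was never reached:

```python
pending = []               # the request was cancelled, so no receiver is waiting
try:
    event = pending.pop()  # old pattern: raises IndexError before any None check
    if event:
        event.set()
except IndexError:
    print("log a warning instead of raising")  # what the fixed length check achieves
```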
Update common/chromium/blink_fix_prototype_assert.patch The Great Blink mv for source files, part 2.
-diff --git a/third_party/WebKit/Source/platform/bindings/V8ObjectConstructor.cpp b/third_party/WebKit/Source/platform/bindings/V8ObjectConstructor.cpp +cdiff --git a/third_party/blink/renderer/platform/bindings/v8_object_constructor.cc b/third_party/blink/renderer/platform/bindings/v8_object_constructor.cc index aedc832..8c26681 100644 ---- a/third_party/WebKit/Source/platform/bindings/V8ObjectConstructor.cpp -+++ b/third_party/WebKit/Source/platform/bindings/V8ObjectConstructor.cpp +--- a/third_party/blink/renderer/platform/bindings/v8_object_constructor.cc ++++ b/third_party/blink/renderer/platform/bindings/v8_object_constructor.cc @@ -94,8 +94,10 @@ v8::Local<v8::Function> V8ObjectConstructor::CreateInterfaceObject( bool get_prototype_value = interface_object->Get(context, V8AtomicString(isolate, "prototype"))
Use consistent capitalization for OAuth(2) Requested by:
@@ -237,7 +237,7 @@ Development callback (redirect) URL Doximity -------- -Doximity Oauth2 implementation documentation +Doximity OAuth2 implementation documentation https://www.doximity.com/developers/documentation#oauth Request API keys here @@ -760,7 +760,7 @@ fetching the access token:: App registration (get your key and secret here) https://www.linkedin.com/secure/developer?newapp= -Authorized Redirect URLs (Oauth2) +Authorized Redirect URLs (OAuth2) ********************************* Add any you need (up to 200) consisting of: @@ -771,7 +771,7 @@ For example when using the built-in django server and default settings: http://localhost:8000/accounts/linkedin_oauth2/login/callback/ -Development "Accept" and "Cancel" redirect URL (Oauth 1.0a) +Development "Accept" and "Cancel" redirect URL (OAuth 1.0a) *********************************************************** Leave the OAuth1 redirect URLs empty.
issue / [stream-refactor]: fix close/poller deregister crash on OSX See source comment.
@@ -1628,12 +1628,17 @@ class Protocol(object): self.stream.on_disconnect(broker) def on_disconnect(self, broker): + # Normally both sides an FD, so it is important that tranmit_side is + # deregistered from Poller before closing the receive side, as pollers + # like epoll and kqueue unregister all events on FD close, causing + # subsequent attempt to unregister the transmit side to fail. LOG.debug('%r: disconnecting', self) - if self.stream.receive_side: broker.stop_receive(self.stream) - self.stream.receive_side.close() if self.stream.transmit_side: broker._stop_transmit(self.stream) + + self.stream.receive_side.close() + if self.stream.transmit_side: self.stream.transmit_side.close()
DX Beta Changes force:org:display (used to be :describe) updated JSON response parsing
@@ -616,8 +616,8 @@ class ScratchOrgConfig(OrgConfig): self.logger.info('Getting scratch org info from Salesforce DX') - # Call force:org:open and parse output to get instance_url and access_token - command = 'sfdx force:org:describe -u {} --json'.format(self.username) + # Call force:org:display and parse output to get instance_url and access_token + command = 'sfdx force:org:display -u {} --json'.format(self.username) p = sarge.Command(command, stdout=sarge.Capture(buffer_size=-1)) p.run() @@ -646,15 +646,14 @@ class ScratchOrgConfig(OrgConfig): ''.join(stdout_list), ) ) - - org_id = org_info['accessToken'].split('!')[0] + org_id = org_info['result']['accessToken'].split('!')[0] self._scratch_info = { - 'instance_url': org_info['instanceUrl'], - 'access_token': org_info['accessToken'], + 'instance_url': org_info['result']['instanceUrl'], + 'access_token': org_info['result']['accessToken'], 'org_id': org_id, - 'username': org_info['username'], - 'password': org_info.get('password',None), + 'username': org_info['result']['username'], + 'password': org_info['result'].get('password',None), } self.config.update(self._scratch_info)
CompileCtx.struct_types: minor refactoring TN:
@@ -888,10 +888,17 @@ class CompileCtx(object): # generated code. We're also putting env_metadata and env_element in # the beginning and in the right dependency order (the metadata type # before the env element type). + # # TODO: Using a dependency order topological sort wouldn't hurt at # some point. - from langkit.compiled_types import StructMetaclass, T + + from langkit.compiled_types import StructMetaclass + if self._struct_types: + # TODO: A better solution at some point would be having a + # "freezable list" for struct_types (and every list of types for + # that matter) and raising an error if some code tries to add to it + # after the freeze point. assert ( len(self._struct_types) == len(StructMetaclass.struct_types) ), ( @@ -899,18 +906,15 @@ class CompileCtx(object): "were added" ) - # TODO: A better solution at some point would be having a - # "freezable list" for struct_types (and every list of types for - # that matter) and raising an error if some code tries to add to it - # after the freeze point. else: env_element = StructMetaclass.root_grammar_class.env_el() + env_md = StructMetaclass.env_metadata self._struct_types = [ t for t in StructMetaclass.struct_types - if t not in [StructMetaclass.env_metadata, env_element] + if t not in [env_md, env_element] ] self._struct_types.insert(0, env_element) - self._struct_types.insert(0, T.env_md) + self._struct_types.insert(0, env_md) return self._struct_types
Documentation for postgresql default port * Documentation for postgresql default port A port is necessary for postgresql config. Add postgresql default port 5432 in documentation for LNBITS_DATABASE_URL config * Update docs/guide/installation.md
@@ -220,8 +220,8 @@ You need to edit the `.env` file. ```sh # add the database connection string to .env 'nano .env' LNBITS_DATABASE_URL= -# postgres://<user>:<myPassword>@<host>/<lnbits> - alter line bellow with your user, password and db name -LNBITS_DATABASE_URL="postgres://postgres:postgres@localhost/lnbits" +# postgres://<user>:<myPassword>@<host>:<port>/<lnbits> - alter line bellow with your user, password and db name +LNBITS_DATABASE_URL="postgres://postgres:postgres@localhost:5432/lnbits" # save and exit ```
HA: fix injection of certificate in haproxy container Injection of the certificate into the pacemaker-managed haproxy [1] is never exercised due to bad parsing of the container name vs container id. Closes-Bug: [1]
@@ -175,7 +175,7 @@ outputs: - name: copy certificate, chgrp, restart haproxy shell: | set -e - if echo {{ item }} | grep -q "^haproxy-bundle"; then + if {{ container_cli }} ps -f "id={{ item }}" --format "{{ '{{' }}.Names{{ '}}' }}" | grep -q "^haproxy-bundle"; then tar -c {{ cert_path }} | {{container_cli}} exec -i {{ item }} tar -C / -xv else {{ container_cli }} cp {{ cert_path }} {{ item }}:{{ cert_path }}
Remove an import hack from the base Dockerfile. No longer needed as App Engine SDK is removed.
@@ -67,10 +67,6 @@ RUN curl -sSO https://dl.google.com/cloudagents/install-logging-agent.sh && \ sed -i 's/flush_interval 5s/flush_interval 60s/' /etc/google-fluentd/google-fluentd.conf COPY clusterfuzz-fluentd.conf /etc/google-fluentd/config.d/clusterfuzz.conf -# Hack to force google to be a namespace package. -# https://github.com/google/protobuf/issues/1296#issuecomment-264264926 -RUN echo "import google; import pkgutil; pkgutil.extend_path(google.__path__, google.__name__)" > /usr/local/lib/python2.7/dist-packages/gae.pth - # Common environment variables. ENV USER=clusterfuzz ENV INSTALL_DIRECTORY /mnt/scratch0
update kpt and action data collector; test=document_fix
@@ -86,9 +86,10 @@ class DataCollector(object): if attr_res: self.collector[ids]["attrs"].append(attr_res['output'][idx]) if kpt_res: - self.collector[ids]["kpts"].append(kpt_res['keypoint'][idx]) + self.collector[ids]["kpts"].append( + [kpt_res['keypoint'][0][idx], kpt_res['keypoint'][1][idx]]) if action_res: - self.collector[ids]["actions"].append(action_res['output'][idx]) + self.collector[ids]["actions"].append(action_res[idx + 1]) else: # action model generate result per X frames, Not available every frames self.collector[ids]["actions"].append(None)
Nuke: removing obsolete code; fixing family to families, as Write is stored there in the avalon data on the node
@@ -836,29 +836,6 @@ def check_subsetname_exists(nodes, subset_name): if subset_name in read_avalon_data(n).get("subset", "")), False) - -def get_render_path(node): - ''' Generate Render path from presets regarding avalon knob data - ''' - avalon_knob_data = read_avalon_data(node) - data = {'avalon': avalon_knob_data} - - nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], - plugin_name=avalon_knob_data["creator"], - subset=avalon_knob_data["subset"] - ) - host_name = os.environ.get("AVALON_APP") - - data.update({ - "app": host_name, - "nuke_imageio_writes": nuke_imageio_writes - }) - - anatomy_filled = format_anatomy(data) - return anatomy_filled["render"]["path"].replace("\\", "/") - - def format_anatomy(data): ''' Helping function for formatting of anatomy paths @@ -2185,15 +2162,14 @@ def get_write_node_template_attr(node): avalon_knob_data = read_avalon_data(node) # get template data nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["family"], + node_class=avalon_knob_data["families"], plugin_name=avalon_knob_data["creator"], subset=avalon_knob_data["subset"] ) + # collecting correct data - correct_data = OrderedDict({ - "file": get_render_path(node) - }) + correct_data = OrderedDict() # adding imageio knob presets for k, v in nuke_imageio_writes.items():
Completing functionality for enum.auto Full context: Closes python/mypy#3434.
@@ -32,7 +32,8 @@ def unique(enumeration: _S) -> _S: ... if sys.version_info >= (3, 6): _auto_null = ... # type: Any - class auto: + # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() + class auto(IntFlag): value = ... # type: Any class Flag(Enum):
[Core] update grpc to 1.46.6 Upgrade to a version of gRPC that patches GHSA-cfmr-vrgj-vqwv in Zlib; 1.46.6 has this patch: grpc/grpc#31845
@@ -238,8 +238,8 @@ def ray_deps_setup(): auto_http_archive( name = "com_github_grpc_grpc", # NOTE: If you update this, also update @boringssl's hash. - url = "https://github.com/grpc/grpc/archive/refs/tags/v1.45.2.tar.gz", - sha256 = "e18b16f7976aab9a36c14c38180f042bb0fd196b75c9fd6a20a2b5f934876ad6", + url = "https://github.com/grpc/grpc/archive/refs/tags/v1.46.6.tar.gz", + sha256 = "6514b3e6eab9e9c7017304512d4420387a47b1a9c5caa986643692977ed44e8a", patches = [ "@com_github_ray_project_ray//thirdparty/patches:grpc-cython-copts.patch", "@com_github_ray_project_ray//thirdparty/patches:grpc-python.patch", @@ -282,11 +282,11 @@ def ray_deps_setup(): # https://github.com/grpc/grpc/blob/1ff1feaa83e071d87c07827b0a317ffac673794f/bazel/grpc_deps.bzl#L189 # Ensure this rule matches the rule used by grpc's bazel/grpc_deps.bzl name = "boringssl", - sha256 = "e168777eb0fc14ea5a65749a2f53c095935a6ea65f38899a289808fb0c221dc4", - strip_prefix = "boringssl-4fb158925f7753d80fb858cb0239dff893ef9f15", + sha256 = "534fa658bd845fd974b50b10f444d392dfd0d93768c4a51b61263fd37d851c40", + strip_prefix = "boringssl-b9232f9e27e5668bc0414879dcdedb2a59ea75f2", urls = [ - "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/boringssl/archive/4fb158925f7753d80fb858cb0239dff893ef9f15.tar.gz", - "https://github.com/google/boringssl/archive/4fb158925f7753d80fb858cb0239dff893ef9f15.tar.gz", + "https://storage.googleapis.com/grpc-bazel-mirror/github.com/google/boringssl/archive/b9232f9e27e5668bc0414879dcdedb2a59ea75f2.tar.gz", + "https://github.com/google/boringssl/archive/b9232f9e27e5668bc0414879dcdedb2a59ea75f2.tar.gz", ], )
fixed orbital correction method2 implementation TODO: mpi tests
@@ -98,7 +98,8 @@ def orbital_correction(ifgs_or_ifg_paths, params, mlooked=None, offset=True, if method == NETWORK_METHOD: if mlooked is None: network_correction(ifgs_or_ifg_paths, degree, offset, - preread_ifgs) + m_ifgs=mlooked, + preread_ifgs=preread_ifgs) else: _validate_mlooked(mlooked, ifgs_or_ifg_paths) network_correction(ifgs_or_ifg_paths, degree, offset, mlooked, @@ -397,6 +398,11 @@ def remove_orbital_error(ifgs, params, preread_ifgs=None): ifg_paths = [i.data_path for i in ifgs] \ if isinstance(ifgs[0], Ifg) else ifgs + mlooked = None + + # mlooking is not necessary for independent correction + # can use multiple procesing if write_to_disc=True + if params[cf.ORBITAL_FIT_METHOD] == 2: mlooked_dataset = prepifg.prepare_ifgs( ifg_paths, crop_opt=prepifg.ALREADY_SAME_SIZE, @@ -409,8 +415,8 @@ def remove_orbital_error(ifgs, params, preread_ifgs=None): for m in mlooked: m.initialize() m.nodata_value = params[cf.NO_DATA_VALUE] - m.convert_to_nans() - m.convert_to_mm() + # m.convert_to_nans() # already nan converted + # m.convert_to_mm() # already mm converted orbital_correction(ifgs, params, mlooked=mlooked, preread_ifgs=preread_ifgs)
Devex 3 upload agent resume message * Fix log message if file 100% uploaded If remote file is 100% complete, inform user "Will not upload it" * Change log message to 'not resume'
@@ -152,11 +152,20 @@ void File::init(const bool tryResuming) { isRemoteFileOpen = true; } DXLOG(logINFO) << "A resume target is found .. " << endl; + if (isRemoteFileOpen) { DXLOG(logUSERINFO) << "Signature of file " << localFile << " matches remote file " << findResult[0]["describe"]["name"].get<string>() << " (" << fileID << "), which is " << completePercentage << "% complete. Will resume uploading to it." << endl; DXLOG(logINFO) << "Remote resume target is in state: \"" << state << "\""; } + else { + DXLOG(logUSERINFO) + << "Signature of file " << localFile << " matches remote file " << findResult[0]["describe"]["name"].get<string>() + << " (" << fileID << "), which is " << completePercentage << "% complete. Will not resume uploading it." << endl; + DXLOG(logINFO) << "Remote resume target is in state: \"" << state << "\""; + } + + } if (findResult.size() > 1) { ostringstream oss; oss << endl << "More than one resumable targets for local file \"" << localFile << "\" found in the project '" + projectID + "', candidates: " << endl;
langkit.compiled_types: fix a typo TN:
@@ -737,7 +737,7 @@ class CompiledType(object): @property def memoization_kind(self): """ - Return the enumerator name that correspond to this type for the + Return the enumerator name that corresponds to this type for the discriminated record to materialize memoization keys/values. :rtype: str
validating pipeline output required fields minor change
@@ -9,7 +9,7 @@ DUMB_PIPELINE_INPUT = '| from read_text("/")' \ '| select from_json_object(value) as input_event' \ '| eval timestamp=parse_long(ucast(map_get(input_event, "_time"), "string", null))' -DUMB_PIPELINE_OUTPUT = ';' +DUMB_PIPELINE_OUTPUT = '| select start_time, end_time, entities, body;' def main(args): @@ -128,7 +128,10 @@ def validate_required_fields(detection): return False return True except subprocess.CalledProcessError: - log(logging.ERROR, "Syntax errors in pipeline %s" % detection['name'], detail=detection['search']) + log(logging.ERROR, + "Syntax errors in pipeline %s" % detection['name'], + detail=detection['search']) + log(logging.INFO, "Perhaps required [input|output] fields do not match SSA ones") return False
Update makeplots.py Corrected the noon-midnight line for the case where the current UTC time is after 12:00 of the day.
@@ -170,8 +170,8 @@ def noon_midnight_meridian(dtime=None, delta=0.25): lons_latmax = 0 + 15 * diff_in_hours # longitude for noon line lons_latmin = lons_latmax - 180 # longitude for midnight line elif diff_in_hours < 0: - lons_latmax = 0 - 15 * diff_in_hours # longitude for noon line - lons_latmin = lons_latmax + 180 # longitude for midnight line + lons_latmax = 0 + 15 * diff_in_hours # longitude for noon line, old version is - + lons_latmin = lons_latmax - 180 # longitude for midnight line, old version is + # lons_max_arr = np.full((1, ni_half), lons_latmax) # for noon line lats_max_arr = np.linspace(-90, 90, ni_half) # for noon line
alvalidators.py allow outage_start_time_step = outage_end_time_step We need to allow this so that users can model an outage that lasts an entire year. In other words, the end time step is inclusive, such that modeling a single outage time step requires equal values for the start and end time steps.
@@ -924,8 +924,8 @@ class ValidateNestedInput: self.input_data_errors.append('LoadProfile outage_end_hour must be larger than outage_end_hour and these inputs cannot be equal') if real_values.get('outage_start_time_step') is not None and real_values.get('outage_end_time_step') is not None: - if real_values.get('outage_start_time_step') >= real_values.get('outage_end_time_step'): - self.input_data_errors.append('LoadProfile outage_end_time_step must be larger than outage_start_time_step and these inputs cannot be equal') + if real_values.get('outage_start_time_step') > real_values.get('outage_end_time_step'): + self.input_data_errors.append('LoadProfile outage_end_time_step must be larger than outage_start_time_step.') def check_for_nans(self, object_name_path, template_values=None, real_values=None, number=1, input_isDict=None): """
Ignore matplotlib objects output lines Contains code inspired by (MIT license)
import pytest import numpy import matplotlib +import doctest matplotlib.use('agg', force=True) +# Ignore matplotlib output such as `<matplotlib.image.AxesImage at +# 0x7f956908c280>`. doctest monkeypatching inspired by +# https://github.com/wooyek/pytest-doctest-ellipsis-markers (MIT license) +OutputChecker = doctest.OutputChecker + +class SkipMatplotlibOutputChecker(doctest.OutputChecker): + def check_output(self, want, got, optionflags): + if '<matplotlib.' in got: + got = '' + return OutputChecker.check_output(self, want, got, optionflags) + +doctest.OutputChecker = SkipMatplotlibOutputChecker + @pytest.fixture(autouse=True) def add_np(doctest_namespace): numpy.random.seed(1)
Add Mockaroo API to Test Data Fix apiKey syntax highlighting
@@ -1222,6 +1222,7 @@ API | Description | Auth | HTTPS | CORS | | [Loripsum](http://loripsum.net/) | The "lorem ipsum" generator that doesn't suck | No | No | Unknown | | [Mailsac](https://mailsac.com/docs/api) | Disposable Email | `apiKey` | Yes | Unknown | | [Metaphorsum](http://metaphorpsum.com/) | Generate demo paragraphs giving number of words and sentences | No | No | Unknown | +| [Mockaroo](https://www.mockaroo.com/docs) | Generate fake data to JSON, CSV, TXT, SQL and XML | `apiKey` | Yes | Unknown | | [PIPL](https://pipl.ir/) | Free and public API that generates random and fake people's data in JSON | No | Yes | No | | [QuickMocker](https://quickmocker.com) | API mocking tool to generate contextual, fake or random data | No | Yes | Yes | | [Randommer](https://randommer.io/randommer-api) | Random data generator | `apiKey` | Yes | Yes |
update tests/test_poly_spaces.py for simplex Bernstein basis update _gen_common_data(), test_partition_of_unity()
@@ -92,7 +92,7 @@ def _gen_common_data(orders, gels, report): ['lagrange', 'serendipity', 'bernstein', 'lobatto']])] + [ii for ii in combine([['2_3', '3_4'], - ['lagrange']])]) + ['lagrange', 'bernstein']])]) for geom, poly_space_base in bases: order = orders[geom] if (geom == '3_8') and (poly_space_base == 'serendipity'): @@ -202,10 +202,11 @@ class Test(TestCommon): ok = True orders = {'2_3' : 5, '2_4' : 5, '3_4' : 5, '3_8' : 5} bases = ( - [ii for ii in combine( - [['2_4', '3_8'], ['lagrange', 'serendipity', 'bernstein']] + [ii for ii in combine([['2_4', '3_8'], + ['lagrange', 'serendipity', 'bernstein']] )] - + [ii for ii in combine([['2_3', '3_4'], ['lagrange']])] + + [ii for ii in combine([['2_3', '3_4'], + ['lagrange', 'bernstein']])] ) for geom, poly_space_base in bases:
Update EPS_Screen.kv Adjusted Value/Unit definition layout.
font_size: 20 Label: id: current_label - pos_hint: {"center_x": 0.5, "center_y": 0.10} + pos_hint: {"center_x": 0.5, "center_y": 0.12} text: 'Current = A' markup: True color: 1,1,1 font_size: 20 Label: id: voltage_label - pos_hint: {"center_x": 0.5, "center_y": 0.03} + pos_hint: {"center_x": 0.5, "center_y": 0.05} text: 'Voltage = V' markup: True color: 1,1,1
change: re-enable pylint's no-member check in .backends Re-enable pylint's no-member check in .backends, as it should no longer produce warnings.
# Copyright (C) 2011 - 2018 Satoru SATOH <ssato @ redhat.com> # License: MIT # -# Suppress: -# - false-positive warn at '... pkg_resources ...' line -# - import positions after some globals are defined -# pylint: disable=no-member,wrong-import-position +# Suppress: import positions after some globals are defined +# pylint: disable=wrong-import-position """A module to aggregate config parser (loader/dumper) backends. """ from __future__ import absolute_import
sso: Fix debug statement Fix debug statement to print stringified json
@@ -30,7 +30,7 @@ class RHSSOOAuthService(OIDCLoginService): json={"user": {"login": lusername}, "account": {"primary": True}}, timeout=5, ) - logger.debug("Got result from export compliance service: " + result.json()) + logger.debug("Got result from export compliance service: " + str(result.json())) if result.status_code != 200: raise OAuthLoginException(str(result.json())) if result.json()["result"] != "OK":
slack: Skip reactions for deleted users. Fixes
@@ -1146,6 +1146,10 @@ def build_reactions( continue for slack_user_id in slack_reaction["users"]: + if slack_user_id not in slack_user_id_to_zulip_user_id: + # Deleted users still have reaction references but no profile, so we skip + continue + reaction_id = NEXT_ID("reaction") reaction = Reaction( id=reaction_id,
docs(python): document adding Python 3.9 support, dropping 3.5 support Closes
@@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/{{ metadata['repo']['repo'] }}/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_
datacube/drivers/s3/index.py - commented out the problem line writing to dataset.local_uri - TODO: adapt for dataset.uris
@@ -51,8 +51,8 @@ class Index(datacube.index._api.Index): dataset_refs = [] n = 0 for dataset in datasets.values: - if dataset.local_uri: - dataset.local_uri = '%s:%s' % (self.uri_scheme, dataset.local_uri.split(':', 1)[1]) + # if dataset.local_uri: + # dataset.local_uri = '%s:%s' % (self.uri_scheme, dataset.local_uri.split(':', 1)[1]) self.datasets.add(dataset, sources_policy='skip') dataset_refs.append(dataset.id) n += 1
Also support POST requests for case search We're anticipating USH cases exceeding the maximum URL length and will switch formplayer to use POST requests.
@@ -93,6 +93,7 @@ def restore(request, domain, app_id=None): @location_safe_bypass +@csrf_exempt @mobile_auth @check_domain_migration def search(request, domain): @@ -100,6 +101,7 @@ def search(request, domain): @location_safe_bypass +@csrf_exempt @mobile_auth @check_domain_migration def app_aware_search(request, domain, app_id): @@ -110,7 +112,8 @@ def app_aware_search(request, domain, app_id): Returns results as a fixture with the same structure as a casedb instance. """ - criteria = {k: v[0] if len(v) == 1 else v for k, v in request.GET.lists()} + request_dict = request.GET if request.method == 'GET' else request.POST + criteria = {k: v[0] if len(v) == 1 else v for k, v in request_dict.lists()} try: cases = get_case_search_results(domain, criteria, app_id, request.couch_user) except CaseSearchUserError as e:
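A sketch of the criteria flattening with plain dicts; Django's `QueryDict.lists()` yields the same `(key, list_of_values)` shape for both GET and POST data:

```python
raw = {"name": ["foo"], "status": ["open", "closed"]}  # hypothetical query parameters
criteria = {k: v[0] if len(v) == 1 else v for k, v in raw.items()}
print(criteria)  # {'name': 'foo', 'status': ['open', 'closed']}
```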
tests: correct the url_str_to_user_pk test to correctly assert that a UUID field is returned
@@ -1100,7 +1100,7 @@ class UtilsTests(TestCase): with patch('allauth.account.utils.get_user_model') as mocked_gum: mocked_gum.return_value = UUIDUser self.assertEqual(url_str_to_user_pk(self.user_id), - self.user_id) + uuid.UUID(self.user_id)) def test_pk_to_url_string_identifies_UUID_as_stringlike(self): user = UUIDUser(
[Stress Tester XFails] Remove They have been fixed
], "issueUrl" : "https://bugs.swift.org/browse/SR-14328" }, - { - "path" : "*\/ACHNBrowserUI\/ACHNBrowserUI\/ACHNBrowserUI\/packages\/Backend\/Sources\/Backend\/environments\/Items.swift", - "issueDetail" : { - "kind" : "codeComplete", - "offset" : 1277 - }, - "applicableConfigs" : [], - "issueUrl" : "https://bugs.swift.org/browse/SR-12985" - }, { "path" : "*\/ACHNBrowserUI\/ACHNBrowserUI\/ACHNBrowserUI\/SceneDelegate.swift", "issueDetail" : { ], "issueUrl" : "https://bugs.swift.org/browse/SR-14328" }, - { - "path" : "*\/Base64CoderSwiftUI\/Base64CoderSwiftUI\/ContentView.swift", - "issueDetail" : { - "kind" : "codeComplete", - "offset" : 304 - }, - "applicableConfigs" : [ - "main" - ], - "issueUrl" : "https://bugs.swift.org/browse/SR-12965" - }, { "path" : "*\/MovieSwift\/MovieSwift\/Packages\/UI\/Sources\/UI\/badges\/RoundedBadge.swift", "modification" : "concurrent-844", ], "issueUrl" : "https://bugs.swift.org/browse/SR-14430" }, - { - "path" : "*\/MovieSwift\/MovieSwift\/MovieSwift\/launch\/SceneDelegate.swift", - "issueDetail" : { - "kind" : "codeComplete", - "offset" : 2285 - }, - "applicableConfigs" : [ - "main" - ], - "issueUrl" : "https://bugs.swift.org/browse/SR-14432" - }, { "path" : "*\/Result\/Result\/Result.swift", "issueDetail" : { "main" ], "issueUrl" : "https://bugs.swift.org/browse/SR-14454" - }, - { - "path" : "*\/project_cache\/MovieSwift\/MovieSwift\/Packages\/Backend\/Sources\/Backend\/services\/APIService.swift", - "issueDetail" : { - "kind" : "codeComplete", - "offset" : 1611 - }, - "applicableConfigs" : [ - "main" - ], - "issueUrl" : "https://bugs.swift.org/browse/SR-14455" } ]
Run layout.show() when restoring layouts from state This ensures layouts that are visible following a restart have set themselves up properly. Fixes
@@ -162,6 +162,7 @@ class Qtile(CommandObject): if self._state: for screen in self.screens: + screen.group.layout.show(screen.get_rect()) screen.group.layout_all() self._state = None self.update_desktops()
Fix import bug from cosmology/parameters... This manifests itself during the test collection phase of pytest (when invoked directly from the command line). It seems like during test collection 'locals' gets polluted and so it is not a reliable way to load the parameters into the module's namespace.
@@ -42,12 +42,6 @@ doi: 10.1088/0067-0049/180/2/330. Table 1 (WMAP + BAO + SN ML). """ from __future__ import (absolute_import, division, print_function, unicode_literals) -# delete these things from the namespace so we can automatically find -# all of the parameter dictionaries below. -del absolute_import -del division -del print_function -del unicode_literals # Note: if you add a new cosmology, please also update the table # in the 'Built-in Cosmologies' section of astropy/docs/cosmology/index.rst @@ -150,4 +144,5 @@ WMAP5 = dict( "Table 1 (WMAP + BAO + SN ML).") ) -available = tuple(k for k in locals() if not k.startswith('_')) +# If new parameters are added, this list must be updated +available = ['Planck15', 'Planck13', 'WMAP9', 'WMAP7', 'WMAP5']
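A small illustration of why collecting names via `locals()`/`globals()` at module level is fragile, using a hypothetical toy module with one stray import:

```python
# toy_module.py -- hypothetical
import math  # any helper import leaks into the module namespace

WMAP9 = dict(H0=69.32)
Planck15 = dict(H0=67.74)

available = tuple(k for k in dict(globals()) if not k.startswith("_"))
print(available)  # includes 'math' alongside the cosmology dicts
```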
cas_disk: handle allocation error properly In case of error `blk_mq_init_queue()` does not return NULL, but `ERR_PTR(error_code)` instead. `IS_ERR_OR_NULL()` should be used to check if `blk_mq_init_queue()` actually failed.
@@ -557,8 +557,8 @@ int casdsk_exp_obj_create(struct casdsk_disk *dsk, const char *dev_name, } queue = blk_mq_init_queue(&dsk->tag_set); - if (!queue) { - result = -ENOMEM; + if (IS_ERR_OR_NULL(queue)) { + result = queue ? PTR_ERR(queue) : -ENOMEM; goto error_init_queue; }
Pathfix for Windows A path fix for Windows that contains no syntax errors.
@@ -150,7 +150,7 @@ class DataService: return new_ability_file def _check_uuid(self, _filename): - uuid_string = os.path.basename(_filename).split(',')[0] + uuid_string = os.path.basename(_filename).split('.')[0] try: val = UUID(uuid_string, version=4) return _filename
Replaced the JSONDecodeError with a ValueError as it was failing on Jenkins.
@@ -995,7 +995,7 @@ def test_preview_letter_template_precompiled_png_file_type( file_type='png' ) - with pytest.raises(json.decoder.JSONDecodeError): + with pytest.raises(ValueError): mock_post.last_request.json() assert mock_get_letter_pdf.called_once_with(notification) assert base64.b64decode(resp['content']) == png_content @@ -1044,7 +1044,7 @@ def test_preview_letter_template_precompiled_png_template_preview_500_error( ) - with pytest.raises(json.decoder.JSONDecodeError): + with pytest.raises(ValueError): mock_post.last_request.json() @@ -1090,5 +1090,5 @@ def test_preview_letter_template_precompiled_png_template_preview_400_error( _expected_status=500 ) - with pytest.raises(json.decoder.JSONDecodeError): + with pytest.raises(ValueError): mock_post.last_request.json()
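One reason the swap is safe (a standard-library fact, independent of the Jenkins failure itself): `json.JSONDecodeError` is a subclass of `ValueError`, so `pytest.raises(ValueError)` still catches it:

```python
import json

print(issubclass(json.JSONDecodeError, ValueError))  # True

try:
    json.loads("not json")
except ValueError as exc:      # also catches json.JSONDecodeError
    print(type(exc).__name__)  # JSONDecodeError
```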
[logger] log packet_dropped with reason header_parse_error Some servers add all zero padding to UDP datagrams during the handshake.
@@ -730,6 +730,15 @@ class QuicConnection: buf, host_cid_length=self._configuration.connection_id_length ) except ValueError: + if self._quic_logger is not None: + self._quic_logger.log_event( + category="transport", + event="packet_dropped", + data={ + "trigger": "header_parse_error", + "raw": {"length": buf.capacity - start_off}, + }, + ) return # check destination CID matches
Update EPS_Screen.kv Last adjustment to Value/Units definition position? Hopefully...
##---------------------------------------------------------------------- Label: id: angle_label - pos_hint: {"center_x": 0.5, "center_y": 0.19} + pos_hint: {"center_x": 0.5, "center_y": 0.14} text: 'Angle = deg' markup: True color: 1,1,1 font_size: 20 Label: id: current_label - pos_hint: {"center_x": 0.5, "center_y": 0.14} + pos_hint: {"center_x": 0.5, "center_y": 0.10} text: 'Current = A' markup: True color: 1,1,1 font_size: 20 Label: id: voltage_label - pos_hint: {"center_x": 0.5, "center_y": 0.09} + pos_hint: {"center_x": 0.5, "center_y": 0.06} text: 'Voltage = V' markup: True color: 1,1,1
Added autorefresh for video_list_id in case no video_list_id was given. This makes it possible for skinners to create a container with the addon's contents, e.g. widgets. Used parts of im85288's pull request but made a few corrections.
@@ -415,6 +415,9 @@ class Navigation(object): user_data = self._check_response(self.call_netflix_service({ 'method': 'get_user_data'})) if user_data: + user_list = ['queue', 'topTen', 'netflixOriginals', 'trendingNow', 'newRelease', 'popularTitles'] + if str(type) in user_list and video_list_id == None: + video_list_id = self.refresh_list_id_for_type(type) for i in range(0, 4): items = self._check_response(self.call_netflix_service({ 'method': 'fetch_video_list', @@ -480,6 +483,20 @@ class Navigation(object): return listing return False + @log + def refresh_list_id_for_type(self,type): + """The list_ids are not static so may need refreshed for example when stored as a widget""" + user_data = self._check_response(self.call_netflix_service({ + 'method': 'get_user_data'})) + video_list_ids = self._check_response(self.call_netflix_service({ + 'method': 'fetch_video_list_ids', + 'guid': user_data['guid'], + 'cache': True})) + if video_list_ids: + for video_list_id in video_list_ids['user']: + if video_list_ids['user'][video_list_id]['name'] == type: + return str(video_list_ids['user'][video_list_id]['id']) + @log def show_profiles(self): """List the profiles for the active account"""
Update Variables.md Corrected spelling mistakes and removed an undesired and confusing space in a float.
@@ -75,10 +75,10 @@ Tutorial Code func main() { name := "Chris Mien" age := 29 - weigth := 200.21 + weight := 200.21 fmt.Println(name) fmt.Println(age) - fmt.Println(weigth) + fmt.Println(weight) } @@ -98,8 +98,8 @@ Solution func main() { name := "John Doe" age := 24 - weigth := 154.61 + weight := 154.61 fmt.Println(name) fmt.Println(age) - fmt.Println(weigth) + fmt.Println(weight) }
Handle cloudflare bans on webhook requests Fixes
@@ -222,6 +222,10 @@ class AsyncWebhookAdapter(WebhookAdapter): # we are being rate limited if r.status == 429: + if not r.headers.get('Via'): + # Banned by Cloudflare more than likely. + raise HTTPException(r, data) + retry_after = response['retry_after'] / 1000.0 log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', _id, retry_after) await asyncio.sleep(retry_after) @@ -317,6 +321,10 @@ class RequestsWebhookAdapter(WebhookAdapter): # we are being rate limited if r.status == 429: if self.sleep: + if not r.headers.get('Via'): + # Banned by Cloudflare more than likely. + raise HTTPException(r, data) + retry_after = response['retry_after'] / 1000.0 log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', _id, retry_after) time.sleep(retry_after)
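A hedged sketch of the same heuristic outside discord.py (the URL and payload are placeholders, not taken from the change): a genuine rate-limit response arrives through Discord's proxy layer and carries a Via header, while a Cloudflare ban page does not, so a 429 without Via is not worth retrying.

import requests

resp = requests.post("https://discord.com/api/webhooks/<id>/<token>",  # placeholder URL
                     json={"content": "hello"})
if resp.status_code == 429:
    if not resp.headers.get("Via"):
        # No proxy header: most likely a Cloudflare ban page, not a normal rate limit.
        raise RuntimeError("banned by Cloudflare, do not retry")
    retry_after = resp.json()["retry_after"] / 1000.0  # value is in milliseconds here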
STY: removed unreachable line Removed an unreachable line in `MetaHeader.__setattr__`.
@@ -1929,13 +1929,10 @@ class MetaHeader(object): """ # If this is a new attribute, update the `global_attrs` list if not hasattr(self, name) and name != 'global_attrs': - if hasattr(self, 'global_attrs'): # Only need to expand the global attributes if the new name # is not present in the list if name not in self.global_attrs: self.global_attrs.append(name) - else: - super(MetaHeader, self).__setattr__('global_attrs', [name]) # Use Object to avoid recursion super(MetaHeader, self).__setattr__(name, value)
DOC: updated docstring to include raises Updated docstring to include raises info and added missing description of instruments.
@@ -44,6 +44,12 @@ class Constellation(object): const_module : string Name of a pysat constellation module instruments : list-like + A list of pysat Instruments to include in the Constellation + + Raises + ------ + ValueError + When `instruments` is not list-like Note ----
[docs] fix bad emphasize on presets Summary: `startLines` are also all messed up, but it seems worth a systematic approach, i.e. drop them all Test Plan: `yarn dev` Reviewers: sashank, yuhan
@@ -176,7 +176,7 @@ variables. Dagster calls this a config preset: -```python literalinclude showLines startLine=133 emphasize-lines=10-27 caption=presets.py +```python literalinclude showLines startLine=133 emphasize-lines=14-37 caption=presets.py file:/docs_snippets/docs_snippets/intro_tutorial/advanced/pipelines/presets.py startAfter:start_presets_marker_0 endBefore:end_presets_marker_0
2018 Lexus NX300: Add missing EPS & engine f/w 2018 Lexus NX300 ICE DongleID
@@ -1244,6 +1244,7 @@ FW_VERSIONS = { CAR.LEXUS_NX: { (Ecu.engine, 0x700, None): [ b'\x01896637851000\x00\x00\x00\x00', + b'\x01896637852000\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152678140\x00\x00\x00\x00\x00\x00', @@ -1253,6 +1254,7 @@ FW_VERSIONS = { ], (Ecu.eps, 0x7a1, None): [ b'8965B78060\x00\x00\x00\x00\x00\x00', + b'8965B78080\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00',
Use HTTP API for `paasta metastatus` by default Switch `paasta metastatus` to use the HTTP API endpoint by default. It can be forced to use the old ssh-based method by specifying a false value for `USE_API_ENDPOINT` environment variable, e.g.: ``` $ USE_API_ENDPOINT=0 paasta metastatus -c somecluster -vv -a ```
@@ -221,7 +221,7 @@ def paasta_metastatus( if 'USE_API_ENDPOINT' in os.environ: use_api_endpoint = strtobool(os.environ['USE_API_ENDPOINT']) else: - use_api_endpoint = False + use_api_endpoint = True all_clusters = list_clusters(soa_dir=soa_dir) clusters_to_inspect = figure_out_clusters_to_inspect(args, all_clusters)
Add missing packages to test dependencies The tests are failing when running on Centos due to lack of qemu-img package and python3-devel. This patch is adding the missing packages to bindep.txt. Closes-Bug:
@@ -20,6 +20,9 @@ postgresql postgresql-client [platform:dpkg] postgresql-devel [platform:rpm] postgresql-server [platform:rpm] +libpython3-dev [platform:dpkg] +python3-devel [platform:rpm] qemu [platform:dpkg devstack build-image-dib] qemu-utils [platform:dpkg devstack build-image-dib] +qemu-img [platform:rpm] libpq-dev [platform:dpkg]
Link update Changed link destination from ``download`` to ``deploy``.
@@ -9,7 +9,7 @@ Assume that the IP address of this server is 10.10.10.2. 1. Log in to the server that will host Mattermost Server and open a terminal window. -2. Download `the latest version of the Mattermost Server <https://mattermost.com/download/>`__. In the following command, replace ``X.X.X`` with the version that you want to download: +2. Download `the latest version of the Mattermost Server <https://mattermost.com/deploy/>`__. In the following command, replace ``X.X.X`` with the version that you want to download: ``wget https://releases.mattermost.com/X.X.X/mattermost-X.X.X-linux-amd64.tar.gz``
Update elf_coinminer.txt New Reference links.
@@ -200,6 +200,8 @@ xfer.abcxyz.stream # Reference: https://www.virustotal.com/gui/file/2d9fb5ea6356fba9734673ba4ed1653ff7e887875cc3bfc9da7669c80a53a93b/detection # Reference: https://twitter.com/luc4m/status/1202311106187821056 (Note: not perl ircbot) +# Reference: https://yoroi.company/research/outlaw-is-back-a-new-crypto-botnet-targets-european-organizations/ +# Reference: https://otx.alienvault.com/pulse/5eb984d90091572e80b24197 45.9.148.125:80 45.9.148.125:443
Apply suggestions from code review: fix typo, add non-POSIX support in path
@@ -12,7 +12,7 @@ def main(): # Command line options parser parser = argparse.ArgumentParser(description='Simple SDFG command-line compiler.') - # Required argument for SDGF file path + # Required argument for SDFG file path parser.add_argument('SDFGfilepath', help='<PATH TO SDFG FILE>', type=str) # Optional argument for output location @@ -33,7 +33,7 @@ def main(): # Copying header file to optional path if args.out: - source = os.path.join(sdfg.build_folder, 'src/cpu', sdfg.name+'.h') + source = os.path.join(sdfg.build_folder, 'src', 'cpu', sdfg.name+'.h') destination = os.path.join(args.out,sdfg.name+'.h') shutil.copyfile(source, destination)
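A minimal illustration of the portability point (the folder and program names are placeholders): passing the components separately lets os.path.join use the platform's native separator instead of hard-coding the POSIX "/".

import os

build_folder = "build"   # placeholder for sdfg.build_folder
name = "prog"            # placeholder for sdfg.name
header = os.path.join(build_folder, "src", "cpu", name + ".h")
print(header)            # build/src/cpu/prog.h on POSIX, build\src\cpu\prog.h on Windows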
Test nqn successfully provided. If we can provide a correct NQN then it should be set on the host, and the result should be True.
@@ -90,3 +90,20 @@ def test_when_nqn_is_not_then_host_update_should_not_call_set_host(fake_set_host for call in fake_set_host.mock_calls: assert "addnqnlist" not in call.kwargs + + +def test_when_nqn_is_correctly_provided_it_should_be_set_on_the_host(fake_set_host): + purefa._get_host.return_value = True + expected_calls = [call("fnord", addnqnlist=["roscivs"])] + + purefa.host_update("fnord", nqn="roscivs") + + fake_set_host.assert_has_calls(expected_calls) + + +def test_when_nqn_is_correctly_provided_result_should_be_True(fake_set_host): + purefa._get_host.return_value = True + + result = purefa.host_update("fnord", nqn="roscivs") + + assert result is True
Update New York.md Buffalo > Police shove elderly man. Adding details about cops involved and more source links
@@ -9,11 +9,16 @@ Three police officers run over to and tackle man with hands raised giving an int ### Police shove elderly man, causing him to fall on the back of his head | June 4th -Two police officers shove an unarmed, elderly man, who falls backwards and strikes his head on the concrete sidewalk. He appears to be bleeding. Location: Niagara Square +Two police officers shove an unarmed, elderly man, who falls backwards and strikes his head on the concrete sidewalk. He appears to be bleeding. +One of the officers who pushed him (the cop on the right, from the camera's perspective) has initials A.T. Buffalo P.D. released an official statement saying that he "tripped and fell." They have now opened an investigation into the incident. +Location: Niagara Square **Links** * https://twitter.com/WBFO/status/1268712530358292484?s=20 * https://news.wbfo.org/post/graphic-video-buffalo-police-officers-violently-shove-man-ground +* https://twitter.com/DeathMetaIHippy/status/1268718692600295430?s=20 +* https://twitter.com/DeathMetaIHippy/status/1268719319220916226?s=20 +* https://www.wivb.com/news/five-people-arrested-one-person-injured-during-protest-activity-in-niagara-square/ ## Rochester
Windows: Retry replace clcache entry after a delay * Apply decoratorRetries to os.replace to overcome random "access denied" errors tied to anti-virus.
@@ -31,6 +31,9 @@ from tempfile import TemporaryFile from atomicwrites import atomic_write from io import open +from nuitka.utils.Utils import decoratorRetries +from nuitka.Tracing import general + VERSION = "5.0.0-dev" HashAlgorithm = hashlib.md5 @@ -452,7 +455,15 @@ class CompilerArtifactsSection(object): artifacts.stderr, ) # Replace the full cache entry atomically + @decoratorRetries( + logger=general, + purpose="replace %r with %r" % (cacheEntryDir, tempEntryDir), + consequence="aborting", + ) + def _replaceEntry(): os.replace(tempEntryDir, cacheEntryDir) + + _replaceEntry() return size def getEntry(self, key):
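A generic sketch of the retry pattern being applied here, not Nuitka's actual decoratorRetries implementation: retry os.replace a few times with a short delay so a transient "access denied" raised while an anti-virus scanner holds the file does not abort the cache update.

import os
import time

def replace_with_retries(src, dst, attempts=5, delay=0.5):
    for attempt in range(attempts):
        try:
            os.replace(src, dst)   # atomic replace of the cache entry
            return
        except OSError:
            if attempt == attempts - 1:
                raise              # give up after the last attempt
            time.sleep(delay)      # wait for the scanner to release the file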
Remove nagging test This test accomplishes nothing that isn't already checked by another test and it invokes platform-specific behavior (newline chars in terminal commands) that we don't care about here.
@@ -194,7 +194,3 @@ Syntax errors: >>> filter("(") guild: syntax error in filter - unexpected end of expresion <exit 1> - - >>> filter("foo = \n 123") - guild: syntax error in filter - unexpected end of expresion - <exit 1>
Delete python bytecode including pyo TrivialFix
@@ -103,7 +103,7 @@ passenv = KMIP_PLUGIN_ENABLED deps = -r{toxinidir}/test-requirements.txt setenv = OS_TEST_PATH={toxinidir}/barbican/cmd/functionaltests commands = - /usr/bin/find . -type f -name "*.pyc" -delete + /usr/bin/find . -type f -name "*.py[c|o]" -delete /bin/bash {toxinidir}/functionaltests/pretty_tox.sh '{posargs}' [flake8]
test: Don't reseed python random for every test Nothing should use it
import pytest -import random -# import time import numpy as np from psyneulink.core.llvm import ptx_enabled @@ -48,7 +46,6 @@ def pytest_runtest_setup(item): def pytest_runtest_call(item): # seed = int(item.config.getoption('--pnl-seed')) seed = 0 - random.seed(seed) np.random.seed(seed) from psyneulink.core.globals.utilities import set_global_seed set_global_seed(seed)
Spin wheel Error removed version
from plugin import plugin import random - ''' -This code pics one of the random inputs given by the user like spin wheel +'''This code picks one of the random inputs given by the user like spin wheel''' - ''' @alias("wheel spin") @plugin("spin wheel") def spin(jarvis, s):
Updated notebook code and docs as HTML render Updated code and documentation as HTML rendered output from Jupyter Notebooks for faster loading.
@@ -22,8 +22,8 @@ DataAtWork: URL: https://aws.amazon.com/blogs/big-data/build-a-real-time-stream-processing-pipeline-with-apache-flink-on-aws/ AuthorName: Steffen Hausmann - Title: Exploring data with Python and Amazon S3 Select - URL: https://github.com/manavsehgal/CloudStory/blob/master/open-data-analytics/exploring-data-with-python-and-amazon-s3-select.ipynb + URL: https://s3.amazonaws.com/cloudstory/exploring-data-with-python-and-amazon-s3-select.html AuthorName: Manav Sehgal - Title: Optimizing data for analysis with Amazon Athena and AWS Glue - URL: https://github.com/manavsehgal/CloudStory/blob/master/open-data-analytics/optimizing-data-for-analysis-with-amazon-athena-and-aws-glue.ipynb + URL: https://s3.amazonaws.com/cloudstory/optimizing-data-for-analysis-with-amazon-athena-and-aws-glue.html AuthorName: Manav Sehgal
fix(zh_stock_a_sina.py): exchange the open and close columns
@@ -186,7 +186,7 @@ def stock_zh_a_daily(symbol="sh600000", factor=""): temp_df = temp_df.astype(float) temp_df["amount"] = temp_df["amount"] * 10000 temp_df["turnover"] = temp_df["volume"] / temp_df["amount"] - temp_df.columns = ['open', 'high', 'low', 'close', 'volume', 'outstanding_share', 'turnover'] + temp_df.columns = ['close', 'high', 'low', 'open', 'volume', 'outstanding_share', 'turnover'] if not factor: return temp_df if factor == "hfq":
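A small sketch of why the fix is purely a reordering (the numbers are made up): assigning to DataFrame.columns relabels positions without moving any data, so the list has to match the order in which the source actually returns the columns.

import pandas as pd

df = pd.DataFrame([[10.0, 12.0, 9.0, 11.0]],
                  columns=["c0", "c1", "c2", "c3"])   # order as returned by the source
df.columns = ["close", "high", "low", "open"]         # positional relabel, no data moves
print(df.loc[0, "close"])                             # 10.0 -- still the first column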
Fix, must not cache parent package in modules * It could happen that this parent was demoted and, as a consequence, the cached reference was still used but broken.
@@ -52,7 +52,7 @@ from .NodeBases import ( class PythonModuleBase(NodeBase): # Base classes can be abstract, pylint: disable=abstract-method - __slots__ = "module_name", "package" + __slots__ = ("module_name",) def __init__(self, module_name, source_ref): assert type(module_name) is ModuleName, module_name @@ -60,7 +60,6 @@ class PythonModuleBase(NodeBase): NodeBase.__init__(self, source_ref=source_ref) self.module_name = module_name - self.package = None def getDetails(self): return {"module_name": self.module_name} @@ -81,17 +80,17 @@ class PythonModuleBase(NodeBase): return False def attemptRecursion(self): - # Make sure the package is recursed to. + # Make sure the package is recursed to if any + package_name = self.module_name.getPackageName() + if package_name is None: + return () # Return the list of newly added modules. - result = [] - - package_name = self.module_name.getPackageName() - if package_name is not None and self.package is None: - self.package = getModuleByName(package_name) + result = [] + package = getModuleByName(package_name) - if package_name is not None and self.package is None: + if package_name is not None and package is None: package_package, package_filename, finding = findModule( importing=self, module_name=package_name, @@ -103,10 +102,10 @@ class PythonModuleBase(NodeBase): # TODO: Temporary, if we can't find the package for Python3.3 that # is semi-OK, maybe. if python_version >= 300 and not package_filename: - return [] + return () if package_name == "uniconvertor.app.modules": - return [] + return () assert package_filename is not None, (package_name, finding) @@ -122,7 +121,7 @@ class PythonModuleBase(NodeBase): ) if decision is not None: - self.package, is_added = recurseTo( + package, is_added = recurseTo( module_package=package_package, module_filename=package_filename, module_relpath=relpath(package_filename), @@ -132,15 +131,14 @@ class PythonModuleBase(NodeBase): ) if is_added: - result.append(self.package) + result.append(package) - if self.package: + if package: from nuitka.ModuleRegistry import addUsedModule - addUsedModule(self.package) + addUsedModule(package) - # print "Recursed to package", self.package_name - result.extend(self.package.attemptRecursion()) + result.extend(package.attemptRecursion()) return result
Pass block to new BlockPublisher Unwrap the BlockWrapper passed to the BlockPublisher before passing it to the Rust layer.
@@ -28,6 +28,7 @@ from sawtooth_validator.ffi import OwnedPointer from sawtooth_validator.consensus.handlers import BlockEmpty from sawtooth_validator.consensus.handlers import BlockInProgress from sawtooth_validator.consensus.handlers import BlockNotInitialized +from sawtooth_validator.journal.block_wrapper import BlockWrapper LOGGER = logging.getLogger(__name__) @@ -167,6 +168,12 @@ class BlockPublisher(OwnedPointer): """ super(BlockPublisher, self).__init__('block_publisher_drop') + if chain_head is not None: + chain_head = BlockWrapper.wrap(chain_head) + chain_head_block = chain_head.block + else: + chain_head_block = None + self._to_exception(PY_LIBRARY.call( 'block_publisher_new', block_manager.pointer, @@ -177,7 +184,7 @@ class BlockPublisher(OwnedPointer): ctypes.py_object(settings_cache), ctypes.py_object(block_sender), ctypes.py_object(batch_sender), - ctypes.py_object(chain_head), + ctypes.py_object(chain_head_block), ctypes.py_object(identity_signer), ctypes.py_object(data_dir), ctypes.py_object(config_dir),
Move TenantResponse from premium to manager rest's responses, as it is needed in the community version as well
@@ -70,3 +70,18 @@ class UserResponse(object): self.role = kwargs.get('role') self.active = kwargs.get('active') self.last_login_at = kwargs.get('last_login_at') + + [email protected] +class TenantResponse(object): + + resource_fields = { + 'name': fields.String, + 'groups': fields.Raw, + 'users': fields.Raw, + } + + def __init__(self, **kwargs): + self.name = kwargs.get('name') + self.groups = kwargs.get('groups') + self.users = kwargs.get('users')
Adapt to SQLAlchemy 1.4 This patch makes the necessary change to adapt to the SQLAlchemy 1.4 release in a way that is still compatible with the currently pinned 1.3 versions. This is related to the overall effort to bump SQLAlchemy to 1.4 Closes-Bug:
@@ -706,7 +706,16 @@ class CellV2Commands(object): # worry about parsing and splitting a URL which could have special # characters in the password, which makes parsing a nightmare. url = sqla_url.make_url(connection) + + # TODO(gibi): remove hasattr() conditional in favor of "url.set()" + # when SQLAlchemy 1.4 is the minimum version in requirements + if hasattr(url, "set"): + url = url.set(database=url.database + '_cell0') + else: + # TODO(zzzeek): remove when SQLAlchemy 1.4 + # is the minimum version in requirements url.database = url.database + '_cell0' + return urlparse.unquote(str(url)) dbc = database_connection or cell0_default_connection()
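A standalone sketch of the API difference (the connection string is a placeholder): SQLAlchemy 1.4 makes URL objects immutable, so the database name must be replaced with .set(), whereas 1.3 allowed mutating the attribute in place.

from sqlalchemy.engine import url as sqla_url

url = sqla_url.make_url("mysql+pymysql://nova:secret@localhost/nova")
if hasattr(url, "set"):                                  # SQLAlchemy >= 1.4
    url = url.set(database=url.database + "_cell0")
else:                                                    # SQLAlchemy 1.3
    url.database = url.database + "_cell0"
print(str(url))                                          # ...localhost/nova_cell0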
BUG: fixed variable name Fixed the variable name in the error message.
@@ -325,7 +325,7 @@ class Instrument(object): # Check if required keys present in input. if req_key not in cust: estr = ''.join(('Input dict to custom is missing the ', - 'required key: ', rkey)) + 'required key: ', req_key)) raise ValueError(estr) # Check if optional arguments present. If not, provide
Adding nODE tutorial to readme. The notebook Introducing_JaxModel_and_PINNModel.ipynb is not in the readme, so I've added that too.
@@ -54,7 +54,9 @@ tutorials discuss about using DeepChem for specific applications. * [15 Training a Generative Adversarial Network on MNIST](Training_a_Generative_Adversarial_Network_on_MNIST.ipynb) * [16 Distributed Multi-GPU Training of DeepChem Models with LitMatter](https://github.com/ncfrey/litmatter/blob/main/LitDeepChem.ipynb) * [17 Advanced model training using Hyperopt](Advanced_model_training_using_hyperopt.ipynb) -* [18 Using Torchdiffeq in Deepchem](Using_Torchdiffeq_in_Deepchem.ipynb) + + + ### Molecular Machine Learning * [1 Molecular Fingerprints](Molecular_Fingerprints.ipynb) @@ -92,3 +94,5 @@ tutorials discuss about using DeepChem for specific applications. ### Physics Informed Neural Networks * [1 Physics Informed Neural Networks (Burgers Equation)](Physics_Informed_Neural_Networks.ipynb) +* [2 Introducing_JaxModel_and_PINNModel](Introducing_JaxModel_and_PINNModel.ipynb) +* [3 About_NODE_Using_Torchdiffeq_in_Deepchem](About_NODE_Using_Torchdiffeq_in_Deepchem.ipynb)
Change type of num_children array in docstring Docstring did not reflect the changes in
@@ -1185,7 +1185,7 @@ class Tree: @property def num_children_array(self): """ - A numpy array (dtype=np.uint64) encoding the number of children of + A numpy array (dtype=np.int32) encoding the number of children of each node in this tree, such that ``tree.num_children_array[u] == tree.num_children(u)`` for all ``0 <= u <= ts.num_nodes``. See the :meth:`~.num_children`
Give PauliStringPhasor nonzero default atol (Fixes last broken Windows tests) Fixes The issue was that a default value of 0 for atol in PauliStringPhasor meant that some machines failed to recognize gate sequence simplifications. Feel free to verify in your Windows build PR.
@@ -40,7 +40,7 @@ class ConvertToPauliStringPhasors(PointOptimizer): def __init__(self, ignore_failures: bool = False, keep_clifford: bool = False, - atol: float = 0) -> None: + atol: float = 1e-14) -> None: """ Args: ignore_failures: If set, gates that fail to convert are forwarded
Typo fixing and formatting change Typo fixing and formatting change in autograd tutorial
@@ -127,8 +127,8 @@ y.backward(gradients) print(x.grad) ############################################################### -# You can also stops autograd from tracking history on Tensors -# with requires_grad=True by wrapping the code block in +# You can also stop autograd from tracking history on Tensors +# with ``.requires_grad``=True by wrapping the code block in # ``with torch.no_grad():`` print(x.requires_grad) print((x ** 2).requires_grad)
Touchring fixes Bug fix: Ghost touch removed. Bug fix: if mtdev is reconnecting, _touchmove might occur before _touchdown (person is touching the touchscreen while it gets reconnected)
@@ -41,7 +41,6 @@ pointer_alpha = 0.7 def _touch_down(win, touch): ud = touch.ud - touch.scale_for_screen(win.width, win.height) with win.canvas.after: ud['tr.color'] = Color(1, 1, 1, pointer_alpha) iw, ih = pointer_image.size @@ -59,6 +58,8 @@ def _touch_down(win, touch): def _touch_move(win, touch): ud = touch.ud + if not ud.get('tr.rect', False): + _touch_down(win, touch) ud['tr.rect'].pos = ( touch.x - (pointer_image.width / 2. * pointer_scale), touch.y - (pointer_image.height / 2. * pointer_scale))
Do not mark SMTP relays as sinkholes We received SMTP traffic from these IP addresses. They are not likely to be sinkholes.
195.22.26.208 195.22.26.209 195.22.26.210 -195.22.26.211 -195.22.26.212 -195.22.26.213 -195.22.26.214 +# 195.22.26.211 # relay.net.vodafone.pt +# 195.22.26.212 # relay2.net.vodafone.pt +# 195.22.26.213 # relay3.net.vodafone.pt +# 195.22.26.214 # relay4.net.vodafone.pt 195.22.26.215 195.22.26.216 # 195.22.26.217 # anubisnetworks.com
tools/downloader: read the config files in binary mode This ensures that decoding is done with YAML rules (i.e. UTF-8 by default) instead of Python rules (i.e. the locale encoding by default).
@@ -320,7 +320,7 @@ def load_topologies(args): for config_path in sorted(model_root.glob('**/model.yml')): subdirectory = config_path.parent.relative_to(model_root) - with config_path.open() as config_file, \ + with config_path.open('rb') as config_file, \ deserialization_context('In config "{}"'.format(config_path)): top = yaml.safe_load(config_file) @@ -334,7 +334,7 @@ def load_topologies(args): else: # monolithic config print('########## Warning: the --config option is deprecated and will be removed in a future release', file=sys.stderr) - with args.config.open() as config_file, \ + with args.config.open('rb') as config_file, \ deserialization_context('In config "{}"'.format(args.config)): for i, top in enumerate(yaml.safe_load(config_file)['topologies']): with deserialization_context('In topology #{}'.format(i)):
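A minimal illustration of the difference (the file name is hypothetical): when the stream is opened in binary mode, PyYAML detects the encoding from the byte stream itself (UTF-8 or UTF-16, with or without a BOM) rather than relying on Python's locale-dependent text decoding.

import yaml

with open("model.yml", "rb") as config_file:   # bytes in, YAML decides how to decode
    top = yaml.safe_load(config_file)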
Add tuple memoization Fixes
@@ -62,6 +62,19 @@ def id_for_memo_list(denormalized_list, output_ref=False): return serialize(normalized_list) +@id_for_memo.register(tuple) +def id_for_memo_tuple(denormalized_tuple, output_ref=False): + if type(denormalized_tuple) != tuple: + raise ValueError("id_for_memo_tuple cannot work on subclasses of tuple") + + normalized_list = [] + + for e in denormalized_tuple: + normalized_list.append(id_for_memo(e, output_ref=output_ref)) + + return serialize(normalized_list) + + @id_for_memo.register(dict) def id_for_memo_dict(denormalized_dict, output_ref=False): """This normalises the keys and values of the supplied dictionary.
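Background on why tuple needs its own registration, shown as a standard-library sketch rather than Parsl's actual code: the register pattern above matches functools.singledispatch, which dispatches on exact or subclass type, so a handler registered for list never applies to tuple.

from functools import singledispatch

@singledispatch
def id_for_memo_demo(obj):
    raise ValueError("unsupported type: %r" % type(obj))

@id_for_memo_demo.register(list)
def _(obj):
    return b"list:" + repr(obj).encode()

@id_for_memo_demo.register(tuple)
def _(obj):
    return b"tuple:" + repr(obj).encode()

print(id_for_memo_demo([1, 2]))   # handled by the list registration
print(id_for_memo_demo((1, 2)))   # without the tuple registration this would raise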
Moved GPIO Set Code GPIO pins have to be set in a while loop, in contrast with the PWM pins
@@ -134,6 +134,24 @@ int main(void) { do { status = HAL_UART_Receive(&huart2, (unsigned char *) rx_pwm_buf, 4, 1000); + + switch(motor1_dir) { + case MOTOR_FORWARD: + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_0, GPIO_PIN_SET); + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_1, GPIO_PIN_RESET); + break; + case MOTOR_BACKWARD: + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_0, GPIO_PIN_RESET); + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_1, GPIO_PIN_SET); + break; + case MOTOR_STOP: + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_0, GPIO_PIN_RESET); + HAL_GPIO_WritePin(GPIOB, GPIO_PIN_1, GPIO_PIN_RESET); + break; + default: + break; + } + } while (status != HAL_OK); status = HAL_ERROR; @@ -275,8 +293,7 @@ void driveMotor1(uint16_t pwm_value, uint16_t dir) { switch (dir) { case MOTOR_FORWARD: user_pwm_setvalue(pwm_value, 1); - HAL_GPIO_WritePin(GPIOB, GPIO_PIN_0, GPIO_PIN_SET); - HAL_GPIO_WritePin(GPIOB, GPIO_PIN_1, GPIO_PIN_RESET); + motor1_dir = MOTOR_FORWARD; break; case MOTOR_BACKWARD:
Make add_inferred_export_properties not use pickle serializer Safe because all its arguments are strings (or list of strings)
@@ -151,7 +151,7 @@ def get_saved_export_task_status(export_instance_id): return get_task_status(download_data.task) -@serial_task('{domain}-{case_type}', queue='background_queue', serializer='pickle') +@serial_task('{domain}-{case_type}', queue='background_queue') def add_inferred_export_properties(sender, domain, case_type, properties): _cached_add_inferred_export_properties(sender, domain, case_type, properties)